major update
This commit is contained in:
@@ -22,17 +22,17 @@ const checkHeaders = (req, res, next) => {
|
||||
* HTTP/1/1 400 Not Found
|
||||
* {
|
||||
* status:400,
|
||||
* ref:"headers"
|
||||
* ref:"middlewares"
|
||||
* msg:"missingheaders",
|
||||
* data: ["headermissing1"]
|
||||
* data: ["headermissing1"]
|
||||
* }
|
||||
*@apiErrorExample {json} Error-Response:
|
||||
* HTTP/1/1 404 Not Found
|
||||
* {
|
||||
* status:404,
|
||||
* ref:"headers"
|
||||
* ref:"middlewares"
|
||||
* msg:"tribeiddoesnotexist",
|
||||
* data: {xalias}
|
||||
* data: {xalias}
|
||||
* }
|
||||
*
|
||||
* @apiHeaderExample {json} Header-Exemple:
|
||||
@@ -61,27 +61,28 @@ const checkHeaders = (req, res, next) => {
|
||||
missingheader.push(h);
|
||||
}
|
||||
}
|
||||
//console.log( 'header', header )
|
||||
// console.log( 'pass header', header )
|
||||
// store in session the header information
|
||||
req.session.header = header;
|
||||
// Each header have to be declared
|
||||
if (missingheader != "") {
|
||||
// bad request
|
||||
return res.status(400).json({
|
||||
ref: "headers",
|
||||
ref: "middlewares",
|
||||
msg: "missingheader",
|
||||
data: missingheader,
|
||||
});
|
||||
}
|
||||
//console.log( req.app.locals.tribeids )
|
||||
// xtribe == "town" is used during the setup process
|
||||
// xtribe == "adminapi" is used to access /adminapi
|
||||
if (
|
||||
!(
|
||||
header.xtribe == "town" || req.app.locals.tribeids.includes(header.xtribe)
|
||||
["town","adminapi"].includes(header.xtribe) || req.app.locals.tribeids.includes(header.xtribe)
|
||||
)
|
||||
) {
|
||||
return res.status(404).json({
|
||||
ref: "headers",
|
||||
ref: "middlewares",
|
||||
msg: "tribeiddoesnotexist",
|
||||
data: { xtribe: header.xtribe },
|
||||
});
|
||||
@@ -90,6 +91,8 @@ const checkHeaders = (req, res, next) => {
|
||||
console.log("warning language requested does not exist force to english");
|
||||
header.xlang = "en";
|
||||
}
|
||||
//set anonymous profil
|
||||
req.session.header.xprofils=["anonymous"]
|
||||
next();
|
||||
};
|
||||
module.exports = checkHeaders;
|
||||
|
@@ -1,69 +0,0 @@
|
||||
const fs = require("fs-extra");
|
||||
const glob = require("glob");
|
||||
const path = require("path");
|
||||
|
||||
const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
|
||||
const hasAccessrighton = (object, action, ownby) => {
|
||||
/*
|
||||
@action (mandatory) : CRUDO
|
||||
@object (mandatory)= name of a folder object in /tribeid space can be a tree for example objects/items
|
||||
@ownby (option) = list des uuid propriétaire
|
||||
return next() if all action exist in req.app.local.tokens[UUID].ACCESSRIGHTS.data[object]
|
||||
OR if last action ="O" and uuid exist in ownBy
|
||||
Careffull if you have many action CRO let O at the end this will force req.right at true if the owner try an action on this object
|
||||
|
||||
need to check first a person exist with this alias in tribe
|
||||
|
||||
const person = fs.readJsonSync(
|
||||
`${conf.dirapi}/nationchains/tribes/${req.session.header.xtribe}/persons/${req.session.header.xalias}.json`
|
||||
);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
|
||||
return (req, res, next) => {
|
||||
//console.log( 'err.stack hasAccessrights', err.statck )
|
||||
//console.log( `test accessright on object:${object} for ${req.session.header.xworkon}:`, req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ] )
|
||||
req.right = false;
|
||||
if (
|
||||
req.app.locals.tokens[req.session.header.xpaganid].ACCESSRIGHTS.data[
|
||||
req.session.header.xworkon
|
||||
] &&
|
||||
req.app.locals.tokens[req.session.header.xpaganid].ACCESSRIGHTS.data[
|
||||
req.session.header.xworkon
|
||||
][object]
|
||||
) {
|
||||
req.right = true;
|
||||
[...action].forEach((a) => {
|
||||
if (a == "O" && ownby && ownby.includes(req.session.header.xpaganid)) {
|
||||
req.right = true;
|
||||
} else {
|
||||
req.right =
|
||||
req.right &&
|
||||
req.app.locals.tokens[
|
||||
req.session.header.xpaganid
|
||||
].ACCESSRIGHTS.data[req.session.header.xworkon][object].includes(a);
|
||||
}
|
||||
});
|
||||
}
|
||||
//console.log( 'Access data autorise? ', req.right )
|
||||
if (!req.right) {
|
||||
return res.status(403).json({
|
||||
info: "forbiddenAccessright",
|
||||
ref: "headers",
|
||||
moreinfo: {
|
||||
xpaganid: req.session.header.xpaganid,
|
||||
object: object,
|
||||
xworkon: req.session.header.xworkon,
|
||||
action: action,
|
||||
},
|
||||
});
|
||||
}
|
||||
next();
|
||||
};
|
||||
};
|
||||
module.exports = hasAccessrighton;
|
@@ -1,118 +1,198 @@
|
||||
const fs = require("fs-extra");
|
||||
const dayjs = require("dayjs");
|
||||
const glob = require("glob");
|
||||
// To debug it could be easier with source code:
|
||||
// const openpgp = require("/media/phil/usbfarm/apxtrib/node_modules/openpgp/dist/node/openpgp.js");
|
||||
const openpgp = require("openpgp");
|
||||
|
||||
const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
|
||||
/**
|
||||
* Check authentification and get person profils for a tribe
|
||||
* @param {object} req
|
||||
* @param {object} res
|
||||
* @param {function} next
|
||||
* @returns {status:}
|
||||
*
|
||||
* 3 steps:
|
||||
* - clean eventual tokens oldest than 24 hours (the first pagan that authenticate of the day will process this)
|
||||
* - if token present in /town/tmp/tokens/alias_tribe_part of the xhash return xprofils with list of profils pagans
|
||||
* - if no token then check xhash with openpgp lib and create one
|
||||
*
|
||||
* All data related are store in town/tmp/tokens backend, and localstorage headers for front end
|
||||
* A penalty function increase a sleep function between 2 fail try of authentification to avoid bruteforce
|
||||
*/
|
||||
const isAuthenticated = async (req, res, next) => {
|
||||
// tokens if valid are store in /dirtown/tmp/tokens/xalias_xdays_xhash(20,200)
|
||||
// tokens if valid are store in /dirtown/tmp/tokens/xalias_xdays_xhash(20,200)
|
||||
// once a day rm oldest tokens than 24hours tag job by adding tmp/tokensmenagedone{day}
|
||||
|
||||
const withlog = true;
|
||||
const currentday = dayjs().date();
|
||||
console.log(
|
||||
"if menagedone" + currentday,
|
||||
!fs.existsSync(`${process.env.dirtown}/tmp/tokensmenagedone${currentday}`)
|
||||
fs.ensureDirSync(`${process.env.dirtown}/tmp/tokens`);
|
||||
let menagedone = fs.existsSync(
|
||||
`${process.env.dirtown}/tmp/tokens/menagedone${currentday}`
|
||||
);
|
||||
if (!fs.existsSync(`${process.env.dirtown}/tmp/tokens`))
|
||||
fs.mkdirSync(`${process.env.dirtown}/tmp/tokens`);
|
||||
if (!fs.existsSync(`${process.env.dirtown}/tmp/tokensmenagedone${currentday}`)) {
|
||||
if (withlog)
|
||||
console.log(`menagedone${currentday} was it done today?:${menagedone}`);
|
||||
if (!menagedone) {
|
||||
// clean oldest
|
||||
const tsday = dayjs().valueOf(); // now in timestamp format
|
||||
glob.sync(`${process.env.dirtown}/tmp/tokensmenagedone*`).forEach((f) => {
|
||||
glob.sync(`${process.env.dirtown}/tmp/tokens/menagedone*`).forEach((f) => {
|
||||
fs.removeSync(f);
|
||||
});
|
||||
glob.sync(`${process.env.dirtown}/tmp/tokens/*.json`).forEach((f) => {
|
||||
if (tsday - parseInt(f.split("_")[1]) > 86400000) fs.remove(f);
|
||||
const fsplit = f.split("_");
|
||||
const elapse = tsday - parseInt(fsplit[2]);
|
||||
//24h 86400000 milliseconde 15mn 900000
|
||||
if (elapse && elapse > 86400000) {
|
||||
fs.remove(f);
|
||||
}
|
||||
});
|
||||
fs.outputFile(
|
||||
`${process.env.dirtown}/tmp/tokens/menagedone${currentday}`,
|
||||
"done by middleware/isAUthenticated"
|
||||
);
|
||||
}
|
||||
//Check register in tmp/tokens/
|
||||
console.log("isAuthenticate?");
|
||||
if (withlog) console.log("isAuthenticate?", req.session.header, req.body);
|
||||
|
||||
const resnotauth = {
|
||||
ref: "headers",
|
||||
ref: "middlewares",
|
||||
msg: "notauthenticated",
|
||||
data: {
|
||||
xalias: req.session.header.xalias,
|
||||
xaliasexists: true,
|
||||
},
|
||||
};
|
||||
//console.log(req.session.header);
|
||||
if (req.session.header.xalias == "anonymous" || req.session.header.xhash == "anonymous") {
|
||||
console.log("alias anonymous means not auth");
|
||||
return res.status(401).json(resnotauth);
|
||||
if (
|
||||
req.session.header.xalias == "anonymous" ||
|
||||
req.session.header.xhash == "anonymous"
|
||||
) {
|
||||
if (withlog) console.log("alias anonymous means not auth");
|
||||
resnotauth.status = 401;
|
||||
return res.status(resnotauth.status).json(resnotauth);
|
||||
}
|
||||
|
||||
const tmpfs = `${process.env.dirtown}/tmp/tokens/${req.session.header.xalias}_${
|
||||
req.session.header.xdays
|
||||
}_${req.session.header.xhash.substring(20, 200)}`;
|
||||
//console.log(tmpfs);
|
||||
let tmpfs = `${process.env.dirtown}/tmp/tokens/${req.session.header.xalias}_${req.session.header.xtribe}_${req.session.header.xdays}`;
|
||||
//max filename in ext4: 255 characters
|
||||
tmpfs += `_${req.session.header.xhash.substring(
|
||||
150,
|
||||
150 + tmpfs.length - 249
|
||||
)}.json`;
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} alias that request an access
|
||||
* @param {string} action "clean" | "penalty"
|
||||
*/
|
||||
const bruteforcepenalty = async (alias, action) => {
|
||||
const sleep = (ms) => {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
};
|
||||
const failstamp = `${process.env.dirtown}/tmp/tokens/${alias}.json`;
|
||||
if (action == "clean") {
|
||||
//to reinit bruteforce checker
|
||||
if (withlog) console.log("try to clean penalty file ", failstamp);
|
||||
fs.remove(failstamp, (err) => {
|
||||
if (err) console.log("Check forcebrut ", err);
|
||||
});
|
||||
} else if (action == "penalty") {
|
||||
const stamp = fs.existsSync(failstamp)
|
||||
? fs.readJSONSync(failstamp)
|
||||
: { lastfail: dayjs().format(), numberfail: 0 };
|
||||
stamp.lastfail = dayjs().format();
|
||||
stamp.numberfail += 1;
|
||||
fs.outputJSON(failstamp, stamp);
|
||||
if (withlog) console.log("penalty:", stamp);
|
||||
await sleep(stamp.numberfail * 100); //increase of 0,1 second the answer time per fail
|
||||
if (withlog) console.log("time out penalty");
|
||||
}
|
||||
};
|
||||
if (!fs.existsSync(tmpfs)) {
|
||||
// need to check detached sign
|
||||
let publickey;
|
||||
if (
|
||||
fs.existsSync(
|
||||
`${conf.dirapi}/nationchains/pagans/itm/${req.session.header.xalias}.json`
|
||||
)
|
||||
) {
|
||||
const pagan = fs.readJsonSync(
|
||||
`${conf.dirapi}/nationchains/pagans/itm/${req.session.header.xalias}.json`
|
||||
);
|
||||
publickey = pagan.publicKey;
|
||||
} else {
|
||||
let publickey = "";
|
||||
console.log(process.cwd());
|
||||
console.log(process.env.PWD);
|
||||
console.log(__dirname);
|
||||
const aliasinfo = `${process.env.PWD}/nationchains/pagans/itm/${req.session.header.xalias}.json`;
|
||||
if (fs.existsSync(aliasinfo)) {
|
||||
publickey = fs.readJsonSync(aliasinfo).publickey;
|
||||
} else if (req.body.publickey) {
|
||||
resnotauth.data.xaliasexists = false;
|
||||
if (req.body.publickey) {
|
||||
publickey = req.body.publickey;
|
||||
} else {
|
||||
console.log("alias unknown");
|
||||
return res.status(404).send(resnotauth);
|
||||
publickey = req.body.publickey;
|
||||
}
|
||||
if (publickey == "") {
|
||||
if (withlog) console.log("alias unknown");
|
||||
resnotauth.status = 404;
|
||||
resnotauth.data.xaliasexists = false;
|
||||
return res.status(resnotauth.status).send(resnotauth);
|
||||
}
|
||||
if (withlog) console.log("publickey", publickey);
|
||||
if (publickey.substring(0, 31) !== "-----BEGIN PGP PUBLIC KEY BLOCK") {
|
||||
if (withlog)
|
||||
console.log("Publickey is not valid as armored key:", publickey);
|
||||
await bruteforcepenalty(req.session.header.xalias, "penalty");
|
||||
resnotauth.status = 404;
|
||||
return res.status(resnotauth.status).send(resnotauth);
|
||||
}
|
||||
const clearmsg = Buffer.from(req.session.header.xhash, "base64").toString();
|
||||
if (clearmsg.substring(0, 10) !== "-----BEGIN") {
|
||||
if (withlog)
|
||||
console.log("xhash conv is not valid as armored key:", clearmsg);
|
||||
await bruteforcepenalty(req.session.header.xalias, "penalty");
|
||||
resnotauth.status = 404;
|
||||
return res.status(resnotauth.status).send(resnotauth);
|
||||
}
|
||||
if (withlog) console.log("clearmsg", clearmsg);
|
||||
const pubkey = await openpgp.readKey({ armoredKey: publickey });
|
||||
const signedMessage = await openpgp.readCleartextMessage({
|
||||
cleartextMessage: clearmsg,
|
||||
});
|
||||
const verificationResult = await openpgp.verify({
|
||||
message: signedMessage,
|
||||
verificationKeys: pubkey,
|
||||
});
|
||||
if (withlog) console.log(verificationResult);
|
||||
if (withlog) console.log(verificationResult.signatures[0].keyID.toHex());
|
||||
try {
|
||||
await verificationResult.signatures[0].verified;
|
||||
if (
|
||||
verificationResult.data !=
|
||||
`${req.session.header.xalias}_${req.session.header.xdays}`
|
||||
) {
|
||||
resnotauth.msg = "signaturefailled";
|
||||
if (withlog)
|
||||
console.log(
|
||||
`message recu:${verificationResult.data} , message attendu:${req.session.header.xalias}_${req.session.header.xdays}`
|
||||
);
|
||||
await bruteforcepenalty(req.session.header.xalias, "penalty");
|
||||
resnotauth.status = 401;
|
||||
return res.status(resnotauth.status).send(resnotauth);
|
||||
}
|
||||
}
|
||||
if (publickey.substring(0,10)!=="-----BEGIN"){
|
||||
console.log("Publickey is not valid as armored key:", publickey)
|
||||
return res.status(404).send(resnotauth);
|
||||
}
|
||||
if (Buffer.from(req.session.header.xhash, "base64").toString().substring(0,10)!=="-----BEGIN"){
|
||||
console.log("xhash conv is not valid as armored key:", Buffer.from(req.session.header.xhash, "base64").toString())
|
||||
return res.status(404).send(resnotauth);
|
||||
}
|
||||
let publicKey;
|
||||
try {
|
||||
publicKey = await openpgp.readKey({ armoredKey: publickey });
|
||||
}catch(err){
|
||||
console.log(erreur)
|
||||
}
|
||||
const msg = await openpgp.createMessage({
|
||||
text: `${req.session.header.xalias}_${req.session.header.xdays}`,
|
||||
});
|
||||
const signature = await openpgp.readSignature({
|
||||
armoredSignature: Buffer.from(
|
||||
req.session.header.xhash,
|
||||
"base64"
|
||||
).toString(),
|
||||
});
|
||||
//console.log(msg);
|
||||
//console.log(signature);
|
||||
//console.log(publicKey);
|
||||
const checkauth = await openpgp.verify({
|
||||
message: msg,
|
||||
signature: signature,
|
||||
verificationKeys: publicKey,
|
||||
});
|
||||
//console.log(checkauth);
|
||||
//console.log(checkauth.signatures[0].keyID);
|
||||
//console.log(await checkauth.signatures[0].signature);
|
||||
//console.log(await checkauth.signatures[0].verified);
|
||||
|
||||
const { check, keyID } = checkauth.signatures[0];
|
||||
try {
|
||||
await check; // raise an error if necessary
|
||||
fs.outputFileSync(tmpfs, req.session.header.xhash, "utf8");
|
||||
} catch (e) {
|
||||
resnotauth.msg = "signaturefailed";
|
||||
console.log("not auth fail sign");
|
||||
return res.status(401).send(resnotauth);
|
||||
resnotauth.msg = "signaturefailled";
|
||||
if (withlog) console.log("erreur", e);
|
||||
await bruteforcepenalty(req.session.header.xalias, "penalty");
|
||||
resnotauth.status = 401;
|
||||
return res.status(resnotauth.status).send(resnotauth);
|
||||
}
|
||||
// authenticated then get person profils (person = pagan for a xtrib)
|
||||
req.session.header.xprofils.push("pagans");
|
||||
const person = `${process.env.dirtown}/tribes/${req.session.header.xtribe}/persons/itm/${req.session.header.xalias}.json`;
|
||||
if (withlog) {
|
||||
console.log("Profils tribe/app management");
|
||||
console.log("person", person);
|
||||
}
|
||||
if (fs.existsSync(person)) {
|
||||
const infoperson = fs.readJSONSync(person);
|
||||
console.log(infoperson);
|
||||
infoperson.profils.forEach((p) => req.session.header.xprofils.push(p));
|
||||
}
|
||||
fs.outputJSONSync(tmpfs, req.session.header.xprofils);
|
||||
} else {
|
||||
//tmpfs exist get profils from identification process
|
||||
req.session.header.xprofils = fs.readJSONSync(tmpfs);
|
||||
}
|
||||
console.log("Authenticated");
|
||||
bruteforcepenalty(req.session.header.xalias, "clean");
|
||||
console.log(`${req.session.header.xalias} Authenticated`);
|
||||
next();
|
||||
};
|
||||
module.exports = isAuthenticated;
|
||||
|
@@ -11,7 +11,7 @@ Checkjson.schema = {};
|
||||
Checkjson.schema.properties = {};
|
||||
Checkjson.schema.properties.type = {};
|
||||
Checkjson.schema.properties.type.string = (str) => typeof str === "string";
|
||||
Checkjson.schema.properties.type.array = (val)=> Array.isArray(val);
|
||||
Checkjson.schema.properties.type.array = (val) => Array.isArray(val);
|
||||
Checkjson.schema.properties.type.number = (n) => typeof n === "number";
|
||||
Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean";
|
||||
Checkjson.schema.properties.type.integer = (n) =>
|
||||
@@ -47,19 +47,44 @@ Checkjson.schema.properties.range = (
|
||||
};
|
||||
Checkjson.schema.properties.pattern = (str, pattern) => {
|
||||
try {
|
||||
pattern= new RegExp(pattern);
|
||||
pattern = new RegExp(pattern);
|
||||
} catch (e) {
|
||||
console.log('err pattern in checkjon',pattern);
|
||||
console.log("err pattern in checkjon", pattern);
|
||||
return false;
|
||||
}
|
||||
return pattern.test(str);
|
||||
};
|
||||
Checkjson.schema.properties.enum = (str, enumvalues) =>
|
||||
typeof str === "string" && enumvalues.includes(str);
|
||||
Checkjson.schema.properties.enum = (str, enumvalues) => {
|
||||
if (Array.isArray(enumvalues)) {
|
||||
return typeof str === "string" && enumvalues.includes(str);
|
||||
} else if (tribeId) {
|
||||
//enumvalues is a reference of objectname.key
|
||||
const { tribeId, obj, keyid } = enumvalues.split(".");
|
||||
return fs.existsSync(
|
||||
`${conf.dirtown}/tribes/${tribeId}/schema/${obj}/itm/${keyid}.json`
|
||||
);
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
// to check a value for a pattern
|
||||
// Checkjson.schema.properties.pattern(value, properties[p].pattern)
|
||||
/**
|
||||
*
|
||||
* @param {string} str to test
|
||||
* @param {string} format keyworkd existing in Checkjson.schema.properties.format
|
||||
* @return null if format does not exist, true or false
|
||||
*/
|
||||
Checkjson.testformat=(str, format)=>{
|
||||
if (!Checkjson.schema.properties.format[format]) { return null}
|
||||
return Checkjson.schema.properties.pattern(str, Checkjson.schema.properties.format[format])
|
||||
|
||||
}
|
||||
// see format https://json-schema.org/understanding-json-schema/reference/string.html#format
|
||||
// to check a just value with a format use Checkjson.testformat=(value, format)
|
||||
Checkjson.schema.properties.format = {
|
||||
"date-time": /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/,
|
||||
stringalphaonly:/^[A-Za-z0-9]{3,}$/,
|
||||
stringalphaonly: /^[A-Za-z0-9]{3,}$/,
|
||||
time: /[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/,
|
||||
date: /\d{4}-[01]\d-[0-3]\d/,
|
||||
duration: / /,
|
||||
@@ -79,11 +104,16 @@ Checkjson.schema.properties.format = {
|
||||
password:
|
||||
/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/,
|
||||
postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/,
|
||||
pgppublickey:
|
||||
/^-----BEGIN PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)?$/gm,
|
||||
pgpprivatekey:
|
||||
/^-----BEGIN PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)?$/gm,
|
||||
};
|
||||
Checkjson.schema.properties.default
|
||||
Checkjson.schema.properties.default;
|
||||
Checkjson.schema.validation = (schema) => {
|
||||
/*validate a schema structure*/
|
||||
const res = { status: 200, err: [] };
|
||||
const multimsg = [];
|
||||
const res = {};
|
||||
if (schema.properties) {
|
||||
Object.keys(schema.properties).forEach((p) => {
|
||||
const properties = schema.properties;
|
||||
@@ -92,10 +122,10 @@ Checkjson.schema.validation = (schema) => {
|
||||
typeof properties[p].type === "string" &&
|
||||
!Checkjson.schema.properties.type[properties[p].type]
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"schemaerrtypedoesnotexist",
|
||||
data: {propertie:p,type:properties[p].type}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "schemaerrtypedoesnotexist",
|
||||
data: { propertie: p, type: properties[p].type },
|
||||
});
|
||||
}
|
||||
if (
|
||||
@@ -105,10 +135,10 @@ Checkjson.schema.validation = (schema) => {
|
||||
) {
|
||||
properties[p].type.forEach((tp) => {
|
||||
if (!Checkjson.schema.properties.type[tp])
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"schemaerrtypedoesnotexist",
|
||||
data: {propertie:p,type:properties[p].type}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "schemaerrtypedoesnotexist",
|
||||
data: { propertie: p, type: properties[p].type },
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -116,26 +146,41 @@ Checkjson.schema.validation = (schema) => {
|
||||
properties[p].format &&
|
||||
!Checkjson.schema.properties.format[properties[p].format]
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"schemaerrformatdoesnotexist",
|
||||
data: {propertie:p,format:properties[p].format}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "schemaerrformatdoesnotexist",
|
||||
data: { propertie: p, format: properties[p].format },
|
||||
});
|
||||
}
|
||||
if (properties[p].enum && !Array.isArray(properties[p].enum)) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"schemaerrenumnotarray",
|
||||
data: {propertie:p,enum:properties[p].enum}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "schemaerrenumnotarray",
|
||||
data: { propertie: p, enum: properties[p].enum },
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
// 406 means not acceptable
|
||||
if (res.err.length > 0) res.status = 406;
|
||||
if (multimsg.length > 0) {
|
||||
res.status = 406;
|
||||
res.multimsg = multimsg;
|
||||
} else {
|
||||
res.status = 200;
|
||||
res.ref = "Checkjson";
|
||||
res.msg = "validcheck";
|
||||
}
|
||||
return res;
|
||||
};
|
||||
|
||||
/**
|
||||
* Check data with a schema
|
||||
*
|
||||
* @param {object} schema a json schema
|
||||
* @param {*} data some data to check using schema
|
||||
* @param {*} withschemacheck boolean that force a schema check (usefull on modification schema)
|
||||
* @returns {status: 200, ref:"Checkjson", msg:"validcheck", data:{itm:object}}
|
||||
* {status:417, multimsg:[{re,msg,data}],data:{itm:object}}
|
||||
*/
|
||||
Checkjson.schema.data = (schema, data, withschemacheck) => {
|
||||
/* validate a data set with a schema in a context ctx */
|
||||
/*
|
||||
@@ -148,7 +193,11 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
|
||||
const validschema = Checkjson.schema.validation(schema);
|
||||
if (validschema.status != 200) return validschema;
|
||||
}
|
||||
const res = { status: 200, err: [] };
|
||||
const multimsg = [];
|
||||
const res = {
|
||||
status: 200,
|
||||
data: { itm: data },
|
||||
};
|
||||
if (schema.properties) {
|
||||
const properties = schema.properties;
|
||||
Object.keys(properties).forEach((p) => {
|
||||
@@ -164,40 +213,61 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
|
||||
if (Checkjson.schema.properties.type[typ](data[p])) valid = true;
|
||||
});
|
||||
if (!valid)
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data: {key:p,value:data[p]}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: { key: p, value: data[p] },
|
||||
});
|
||||
|
||||
if (
|
||||
properties[p].minLength &&
|
||||
!Checkjson.schema.properties.minLength(data[p], properties[p].minLength)
|
||||
!Checkjson.schema.properties.minLength(
|
||||
data[p],
|
||||
properties[p].minLength
|
||||
)
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data:{key:p,value:data[p],minLength:properties[p].minLength}
|
||||
});
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: {
|
||||
key: p,
|
||||
value: data[p],
|
||||
minLength: properties[p].minLength,
|
||||
},
|
||||
});
|
||||
}
|
||||
if (
|
||||
properties[p].maxLength &&
|
||||
!Checkjson.schema.properties.maxLength(data[p], properties[p].maxLength)
|
||||
!Checkjson.schema.properties.maxLength(
|
||||
data[p],
|
||||
properties[p].maxLength
|
||||
)
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data:{key:p,value:data[p],maxLength:properties[p].maxLength}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: {
|
||||
key: p,
|
||||
value: data[p],
|
||||
maxLength: properties[p].maxLength,
|
||||
},
|
||||
});
|
||||
}
|
||||
if (
|
||||
properties[p].multipleOf &&
|
||||
!Checkjson.schema.properties.multipleOf(data[p], properties[p].multipleOf)
|
||||
!Checkjson.schema.properties.multipleOf(
|
||||
data[p],
|
||||
properties[p].multipleOf
|
||||
)
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data:{key:p,value:data[p],multipleOf:properties[p].multipleOf}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: {
|
||||
key: p,
|
||||
value: data[p],
|
||||
multipleOf: properties[p].multipleOf,
|
||||
},
|
||||
});
|
||||
}
|
||||
if (
|
||||
@@ -216,10 +286,17 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
|
||||
properties[p].exclusiveMaximum
|
||||
)
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data:{key:p,value:data[p],minimum:properties[p].minimum,maximum:properties[p].maximum,exclusiveMinimum:properties[p].exclusiveMinimum,exclusiveMaximum:properties[p].exclusiveMaximum}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: {
|
||||
key: p,
|
||||
value: data[p],
|
||||
minimum: properties[p].minimum,
|
||||
maximum: properties[p].maximum,
|
||||
exclusiveMinimum: properties[p].exclusiveMinimum,
|
||||
exclusiveMaximum: properties[p].exclusiveMaximum,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -227,10 +304,10 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
|
||||
properties[p].enum &&
|
||||
!Checkjson.schema.properties.enum(data[p], properties[p].enum)
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data:{key:p,value:data[p],enumlst:properties[p].enum}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: { key: p, value: data[p], enumlst: properties[p].enum },
|
||||
});
|
||||
}
|
||||
if (properties[p].format) {
|
||||
@@ -241,22 +318,32 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
|
||||
properties[p].pattern &&
|
||||
!Checkjson.schema.properties.pattern(data[p], properties[p].pattern)
|
||||
) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertie",
|
||||
data:{key:p,value:data[p],pattern:properties[p].pattern}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertie",
|
||||
data: { key: p, value: data[p], pattern: properties[p].pattern },
|
||||
});
|
||||
}
|
||||
} else if (schema.required && schema.required.includes(p)) {
|
||||
res.err.push({
|
||||
ref:"Checkjson",
|
||||
msg:"dataerrpropertierequired",
|
||||
data:{key:p,required:true}
|
||||
multimsg.push({
|
||||
ref: "Checkjson",
|
||||
msg: "dataerrpropertierequired",
|
||||
data: { key: p, required: true },
|
||||
});
|
||||
}
|
||||
});
|
||||
} //end properties
|
||||
if (schema.apxid) {
|
||||
res.data.apxid = data[schema.apxid];
|
||||
}
|
||||
if (multimsg.length > 0) {
|
||||
res.status = 417;
|
||||
res.multimsg = multimsg;
|
||||
} else {
|
||||
res.status = 200;
|
||||
res.ref = "Checkjson";
|
||||
res.msg = "validcheck";
|
||||
}
|
||||
if (res.err.length > 0) res.status = 417;
|
||||
return res;
|
||||
};
|
||||
if (typeof module !== "undefined") module.exports = Checkjson;
|
||||
|
@@ -1,7 +1,10 @@
|
||||
const glob = require("glob");
|
||||
const path = require("path");
|
||||
const fs = require("fs-extra");
|
||||
|
||||
const axios = require("axios");
|
||||
//const smtp = require("smtp-client");
|
||||
const nodemailer = require("nodemailer");
|
||||
const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
/**
|
||||
* To manage any communication between Pagan
|
||||
* mayor druid emailing/sms/paper from tribe register smtp, simcard, mail api to Person(s) / Pagan(s)
|
||||
@@ -11,10 +14,160 @@ const fs = require("fs-extra");
|
||||
|
||||
const Notifications = {};
|
||||
|
||||
Notifications.send = (data) => {
|
||||
const ret = {};
|
||||
console.log("TODO dev notification emailing");
|
||||
return ret;
|
||||
|
||||
Notifications.sendsms = async (data, tribeId) => {
|
||||
/**
|
||||
* Never use need wallet in mailjet to test
|
||||
* To set up with mailjet see https://dev.mailjet.com/sms/guides/send-sms-api/#authentication
|
||||
*
|
||||
* @param {string} data.To a phone number with international +3360101010101
|
||||
* @param {string} data.Text text to send
|
||||
*
|
||||
* a conf.sms with {url:"smsurl", Token:"", From:""}
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
if (!conf.sms) {
|
||||
return {
|
||||
status: 412,
|
||||
ref: "Notifications",
|
||||
msg: "missingconf",
|
||||
tribe: tribeId,
|
||||
};
|
||||
}
|
||||
let missingk = [][("To", "Text")].forEach((k) => {
|
||||
if (!data[k]) {
|
||||
missingk.push(k);
|
||||
}
|
||||
});
|
||||
if (missingk.lenght > 0) {
|
||||
return {
|
||||
status: 428,
|
||||
ref: "Notifications",
|
||||
msg: "missingdata",
|
||||
missingk: missingk,
|
||||
};
|
||||
}
|
||||
let confsms= conf.sms;
|
||||
if (
|
||||
fs.existsSync(
|
||||
`${process.env.dirtown}/tribes/itm/${req.session.header.xtribe}.json`
|
||||
)
|
||||
) {
|
||||
const conftrib = fs.readJSONSync(
|
||||
`${process.env.dirtown}/tribes/itm/${req.session.header.xtribe}.json`
|
||||
);
|
||||
if (conftrib.sms) confsms = conftrib.sms;
|
||||
}
|
||||
data.From=confsms.From
|
||||
const sendsms= await axios.post(confsms.url,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${confsms.MJ_TOKEN}`,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
if (sendsms.status==200){
|
||||
return {status:200,ref:"Notifications",msg:"successfullsentsms"};
|
||||
}else{
|
||||
return {status:sendsms.status, ref:"Notifications",msg:"errsendsms",err:sendsms.data}
|
||||
}
|
||||
|
||||
/* si tout se passe bien:
|
||||
{
|
||||
"From": "MJPilot",
|
||||
"To": "+33600000000",
|
||||
"Text": "Have a nice SMS flight with Mailjet !",
|
||||
"MessageId": "2034075536371630429",
|
||||
"SmsCount": 1,
|
||||
"CreationTS": 1521626400,
|
||||
"SentTS": 1521626402,
|
||||
"Cost": {
|
||||
"Value": 0.0012,
|
||||
"Currency": "EUR"
|
||||
},
|
||||
"Status": {
|
||||
"Code": 2,
|
||||
"Name": "sent",
|
||||
"Description": "Message sent"
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
};
|
||||
|
||||
Notifications.sendmail = async (data, tribe) => {
  /**
   * Send an email through the tribe smtp settings (fallback: global conf.smtp).
   * @param {object} data nodemailer message fields
   * @param {string} data.from an email authorized by smtp, used priority from header xtribe
   * @param {string} data.to list of email separated by ,
   * @param {string} data.subject
   * @param {string} data.html
   * @param {string} data.text
   * @param {string} [data.cc] list of email in copy
   * @param {string} [data.bcc] list of email in hidden copy
   * @param {array} [data.attachments] [{filename:"",pathfile:"",contenttype:""}]
   *   contenttype examples: "text/plain", "text/csv", "image/gif",
   *   "application/json", "application/zip"
   *   (NOTE(review): the legacy doc used capitalized From/To but the code
   *   checks lowercase keys — confirm against callers)
   * @param {string} tribe tribe id, used to look up a tribe-specific smtp conf
   * @return {object} {status,ref:"Notifications",msg,...}
   */
  if (!conf.smtp) {
    return {
      status: 412,
      ref: "Notifications",
      msg: "missingconf",
      tribe, // fix: was `tribeId`, an undefined identifier (ReferenceError)
    };
  }
  // Every mandatory message field must be present
  const missingk = ["from", "to", "subject", "html", "text"].filter(
    (k) => !data[k]
  );
  if (missingk.length > 0) {
    // fix: was `missingk.lenght` (typo), so this guard never fired
    return {
      status: 428,
      ref: "Notifications",
      msg: "missingdata",
      missingk: missingk,
    };
  }
  // A tribe-specific smtp configuration overrides the global one when present
  const conftribfile = `${process.env.dirtown}/tribes/itm/${tribe}.json`;
  const confsmtp = fs.existsSync(conftribfile)
    ? fs.readJSONSync(conftribfile).smtp
    : conf.smtp;
  // createTransport is synchronous; the previous `await` was unnecessary
  const transporter = nodemailer.createTransport(confsmtp);
  //@todo add attachments management: collect unreadable files in missingfile
  const missingfile = [];
  if (missingfile.length > 0) {
    // fix: was `missingfile.lenght` (typo)
    return {
      status: 428,
      ref: "Notifications",
      msg: "missingfile",
      missingfile: missingfile,
    };
  }
  try {
    // Send the email; result details are not propagated, only success/failure
    await transporter.sendMail(data);
    return { status: 200, ref: "Notifications", msg: "successfullsent", data };
  } catch (err) {
    return { status: 502, ref: "Notifications", msg: "errsendmail", err: err };
  }
};
|
||||
|
||||
module.exports = Notifications;
|
||||
|
@@ -6,40 +6,85 @@ const axios = require("axios");
|
||||
const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
const Checkjson = require(`./Checkjson.js`);
|
||||
|
||||
/* This manage Objects for indexing and check and act to CRUD
|
||||
objectpath/objects/schema/objectName.json
|
||||
/objectNames/searchindes/objectName_valueofkey_uuildlist.json
|
||||
/objectNames/uuid.json
|
||||
/**
|
||||
* This manage Objects for indexing, searching, checking and act to CRUD
|
||||
* @objectPathName = objectpath/objectname
|
||||
* objectpath/objectname/conf.json
|
||||
* /idx/all_key1.json = {key1value:{object}}
|
||||
* lst_key1.json = [key1valA,key1val2,...]
|
||||
* key2_key1.json = {key2value:[key1value]}
|
||||
* all_key1_filtername = {key1value:{object}}
|
||||
* /itm/apxidvalue.json
|
||||
* in conf.json:
|
||||
* {
|
||||
* "name": "object name ex:'nations'",
|
||||
* "schema": "relativ schema from dirapi dirtown ex:'adminapi/schema/nations.json'"",
|
||||
* "lastupdateschema": 0, time stamp last schema update
|
||||
* "lastupdatedata":0 time stamp last itm update
|
||||
* }
|
||||
*
|
||||
* Specifics key in schema to apXtrib:
|
||||
* apxid : the field value to use to store item
|
||||
* apxuniquekey : list of field that has to be unique you cannot have 2 itm with same key value
|
||||
* apxidx : list of index file /idx/
|
||||
* { "name":"lst_fieldA", "keyval": "alias" }, => lst_fieldA.json = [fieldAvalue1,...]
|
||||
{ "name":"all_fieldA", "keyval": "fieldA" }, => all_fieldA.json =
|
||||
if fieldA in apxuniquekey = {fieldAvalue1:{object}}
|
||||
not in apxuniquekey = {fieldAvalue1:[{object}]}
|
||||
{ "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB"]}, => word_fieldA.json
|
||||
if fieldA in apxuniquekey = {fieldAvalue1:fieldBvalue,}
|
||||
if fieldA not in uniquekey = {fieldAvalue1: [fieldBv1,fieldBv2,]}
|
||||
{ "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB","fieldC"]}, => word_fieldA.json
|
||||
if fieldA in apxuniquekey = {fieldAvalue1:{fieldB:val,fieldC:val},}
|
||||
if fieldA not in uniquekey = {fieldAvalue1: [{fieldB:val,fieldC:val},]}
|
||||
* apxaccessrights : list of profil with CRUD accesrights
|
||||
|
||||
|
||||
|
||||
**/
|
||||
|
||||
*/
|
||||
const Odmdb = {};
|
||||
/*
|
||||
Input: metaobject => data mapper of Key: Value
|
||||
|
||||
objname + an object {} + action Checkjson => get a valid or not answer
|
||||
objname + an object {} + action search => apply matching algo to find probalistic object id
|
||||
objname + action index => update /searcindex of objects concern
|
||||
/**
|
||||
* @api syncObject
|
||||
* @param {string} url to an existing object conf (/objectname/conf.json)
|
||||
* @param {timestamp} timestamp
|
||||
* 0 => rebuild local object from all_{idapx}.json
|
||||
* >0 => update itm and idx search by datetime
|
||||
* @param
|
||||
*/
|
||||
Odmdb.syncObject = () => {};
|
||||
|
||||
*/
|
||||
|
||||
Odmdb.setObject = (schemaPath, objectPath, objectName, schema, lgjson, lg) => {
|
||||
/**
|
||||
*
|
||||
* @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
|
||||
* @objectPath {string} path where object are store
|
||||
* @objectName {string} name of the object
|
||||
* @schema {object} the json schema for this object
|
||||
* @lgjson {object} the json file for a specific language
|
||||
* @lg {string} the 2 letters language
|
||||
*
|
||||
* a shema :
|
||||
* schemaPath/schema/objectName.json
|
||||
* /lg/objectName_{lg}.json
|
||||
* an object :
|
||||
* objectPath/objectName/idx/confjson ={"schema":"relativpathfile or http"}
|
||||
* /uniqueid.json defining schema
|
||||
*
|
||||
*/
|
||||
/**
|
||||
* @api createObject: create a space to host object
|
||||
*
|
||||
* @source {string} "new", url,
|
||||
* @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
|
||||
* @objectPath {string} path where object are store
|
||||
* @objectName {string} name of the object
|
||||
* @schema {object} the json schema for this object
|
||||
* @lgjson {object} the json file for a specific language
|
||||
* @lg {string} the 2 letters language
|
||||
*
|
||||
* Setup a new object localy =>
|
||||
* source
|
||||
*
|
||||
* - from scratch =>
|
||||
* Create
|
||||
* - from a synchronization
|
||||
* Download from source all_{apxid}.json
|
||||
*
|
||||
*
|
||||
*/
|
||||
Odmdb.createObject = (
|
||||
source,
|
||||
schemaPath,
|
||||
objectPath,
|
||||
objectName,
|
||||
schema,
|
||||
lgjson,
|
||||
lg
|
||||
) => {
|
||||
if (!fs.existsSync(schemaPath)) {
|
||||
return {
|
||||
status: 404,
|
||||
@@ -85,165 +130,99 @@ Odmdb.setObject = (schemaPath, objectPath, objectName, schema, lgjson, lg) => {
|
||||
return { status: 200 };
|
||||
};
|
||||
|
||||
Odmdb.schema = (schemaPath, objectName, withschemacheck) => {
|
||||
// Return schema if exist and objectpath contain objectName { status:200;data:schema}
|
||||
if (!fs.existsSync(`${schemaPath}/${objectName}`))
|
||||
return {
|
||||
status: 404,
|
||||
info: "|odmdb|schemapathnamedoesnotexist",
|
||||
moreinfo: `${schemaPath}/${objectName}`,
|
||||
};
|
||||
if (!fs.existsSync(`${objectPath}/schema/${objectName}.json`)) {
|
||||
return {
|
||||
status: 404,
|
||||
info: `|odmdb|schemanotfound`,
|
||||
moreinfo: `file not found ${schemaPath}/schema/${objectName}.json`,
|
||||
};
|
||||
}
|
||||
const schema = fs.readJsonSync(`${schemaPath}/schema/${objectName}.json`);
|
||||
// check schema apx validity specificities primary unique ans searchindex
|
||||
if (withschemacheck) {
|
||||
if (!schema.apxprimarykey) {
|
||||
// code 422: unprocessable Content
|
||||
return {
|
||||
status: 422,
|
||||
info: "|Odmdb|apxprimarykeynotfound",
|
||||
moreinfo: `${schemaPath}/schema/${objectName}.json`,
|
||||
};
|
||||
} else {
|
||||
if (
|
||||
!(
|
||||
schema.apxsearchindex[schema.apxprimarykey] &&
|
||||
schema.apxsearchindex[schema.apxprimarykey].list
|
||||
)
|
||||
) {
|
||||
return {
|
||||
status: 422,
|
||||
info: "|Odmdb|apxprimaryketnotinsearchindexlist",
|
||||
moreinfo: `${schemaPath}/schema/${objectName}.json`,
|
||||
};
|
||||
}
|
||||
if (schema.apxuniquekey) {
|
||||
schema.apxuniquekey.forEach((k) => {
|
||||
if (
|
||||
!(
|
||||
schema.apxsearchindex[k] &&
|
||||
schema.apxsearchindex[k][schema.apxprimarykey]
|
||||
)
|
||||
) {
|
||||
return {
|
||||
status: 422,
|
||||
info: "|Odmdb|apxuniquekeynotinsearchindex",
|
||||
moreinfo: `${schemaPath}/schema/${objectName}.json`,
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
const validschema = Checkjson.schema.validation(schema);
|
||||
if (validschema.status != 200) return validschema;
|
||||
}
|
||||
return {
|
||||
/**
|
||||
* Update an object
|
||||
* @param {string} objectPathname
|
||||
* @param {object} meta update request
|
||||
* lg:
|
||||
* lgobj: object to store in /lg/objectname_lg.json
|
||||
* schema: an update schema
|
||||
* @return {status, ref:"Odmdb", msg:"", data}
|
||||
*
|
||||
* Create a tmp object env and check existing object to identify any issues
|
||||
* If all is fine then apply change by replacing
|
||||
*/
|
||||
Odmdb.updateObject = (objectPathname, meta) => {};
|
||||
|
||||
/**
|
||||
* Get a schema from objectPathname
|
||||
*
|
||||
* todo only local schema => plan a sync each 10minutes
|
||||
* @schemaPath local path adminapi/schema/objectName.json or /tribename/schema/objectName
|
||||
* @validschema boolean if necessary to check schema or not mainly use when change schema
|
||||
* @return {status:200,data:{conf:"schemaconf",schema:"schemacontent"} }
|
||||
*/
|
||||
/**
 * Get a schema and its conf from an objectPathname.
 *
 * todo only local schema => plan a sync each 10 minutes
 * @param {string} objectPathname folder containing conf.json ({schema: relative path})
 * @param {boolean} validschema when true also validate the schema with Checkjson
 * @return {object} {status:200,ref:"Odmdb",msg:"getschema",data:{conf,schema}}
 *   or an error status (404 schemanotfound, or Checkjson validation status)
 */
Odmdb.Schema = (objectPathname, validschema) => {
  const confschema = fs.readJsonSync(`${objectPathname}/conf.json`);
  let schemaPath = confschema.schema;
  if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
  // Resolve the relative schema path (same convention as Odmdb.getSchema)
  if (schemaPath.substring(0, 9) == "adminapi/") {
    schemaPath = `${conf.dirapi}/${schemaPath}`;
  } else {
    schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
  }
  if (!fs.existsSync(schemaPath)) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "schemanotfound",
      data: { schemaPath, schema: {} },
    };
  }
  // fix: the original object literal had a duplicate `data` key, referenced an
  // undefined `schema` variable, and the function never returned res
  const res = {
    status: 200,
    ref: "Odmdb",
    msg: "getschema",
    data: { conf: confschema, schema: fs.readJsonSync(schemaPath) },
  };
  if (validschema) {
    // Checkjson returns {status:200,...} or {status!=200, multimsg:[{ref,msg,data}]}
    const check = Checkjson.schema.validation(res.data.schema);
    if (check.status != 200) {
      res.status = check.status;
      res.multimsg = check.multimsg;
    }
  }
  return res;
};
|
||||
|
||||
//Odmdb.Checkjson = (objectPath, objectName, data, withschemacheck) => {
|
||||
/*
|
||||
@objectPath path to the folder that contain /objects/objectName/ /lg/objectName_{lg}.json /schema/objectName.json
|
||||
@objectName name of object
|
||||
@data data to check based on schema objectName definition
|
||||
|
||||
@return status:200 Data is consistent with schema and primarykey does not exist
|
||||
status:201 Data is consistent with schema and primarykey does already exist
|
||||
status:other means unconsistent schema:
|
||||
404: schema does not exist
|
||||
or unconsitent data and schema from Checkjson.js Checkjson.schema.data
|
||||
|
||||
*/
|
||||
/* const res = { status: 200,ref="Odmdb",msg:"",data:{} };
|
||||
//get schema link of object
|
||||
const schemaPath = fs.readJsonSync(
|
||||
`${objectPath}/${objectName}/idx/conf.json`
|
||||
)["schema"];
|
||||
if (schemaPath.substring(0, 4) == "http") {
|
||||
// lance requete http pour recuperer le schema
|
||||
} else {
|
||||
res.data.schema = Odmdb.schema(objectPath, objectName, withschemacheck);
|
||||
}
|
||||
// check schema validity in case withschemacheck
|
||||
if (schema.status != 200) return ;
|
||||
console.log("SCHEMA for checking:");
|
||||
console.log(schema.data);
|
||||
console.log("DATA to check:");
|
||||
console.log(data);
|
||||
// withschemacheck at false, if check then it is done at Odmdb.schema
|
||||
const validate = Checkjson.schema.data(schema.data, data, false);
|
||||
if (validate.status != 200) {
|
||||
return validate;
|
||||
}
|
||||
if (
|
||||
schema.data.apxprimarykey &&
|
||||
data[k] &&
|
||||
fs.existsSync(`${objectPath}/${objectName}/${data[k]}.json}`)
|
||||
) {
|
||||
res.status = 201; // means created => exist an object with this primary key
|
||||
}
|
||||
if (schema.data.apxuniquekey) {
|
||||
schema.data.apxuniquekey.forEach((k) => {
|
||||
if (
|
||||
data[k] &&
|
||||
fs.existsSync(
|
||||
`${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json}`
|
||||
) &&
|
||||
fs.readJsonSync(
|
||||
`${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json}`
|
||||
)[k]
|
||||
) {
|
||||
res.status = 201; // means created => exist as primary key
|
||||
}
|
||||
});
|
||||
}
|
||||
return res;
|
||||
};
|
||||
*/
|
||||
/**
 * Load a schema from a public http link or a local path
 * (adminapi/schema/objectName.json or tribename/schema/objectName),
 * check the apXtrib-specific keys (apxid, apxuniquekey, apxidx) and
 * optionally validate the json schema itself.
 * @param {string} schemaPath public http link or local relative path
 * @param {boolean} validschema run Checkjson.schema.validation when true
 * @return {object} {status:200,data:{schema}} or an error status
 */
Odmdb.getSchema = async (schemaPath, validschema) => {
  const res = { status: 200, data: { schema: {} } };
  if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
  if (schemaPath.substring(0, 4) == "http") {
    // @todo fetch the schema with an await axios request
  } else {
    if (schemaPath.substring(0, 9) == "adminapi/") {
      schemaPath = `${conf.dirapi}/${schemaPath}`;
    } else {
      schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
    }
    if (!fs.existsSync(schemaPath)) {
      return {
        status: 404,
        ref: "Odmdb",
        msg: "schemanotfound",
        data: { schemaPath, schema: {} },
      };
    }
    res.data.schema = fs.readJsonSync(schemaPath);
    if (!res.data.schema.apxid) {
      // apxid (primary key) is mandatory for any Odmdb object
      return {
        status: 406,
        ref: "Odmdb",
        msg: "missingprimarykey",
        data: {},
      };
    }
    if (res.data.schema.apxidx) {
      // the apxid must always be part of apxuniquekey
      if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
        res.data.schema.apxuniquekey.push(res.data.schema.apxid);
      }
      // fix: the original returned from inside a forEach callback, which does
      // not return from getSchema; use for...of so the error actually propagates
      for (const idx of res.data.schema.apxidx) {
        if (
          idx.objkey &&
          !res.data.schema.apxuniquekey.includes(idx.keyval) &&
          !idx.objkey.includes(res.data.schema.apxid)
        ) {
          return {
            status: 406,
            ref: "Odmdb",
            msg: "unconsistencyapxidx",
            data: {
              name: idx.name,
              keyval: idx.keyval,
              objkey: idx.objkey,
              apxid: res.data.schema.apxid,
            },
          };
        }
      }
    }
    if (validschema) {
      // fix: was `validschema || 1 == 1` (debug residue forcing validation)
      // Checkjson returns {status:200,...} or {status!=200, multimsg:[{ref,msg,data}]}
      const check = Checkjson.schema.validation(res.data.schema);
      if (check.status != 200) {
        res.multimsg = check.multimsg;
        res.status = check.status;
      }
    }
  }
  return res;
};
|
||||
Odmdb.search = (objectPath, objectName, search) => {
|
||||
/*
|
||||
@search= {
|
||||
@@ -263,96 +242,367 @@ Odmdb.search = (objectPath, objectName, search) => {
|
||||
const schema = Odmdb.schema(objectPath, objectName);
|
||||
if (schema.status != 200) return schema;
|
||||
};
|
||||
/**
 * To get an array of items (itm) per primarykey with a list of fields.
 * Objects are stored in objectPathname; conf.json indicates where the schema is.
 *
 * @param {string} objectPathname where objects are stored
 * @param {array} apxidlist list of apxid values requested
 * @param {object} role {xalias,xprofils} used to compute access rights (comes from headers)
 * @param {array} [propertiesfilter] keys to return for each object (all when undefined)
 * @return {object} {status:200, data:{id:{filtered item}|"forbiden"|"notfound"}}
 */
Odmdb.reads = (objectPathname, apxidlist, role, propertiesfilter) => {
  const res = { status: 200, data: {} };
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  apxidlist.forEach((id) => {
    if (fs.existsSync(`${objectPathname}/itm/${id}.json`)) {
      const objectdata = fs.readJsonSync(`${objectPathname}/itm/${id}.json`);
      // Grant or revoke the "owner" profil per item before computing rights
      if (objectdata.owner && objectdata.owner == role.xalias) {
        if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
      } else {
        if (role.xprofils.includes("owner"))
          role.xprofils = role.xprofils.filter((e) => e !== "owner");
      }
      // fix: was `(Odmdb.accessright = (apxaccessright, role.xprofils))` — a
      // comma-operator assignment that overwrote the accessright function
      // instead of calling it
      const accessright = Odmdb.accessright(
        getschema.data.schema.apxaccessrights,
        role
      );
      if (!accessright.R) {
        res.data[id] = "forbiden";
      } else {
        // An empty accessright.R means all properties are readable; a non
        // empty list restricts both the filter and the returned keys
        let newpropertiesfilter = Object.keys(objectdata);
        if (accessright.R.length > 0) {
          const setaccess = new Set(accessright.R);
          if (!propertiesfilter) propertiesfilter = Object.keys(objectdata);
          newpropertiesfilter = propertiesfilter.filter((f) =>
            setaccess.has(f)
          );
        }
        const objinfo = {};
        newpropertiesfilter.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data[id] = objinfo;
      }
    } else {
      res.data[id] = "notfound";
    }
  });
  return res;
};
|
||||
/**
 * Create an object data item into objectName.
 * @param {string} objectPath path to the folder that contains /objects/objectName/
 * @param {string} objectName name of object
 * @param {object} data data to check based on the object schema definition
 * @param {string} accessright accessright of the user on this objectName ex: "CRUDO" or "R" or "O"
 * @todo not implemented — use Odmdb.cud(objectPathname, "C", itm, role) instead
 */
Odmdb.create = (objectPath, objectName, data, accessright) => {
  // fix: the original stub was never closed (missing `};`), leaving the
  // function body unterminated and breaking every definition after it
};
||||
/**
 * Convert profils into an accessright object.
 * @param {object} apxaccessrights from the schema: {profilname:{C:[],R:[],...}}
 * @param {object} role {xprofils,xalias} available after isAuthenticated
 * @returns {object} merged rights: C present means create allowed; R/U carry a
 *   property list (or an empty list meaning all); D means delete allowed.
 *   example: {"C":[],"R":[properties list],"U":[properties list],"D":[]}
 */
Odmdb.accessright = (apxaccessrights, role) => {
  // Union the rights of every profil the requester owns; when an action is
  // seen twice its property lists are merged without duplicates.
  const merged = {};
  for (const profil of role.xprofils) {
    const rights = apxaccessrights[profil];
    if (!rights) continue;
    for (const act of Object.keys(rights)) {
      if (!merged[act]) {
        merged[act] = rights[act];
      } else {
        merged[act] = [...new Set([...merged[act], ...rights[act]])];
      }
    }
  }
  return merged;
};
|
||||
/**
 * Update an existing item of objectName by merging data into the stored item.
 * @param {string} objectPath path to the folder that contains /objectName
 * @param {string} objectName name of object
 * @param {object} data properties to merge into the stored item
 * @param {string} id apxid value of the item to update
 * @param {string} accessright accessright of the user on this objectName
 * @return {object} {status:200,...,msg:"updatesuccessfull"}
 *   | 404 itmnotfound | 409 datavsschemaunconsistent | getSchema error
 */
Odmdb.update = async (objectPath, objectName, data, id, accessright) => {
  const itmfile = `${objectPath}/${objectName}/itm/${id}.json`;
  if (!fs.existsSync(itmfile)) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "itmnotfound",
      data: { objectPath, objectName, id },
    };
  }
  const currentobj = fs.readJSONSync(itmfile);
  Object.keys(data).forEach((k) => {
    currentobj[k] = data[k];
  });
  // Only refresh dt_update when the item already tracks it
  if (currentobj.dt_update) currentobj.dt_update = dayjs().toISOString();
  const schemaPath = fs.readJsonSync(`${objectPath}/${objectName}/conf.json`)[
    "schema"
  ];
  const getschema = await Odmdb.getSchema(schemaPath);
  if (
    getschema.status != 200 ||
    Object.keys(getschema.data.schema).length == 0
  ) {
    console.log(
      "this is not suppose to happen in Odmdb",
      Object.keys(getschema.data.schema)
    );
    return getschema;
  }
  const schema = getschema.data.schema;
  const check = Checkjson.schema.data(schema, currentobj, false);
  if (check.err.length == 0) {
    // fix: an unterminated JSDoc block (residue of the cud documentation) was
    // spliced here in the middle of the function and swallowed this return
    fs.outputJsonSync(itmfile, currentobj);
    //@todo select index files to regenerate depending on updated keys;
    //currently callers must re-index all
    return { status: 200, ref: "Odmdb", msg: "updatesuccessfull" };
  } else {
    return {
      status: 409,
      ref: "Odmdb",
      msg: "datavsschemaunconsistent",
      data: check.err,
    };
  }
};
|
||||
Odmdb.delete = (objectPath, objectName, data,accessright) => {
  /*
  Delete an object data item from objectName (not implemented yet).
  @objectPath path to the folder that contain /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of object
  @data data identifying the item to delete, based on objectsMeta definition
  @accessright a string with accessright of the user on this objectName ex: "CRUDO" or "R" or "O"
  NOTE(review): empty stub — deletion is currently handled by Odmdb.cud with crud == "D"; confirm before implementing here
  */
};
|
||||
/*console.log("test Odmdb");
|
||||
console.log(
|
||||
Odmdb.check(
|
||||
"/media/phil/usbfarm/apxtrib/nationchains/socialworld/objects",
|
||||
"nations",
|
||||
{ nationId: "123", status: "unchain" }
|
||||
)
|
||||
);*/
|
||||
/* end of legacy comment residue */
|
||||
|
||||
/**
 * C/U/D a data itm into objectPathname if checkJson is valid, then update idx.
 * idx is up to date for unique properties but not for lists.
 * @param {string} objectPathname folder where the object is stored
 * @param {string} crud "C"reate | "U"pdate | "D"elete
 * @param {object} itm an object respecting the checkJson schema in objectPathname/conf.json
 * @param {object} role {xprofils,xalias} profils list (anonymous, pagans, ...);
 *   the "owner" profil is granted when the stored item's owner equals role.xalias
 * @return {object} {status:200,ref:"Odmdb",msg:"cudsuccessfull",data:{itm}}
 *   or an error status (403 accessforbidden, 406 apxidmissing/alreadyexist/doesnotexist, ...)
 */
Odmdb.cud = (objectPathname, crud, itm, role) => {
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;

  const apxid = getschema.data.schema.apxid;
  if (!itm[apxid]) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "apxidmissing",
      data: { missingkey: apxid },
    };
  }
  // lst_{apxid}.json is the authoritative list of existing ids
  const existid = fs
    .readJSONSync(`${objectPathname}/idx/lst_${apxid}.json`)
    .includes(itm[apxid]);
  if (existid && crud == "C") {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "alreadyexist",
      data: {
        objectname: path.basename(objectPathname),
        key: apxid,
        val: itm[apxid],
      },
    };
  }
  if (!existid && ["U", "D"].includes(crud)) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "doesnotexist",
      data: {
        objectname: path.basename(objectPathname),
        key: apxid,
        val: itm[apxid],
      },
    };
  }
  const itmold = existid
    ? fs.readJSONSync(`${objectPathname}/itm/${itm[apxid]}.json`)
    : {};
  if (existid && itmold.owner && itmold.owner == role.xalias) {
    role.xprofils.push("owner");
  } else {
    // set owner cause this is a Create
    itm.owner = role.xalias;
  }
  // accessright {C:[],R:[],U:[],D:[]}: key present means authorized; a non
  // empty array (R,U) restricts the right to the listed properties
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (
    (crud == "C" && !accessright.C) ||
    (crud == "D" && !accessright.D) ||
    (crud == "U" && !accessright.U)
  ) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "accessforbidden",
      data: { crud, accessright },
    };
  }
  // delete or save
  // fix: chkdata was declared inside the else branch but read in the final
  // return, which threw a ReferenceError on every crud == "D"
  let chkdata;
  if (crud == "D") {
    // Keep a dated copy in /delitm before removing the item
    itmold["dt_delete"] = dayjs();
    fs.outputJSONSync(
      `${objectPathname}/delitm/${itmold[apxid]}.json`,
      itmold
    );
    fs.rmSync(`${objectPathname}/itm/${itmold[apxid]}.json`);
  } else {
    // if Create/Update, erase old version
    let itmtostore = itm;
    if (crud == "U" && accessright.U.length > 0) {
      // Only the properties listed in accessright.U may be updated
      itmtostore = itmold;
      accessright.U.forEach((p) => {
        itmtostore[p] = itm[p];
      });
      itmtostore.dt_update = dayjs();
    }
    if (crud == "C") itmtostore.dt_create = dayjs();
    // check consistency of data to store
    chkdata = Checkjson.schema.data(getschema.data.schema, itmtostore, false);
    if (chkdata.status != 200) return chkdata;
    if (!getschema.data.schema.apxuniquekey)
      getschema.data.schema.apxuniquekey = [];
    fs.outputJSONSync(
      `${objectPathname}/itm/${chkdata.data.apxid}.json`,
      chkdata.data.itm
    );
  }
  //update idx
  Odmdb.idxfromitm(
    objectPathname,
    crud,
    itm,
    itmold,
    [],
    getschema.data.schema
  );
  getschema.data.conf.lastupdatedata = dayjs();
  fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
  return {
    status: 200,
    ref: "Odmdb",
    msg: "cudsuccessfull",
    // For a Delete there is no chkdata; return the archived item instead
    data: { itm: crud == "D" ? itmold : chkdata.data.itm },
  };
};
|
||||
/**
|
||||
* create/update idx from itm(s)
|
||||
*
|
||||
* @param {string} objectPathname
|
||||
* @param {object} itm item to Create or to Update or {} if crud == I or crud == D
|
||||
* @param {object} itmold (current item) if crud == U or D to get previous itm before change or {} if crud==I or C
|
||||
* @param {letter} crud CUDI C add, U update, D delete I reindex
|
||||
* @param {array} idx if specific request to rebuild list of idx only if [] then use schema one
|
||||
* @param {object} schema if empty it use schema from Odmdb.Schema().data.schema
|
||||
*
|
||||
* example create alias 12 name fred:
|
||||
* Odmdb.idxfromitm('.../tribes/ndda/persons',"C",{alias:'12',name:"fred"},{},[], {person schema})
|
||||
* example update alias 12 in name freddy:
|
||||
* Odmdb.idxfromitm('.../tribes/ndda/persons',"U",{alias:'12',name:"freddy"},{alias:'12',name:"fred"},[], {person schema})
|
||||
* example delete alias 12:
|
||||
* Odmdb.idxfromitm('.../tribes/ndda/persons',"D",{},{alias:'12',name:"fred"},[], {person schema})
|
||||
* example to rebuild all index from scratch
|
||||
* Odmdb.idxfromitm('.../tribes/ndda/persons',"I",{},{},[], {person schema})
|
||||
* example to rebuild only publickey_alias index from scratch
|
||||
* Odmdb.idxfromitm('.../tribes/ndda/pagans',"I",{},{},[{ name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
|
||||
*
|
||||
*/
|
||||
Odmdb.idxfromitm = (objectPathname, crud, itm, itmold, idxs = [], schema) => {
  // See the JSDoc above: maintain /idx/*.json from item(s) for C,U,D,
  // or rebuild from all /itm/*.json for I (reindex).
  if (!schema || !schema.apxid) {
    const getschema = Odmdb.Schema(objectPathname, true);
    if (getschema.status != 200) return getschema;
    schema = getschema.data.schema;
  }
  const itms = crud == "I" ? glob.sync(`${objectPathname}/itm/*.json`) : [itm];
  if (crud == "I") {
    // reinit the requested idx
    idxs.forEach((idx) => {
      // fix: fs.remove is async and was never awaited; use the sync variant so
      // the rebuild below cannot race the removal
      fs.removeSync(`${objectPathname}/idx/${idx.name}.json`);
    });
  }
  let idxtoreindex = []; //store index that has to be reprocessed to get full context
  idxs = idxs.length == 0 ? schema.apxidx : idxs; // use all schema index if none requested
  itms.forEach((i) => {
    if (crud == "I") {
      itm = fs.readJSONSync(i);
    }
    idxs.forEach((idx) => {
      // a unique keyval is stored as an object value (or string), else as an array
      const keyvalisunique = schema.apxuniquekey.includes(idx.keyval);
      const idxsrc = `${objectPathname}/idx/${idx.name}.json`;
      const idxinit = idx.name.substring(0, 4) == "lst_" ? [] : {}; // type of idx (array or object)
      let idxfile = !fs.existsSync(idxsrc) ? idxinit : fs.readJSONSync(idxsrc);
      if (idx.name.substring(0, 4) == "lst_") {
        if (["D", "U"].includes(crud)) {
          if (keyvalisunique) {
            idxfile = idxfile.filter((e) => e !== itmold[idx.keyval]);
          } else {
            idxtoreindex.push(idx); //@todo non-unique list idx needs a full reindex
          }
        }
        if (
          ["C", "U", "I"].includes(crud) &&
          !idxfile.includes(itm[idx.keyval])
        ) {
          idxfile.push(itm[idx.keyval]);
        }
      } else {
        if (!idx.objkey) {
          // means all properties
          idx.objkey = Object.keys(schema.properties);
        }
        if (keyvalisunique && idx.objkey.length == 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            idxfile[itm[idx.keyval]] = itm[idx.objkey[0]];
          }
        }
        if (keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            idxfile[itm[idx.keyval]] = itmfilter;
          }
        }
        if (!keyvalisunique && idx.objkey.length == 1) {
          // fix: was `.IndexOf` (capital I), which is not an Array method and
          // threw a TypeError on every D/U
          if (
            ["D", "U"].includes(crud) &&
            idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]) > -1
          ) {
            // U because need to remove previous value before adding it
            idxfile[itmold[idx.keyval]].splice(
              idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]),
              1
            );
          }
          if (["C", "U", "I"].includes(crud)) {
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            if (!idxfile[itm[idx.keyval]].includes(itm[idx.objkey[0]])) {
              idxfile[itm[idx.keyval]].push(itm[idx.objkey[0]]);
            }
          }
        }
        if (!keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud) && idxfile[itmold[idx.keyval]]) {
            // U because need to remove previous value before adding it
            let arrayofit = [];
            idxfile[itmold[idx.keyval]].forEach((it) => {
              if (it[schema.apxid] != itm[schema.apxid]) arrayofit.push(it);
            });
            idxfile[itmold[idx.keyval]] = arrayofit;
          }
          if (["C", "U", "I"].includes(crud)) {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            idxfile[itm[idx.keyval]].push(itmfilter);
          }
        }
      }
      fs.outputJSONSync(idxsrc, idxfile);
    });
  });
  if (crud != "I") {
    //update lastupdatedata to inform something changed
    const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
    confschema.lastupdatedata = dayjs();
    // fix: was writing getschema.data.conf, an out-of-scope variable
    // (ReferenceError when schema was passed in) that also discarded the
    // lastupdatedata just set on confschema
    fs.outputJSONSync(`${objectPathname}/conf.json`, confschema);
  }
  return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};
|
||||
Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
|
||||
/**
|
||||
* Update all itm of objectname from index idx/idxname with data
|
||||
|
@@ -4,6 +4,8 @@ const dayjs = require("dayjs");
|
||||
const fs = require("fs-extra");
|
||||
const axios = require("axios");
|
||||
const openpgp = require("openpgp");
|
||||
const Notifications = require("../models/Notifications.js");
|
||||
const Odmdb = require("../models/Odmdb.js");
|
||||
|
||||
/*if (fs.existsSync("../../nationchains/tribes/conf.json")) {
|
||||
conf = require("../../nationchains/tribes/conf.json");
|
||||
@@ -16,23 +18,42 @@ const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
const Pagans = {};
|
||||
|
||||
/**
|
||||
* Remove authentification token after a logout
|
||||
* @param {string} alias
|
||||
* @param {string} tribe
|
||||
* @param {integer} xdays
|
||||
* @param {string} xhash
|
||||
* @returns {status:200, ref:"Pagans",msg:"logout"}
|
||||
* tmpfs name file has to be on line with the tmpfs create by isAuthenticated
|
||||
* tmpfs contain profils name for a tribe/
|
||||
*/
|
||||
Pagans.logout = (alias, tribe, xdays, xhash) => {
|
||||
//console.log(alias, tribe, xdays, xhash);
|
||||
// inline with middleware isAuthenticated.js
|
||||
let tmpfs = `${process.env.dirtown}/tmp/tokens/${alias}_${tribe}_${xdays}`;
|
||||
//max filename in ext4: 255 characters
|
||||
tmpfs += `_${xhash.substring(150, 150 + tmpfs.length - 249)}.json`;
|
||||
fs.remove(tmpfs);
|
||||
return { status: 200, ref: "Pagans", msg: "logout" };
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string} alias a alias that exist or not
|
||||
* @return {object} { status: 200, ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
|
||||
* { status: 404, ref:"pagans",msg:"aliasdoesnotexist",data: { alias} }
|
||||
*
|
||||
**/
|
||||
Pagans.getalias = (alias) => {
|
||||
/**
|
||||
* @param {string} alias a alias that exist or not
|
||||
* @return {object} { status: 200, ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
|
||||
* { status: 404, ref:"pagans",msg:"aliasdoesnotexist",data: { alias} }
|
||||
*
|
||||
**/
|
||||
console.log(`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`);
|
||||
//bypass Odmdb cause all is public
|
||||
if (fs.existsSync(`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`)) {
|
||||
return {
|
||||
status: 200,
|
||||
ref: "Pagans",
|
||||
msg: "aliasexist",
|
||||
data: fs.readJsonSync(
|
||||
data: fs.readJSONSync(
|
||||
`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`
|
||||
),
|
||||
};
|
||||
@@ -46,52 +67,56 @@ Pagans.getalias = (alias) => {
|
||||
}
|
||||
};
|
||||
|
||||
Pagans.getperson = (alias, tribeid) => {
|
||||
/**
|
||||
* @param {string} alias that exist
|
||||
* @param {string} tribeId that exist with a person alias
|
||||
* @return {object} { status: 200, ref:"pagans",msg:"personexist",data: { person } }
|
||||
* { status: 404, ref:"pagans",msg:"persondoesnotexist",data: { person } }
|
||||
*
|
||||
**/
|
||||
if (
|
||||
fs.existsSync(`${conf.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`)
|
||||
) {
|
||||
const person = fs.readJsonSync(
|
||||
`${conf.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`
|
||||
);
|
||||
delete person.auth;
|
||||
return {
|
||||
status: 200,
|
||||
ref: "Pagans",
|
||||
msg: "personexist",
|
||||
data: person,
|
||||
};
|
||||
} else {
|
||||
/**
|
||||
* @param {string} alias that exist
|
||||
* @param {string} tribeId that exist with a person alias
|
||||
* @return {object} { status: 200, ref:"pagans",msg:"personexist",data: { person } }
|
||||
* { status: 404, ref:"pagans",msg:"persondoesnotexist",data: { person } }
|
||||
*
|
||||
**/
|
||||
Pagans.getperson = (tribeid, alias, role) => {
|
||||
const objlst = Odmdb.reads(
|
||||
`${conf.dirtown}/tribes/${tribeid}/persons`,
|
||||
[alias],
|
||||
role
|
||||
);
|
||||
if (objlst.data[alias] == "notfound") {
|
||||
return {
|
||||
status: 404,
|
||||
ref: "Pagans",
|
||||
msg: "persondoesnotexist",
|
||||
data: { alias, tribeid },
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
status: 200,
|
||||
ref: "Pagans",
|
||||
msg: "personexist",
|
||||
data: objlst.data[alias],
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
Pagans.create = (alias, publicKey) => {
|
||||
Pagans.create = (objpagan, role) => {
|
||||
/**
|
||||
* @param {string} alias a unique alias that identify an identity
|
||||
* @param {string} publicKey a publicKey
|
||||
* @param {object} objpagan {alias,publickey} a unique alias/publickey that identify an identity
|
||||
* @param {array} role {xalias,xprofils} requester and list of profil
|
||||
* @return {object} { status: 200, data: { alias, publicKey } }
|
||||
* xhash was checked by isauthenticated
|
||||
* @todo use Odmdb to add a pagan
|
||||
*/
|
||||
return Odmdb.cud(`${conf.dirapi}/nationchains/pagans`, "C", objpagan, role);
|
||||
/*
|
||||
let apxpagans = {};
|
||||
if (fs.existsSync(`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`)) {
|
||||
apxpagans = fs.readJsonSync(
|
||||
`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`
|
||||
);
|
||||
}
|
||||
apxpagans[alias] = { alias, publicKey };
|
||||
if (apxpagans[objpagan.alias]) {
|
||||
return { status: 409, ref: "Pagans", msg: "aliasexist", data: { alias } };
|
||||
}
|
||||
apxpagans[objpagan.alias] = { alias, publicKey };
|
||||
fs.outputJsonSync(
|
||||
`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`,
|
||||
apxpagans
|
||||
@@ -100,52 +125,148 @@ Pagans.create = (alias, publicKey) => {
|
||||
alias,
|
||||
publicKey,
|
||||
});
|
||||
return { status: 200, ref:"Pagans", msg:"identitycreate",data: { alias, publicKey } };
|
||||
return {
|
||||
status: 200,
|
||||
ref: "Pagans",
|
||||
msg: "identitycreate",
|
||||
data: { alias, publicKey },
|
||||
};
|
||||
*/
|
||||
};
|
||||
|
||||
Pagans.personupdate = (alias, tribe, persondata) => {
|
||||
//later use Odmdb ans schema person to manage this
|
||||
/**
|
||||
* @Param {string} alias pagan unique id
|
||||
* @Param {string} tribe tribe id in this town
|
||||
* @Param {object} persondata that respect /nationchains/schema/person.json + nationchains/tribe/tribeid/schema/personextented.json
|
||||
* @return create or update a person /tribe/tribeid/person/alias.json
|
||||
*/
|
||||
let person = {
|
||||
/**
|
||||
* @Param {string} alias pagan unique id
|
||||
* @Param {string} tribeid tribe id in this town
|
||||
* @Param {object} persondata that respect /nationchains/schema/person.json + nationchains/tribe/tribeid/schema/personextented.json
|
||||
* @return create or update a person /tribe/tribeid/person/alias.json
|
||||
* todo later use Odmdb ans schema person to manage this
|
||||
*/
|
||||
Pagans.personupdate = (tribeid, alias, personupdate, role) => {
|
||||
const personinit = {
|
||||
alias: alias,
|
||||
dt_create: dayjs(),
|
||||
accessrights: { profil: "user" },
|
||||
profils: ["person"],
|
||||
};
|
||||
if (
|
||||
fs.existsSync(
|
||||
`${process.env.dirtown}/tribes/${tribe}/person/itm/${alias}.json`
|
||||
)
|
||||
) {
|
||||
person = fs.readJsonSync(
|
||||
`${process.env.dirtown}/tribes/${tribe}/person/itm/${alias}.json`
|
||||
);
|
||||
person.dt_update = dayjs();
|
||||
}
|
||||
Object.keys(persondata).forEach((d) => {
|
||||
person[d] = persondata[d];
|
||||
const personfile = `${process.env.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`;
|
||||
const persondata = fs.existsSync(personfile)
|
||||
? fs.readJSONSync(personfile)
|
||||
: personinit;
|
||||
persondata.dt_update = dayjs();
|
||||
|
||||
Object.keys(personupdate).forEach((d) => {
|
||||
persondata[d] = personupdate[d];
|
||||
});
|
||||
//const checkjson= Checkjson.schema.data = (fs.readJsonSync(`${conf.dirapi}/nationchains/schema/person.json`, person, false)
|
||||
// if checkjson.status==200 create /update with odmdb to update index data
|
||||
// see odmdb that did all and return standard message
|
||||
fs.outputJSONSync(
|
||||
`${process.env.dirtown}/tribes/${tribe}/person/itm/${alias}.json`,
|
||||
person,
|
||||
{
|
||||
space: 2,
|
||||
}
|
||||
);
|
||||
fs.outputJSONSync(personfile, persondata, { space: 2 });
|
||||
return {
|
||||
status: 200,
|
||||
ref: "Pagans",
|
||||
msg: "successfullupdate",
|
||||
data: { tribe: tribe },
|
||||
data: { alias: alias, tribeid: tribeid },
|
||||
};
|
||||
};
|
||||
/**
|
||||
* Send email with alias's keys to email or person alias person.recovery.email
|
||||
*
|
||||
* If email or pubkey is undefined then get data from tribe/person(alias)
|
||||
* Send email with keys
|
||||
*
|
||||
* @param {string} alias
|
||||
* @param {pgpPrivate} privkey
|
||||
* @param {string} passphrase
|
||||
* @param {string} tribe
|
||||
* @param {pgpPublic} pubkey
|
||||
* @param {string} email
|
||||
*/
|
||||
Pagans.sendmailkey = (
|
||||
alias,
|
||||
privatekey,
|
||||
tribeid,
|
||||
passphrase,
|
||||
publickey,
|
||||
email
|
||||
) => {
|
||||
const person = { alias, privatekey, tribeid };
|
||||
console.log(
|
||||
alias,
|
||||
"-",
|
||||
privatekey,
|
||||
"-",
|
||||
tribeid,
|
||||
"-",
|
||||
passphrase,
|
||||
"-",
|
||||
publickey,
|
||||
"-",
|
||||
email
|
||||
);
|
||||
if (!publickey || !email || !passphrase || !privatekey) {
|
||||
const personfile = `${process.env.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`;
|
||||
const persondata = fs.existsSync(personfile)
|
||||
? fs.readJsonSync(personfile)
|
||||
: {};
|
||||
if (persondata.length == 0) {
|
||||
return {
|
||||
status: 404,
|
||||
ref: "Pagans",
|
||||
msg: "persondoesnotexist",
|
||||
data: { alias, tribeid },
|
||||
};
|
||||
}
|
||||
person.email = persondata.recoveryauth.email;
|
||||
person.publickey = persondata.recoveryauth.publickey;
|
||||
person.privatekey = persondata.recoveryauth.privatekey;
|
||||
person.passphrase = persondata.recoveryauth.passphrase;
|
||||
} else {
|
||||
person.email = email;
|
||||
person.passphrase = passphrase;
|
||||
person.publickey = publickey;
|
||||
}
|
||||
console.log("person:", person);
|
||||
|
||||
//feedback.withemail = true;
|
||||
//feedback.email = email;
|
||||
//feedback.privatekey = privatekey;
|
||||
//feedback.passphrase = passphrase;
|
||||
const mailidentity = {
|
||||
subjecttpl: "Information pour l'alias: {{alias}}",
|
||||
htmltpl:
|
||||
"<h1>Votre identité {{alias}} via {{tribeid}}</h1><p>Passphrase:</p></p><p>{{{passphrase}}</p><p>Cle public:</p><p>{{{publickey}}</p><p>Cle privée</p><p>{{{privatekey}}</p>",
|
||||
texttpl:
|
||||
"Votre identité {{alias}}\nPassphrase:\n{{{passphrase}}\nCle public:\n{{{publickey}}\nCle privée\n{{{privatekey}}",
|
||||
filelist: [],
|
||||
};
|
||||
|
||||
const maildata = {
|
||||
To: person.email,
|
||||
subject: Mustache.render(mailidentity.subject, person),
|
||||
htmlpart: Mustache.render(mailidentity.htmltpl, person),
|
||||
textpart: Mustache.render(mailidentity.texttpl, person),
|
||||
filelist: [],
|
||||
};
|
||||
fs.outputFileSync(
|
||||
`${conf.dirtown}/tmp/${person.alias}_privatekey.txt`,
|
||||
person.privatekey,
|
||||
"utf8"
|
||||
);
|
||||
maildata.filelist.push({
|
||||
filename: "${person.alias}_privatekey.txt",
|
||||
pathfile: `${conf.dirtown}/tmp/${person.alias}_privatekey.txt`,
|
||||
});
|
||||
fs.outputFileSync(
|
||||
`${conf.dirtown}/tmp/${person.alias}_publickey.txt`,
|
||||
person.publickey,
|
||||
"utf8"
|
||||
);
|
||||
maildata.filelist.push({
|
||||
filename: "${person.alias}_publickey.txt",
|
||||
pathfile: `${conf.dirtown}/tmp/${person.alias}_publickey.txt`,
|
||||
});
|
||||
//fs.readJSONSync('${conf.dirapi}/api/')
|
||||
return Notifications.sendmail(maildata, tribeid);
|
||||
};
|
||||
|
||||
Pagans.authenticatedetachedSignature = async (
|
||||
alias,
|
||||
@@ -185,6 +306,15 @@ Pagans.authenticatedetachedSignature = async (
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* todo recuperer tous les tokens et les distribuer à la town
|
||||
* @param {string} alias
|
||||
*/
|
||||
Pagans.deletealias = (alias) => {
|
||||
// check if alias is used in the blockchain
|
||||
// if not then delete itm pagan alias => this means that alias is now available for someone else
|
||||
};
|
||||
Pagans.deleteperson = (alias, tribeId) => {};
|
||||
Pagans.keyrecovery = (tribeid, email) => {
|
||||
glob
|
||||
.GlobSync(`${conf.dirtown}/tribes/${tribeId}/Person/*.json`)
|
||||
|
57
api/models/Trackings.js
Normal file
57
api/models/Trackings.js
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* Tracking management:
|
||||
*
|
||||
* without header:
|
||||
* https://dns.xx/trk/pathtofile?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&src=btnregister&version=1&lg=fr
|
||||
*
|
||||
* with header
|
||||
* https://dns.xx/trk/pathtofile?src=btnregister&version=1
|
||||
*
|
||||
* where pathtofile is a ressource accessible from https://dns.xx/pathtofile
|
||||
*
|
||||
* We get :
|
||||
* alias: if athenticated from header else anonymous
|
||||
* uuid: a uuid v4 générate the first time a web page is open on a browser
|
||||
* src: source action that trig this get
|
||||
* version: can be an int, date or any version of the src
|
||||
* tm: optionnal is a timestamp of action when it is not immediate (offline app)
|
||||
*
|
||||
* html usage to track a loading page or email when a picture is load
|
||||
* using apxwebapp in /src/ we got:
|
||||
* <img src="static/img/photo.jpg" data-trksrckey="loadpage" data-version="1">
|
||||
*
|
||||
* using html + apx.js (or at least with header {xalias,xuuid,xlang})
|
||||
* <img lazysrc="trk/static/img/photo.jpg data-trksrckey="loadpage" data-version="1">
|
||||
*
|
||||
*
|
||||
* in js action:
|
||||
* <button></button> or
|
||||
* <a data-trksrc="linktoblabla" href='https:..'
|
||||
* onclick="apx.trackvisit("btnaction",1);actionfct();">
|
||||
* </a>
|
||||
* will hit an eventlistener
|
||||
* axios.get("https://dns.xx/trk/cdn/empty.json?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&src=btnregister&version=1");
|
||||
*
|
||||
*
|
||||
* or if no js available (example:email or pdf document)
|
||||
* <img src="https://dns.xx/trk/static/img/photo.jpg?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=loadpage&version=1" will hit a tracker
|
||||
*
|
||||
* <a href="https://dns.xx/trk/redirect?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=loadpage&version=1&url=http://..." will hit a tracker then redirect to url></a> *
|
||||
*
|
||||
*
|
||||
* if you use apx.js :
|
||||
* in html add in <button>, <img>, <a> tag data-trksrc="srckey"
|
||||
* <img src="https://dns.xx/static/img/photo.jpg" data-trkversion="1" data-trksrckey="registerform">
|
||||
* <button data-trksrc="https://dns.xx/static/img/photo.jpg" data-trkversion="1" data-trksrckey="registerform">
|
||||
* in js call apx.track(srckey);
|
||||
*
|
||||
* Tracking log into tribe/logs/nginx/tribe_appname.trk.log
|
||||
* Src have to be manage in tribe/api/models/lg/src_en.json
|
||||
* {"srckey":{
|
||||
* "app":"presentation|app|apptest",
|
||||
* "title":"",
|
||||
* "description":""
|
||||
* }
|
||||
* }
|
||||
*
|
||||
*/
|
@@ -15,6 +15,33 @@ const Checkjson = require( `./Checkjson.js`);
|
||||
/*
|
||||
tribeid manager
|
||||
|
||||
@TODO @STUDY
|
||||
|
||||
To add a tribe in dirtown/tribes with a mayor phil
|
||||
see man adduser and file reference call skelet directory to set an env for apxtrib in /home/tribename/
|
||||
accessible by tribename/password
|
||||
then add group group me to phil to allow phil to ate a symlink /dirtown/tribes/tribename => to /home/tribename
|
||||
|
||||
At each reboot run a process to analyse /api/routes and api/models whre only js can be exexuted are safe (only write data into /home/tribename, never outside)
|
||||
|
||||
1- Create a user in linux with $ sudo useradd smatchit
|
||||
2 => this create a user:group and a folder smatchit in /home/phil/dirtown/tribes/
|
||||
2 => add group smatchit to phil to allow phil to access file with a group accessright
|
||||
3 set a password if needed "$sudo passwd smatchit" (sm@tchit) to smatchit to make it available from ssh on port 22
|
||||
4
|
||||
4 to delete a user sudo userdel smatchit (this keep folder smatchit to remove folder smatchit => sudo userdel --remove smacthit)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/tribes/tribeid
|
||||
Manage a tribeid space
|
||||
* create
|
||||
|
@@ -1,4 +1,5 @@
|
||||
{
|
||||
"validcheck":"Your data are valid",
|
||||
"typedoesnnotexistinschema":"This type in your propertie is not manage by Checkjson.js",
|
||||
"dataerrpropertie":"Check your data that not fit your schema rules propertie",
|
||||
"dataerrpropertiesrequired":"This propertie is required and not present in your data"
|
||||
|
6
api/models/lg/Checkjson_fr.json
Normal file
6
api/models/lg/Checkjson_fr.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"validcheck":"Your data are valid",
|
||||
"typedoesnnotexistinschema":"This type in your propertie is not manage by Checkjson.js",
|
||||
"dataerrpropertie":"Check your data that not fit your schema rules propertie",
|
||||
"dataerrpropertiesrequired":"This propertie is required and not present in your data"
|
||||
}
|
9
api/models/lg/Notifications_fr.json
Normal file
9
api/models/lg/Notifications_fr.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"missingconf":"Il manque un smtp/sms valide pour {{tribe}} ou sur le serveur /conf.json",
|
||||
"missingdata":"Il manque des données obligatoire dans data {{#missingk}} {{.}} {{/missingk}}",
|
||||
"missingfile":"Le ou les fichiers suivants n'existent pas {{#missingfile}} {{.}} {{/missingfile}}",
|
||||
"errsendmail":"Une erreur s'est produite lors de l'envoie de l'email",
|
||||
"successfullsentemail":"Email correctement envoyé",
|
||||
"errsendsms":"Une erreur s'est produite lors de l'envoie du sms",
|
||||
"successfullsentsms":"Sms bien envoyé à {{To}}"
|
||||
}
|
13
api/models/lg/Odmdb_fr.json
Normal file
13
api/models/lg/Odmdb_fr.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"alreadyexist": "Un object {{objectname}} avec la clé {{key}} existe déjà avec {{val}}",
|
||||
"doesnotexist": "L'object {{objectname}} avec la clé {{key}} ,'existe pas avec {{val}}",
|
||||
"getschema": "Schema {{{conf.name}}}",
|
||||
"schemanotfound": "Schema introuvable dans {{{schemaPath}}}",
|
||||
"pathnamedoesnotexist": "Le repertoire n'existe pas {{{indexpath}}}",
|
||||
"objectfiledoesnotexist": "Le fichier n'exuiste pas {{{objectpath}}}",
|
||||
"cudsuccessfull": "Mise à jour effectuée avec succés",
|
||||
"misssingprimarykey": "Il manque une clé primaire apxid pour stocker et identifier les objects",
|
||||
"unconsistencyapxidx": "L'index {{name}} doit contenir en objkey au moins {{apxid}} car keyval n'est pas unique",
|
||||
"profilnotallow": "Vous n'avez pas le profil de {{profils}}, cette action n'est pas authorisée",
|
||||
"successreindex": "Objet reindexé à partir des items, vos index sont à jour"
|
||||
}
|
@@ -1,5 +1,8 @@
|
||||
{
|
||||
"successfullcreate": "Alias creation for {{alias}} successfull. {{#withemail}} An email was sent to {{email}}, if you do not receive it, please download your keys before living this page.{{/withemail}}",
|
||||
"successfulluppdate": "Your alias as a Person is now update into {{tribe}}",
|
||||
"tribedoesnotexist": "Your tribe {{tribe}} does not exist in this town"
|
||||
"aliasexist":"This alias {{alias]} exist",
|
||||
"aliasdoesnotexist":"This alias {{alias}} does not exist ",
|
||||
"personexist":"This person {{alias}} exist for {{tribeid}}",
|
||||
"successfullcreate": "This identity {{alias}} creation was successfull. {{#withemail}} An email was sent to {{email}}, if you do not receive it, please download your keys before living this page.{{/withemail}}",
|
||||
"successfulluppdate": "Your alias as a Person is now update into {{tribeid}}",
|
||||
"tribedoesnotexist": "Your tribe {{tribeid}} does not exist in this town"
|
||||
}
|
||||
|
@@ -1,41 +1,13 @@
|
||||
{
|
||||
"ERRcritical": "Erreur critique",
|
||||
"loginAlreadyExist": "Ce login exist déjà",
|
||||
"emailAlreadyExist":"Cet email exist déjà",
|
||||
"failtoWritefs":"Impossible d'ecrire sur le serveur",
|
||||
"successfullCreate": "Création réussit",
|
||||
"successfullDelete": "Mise à jour effectuée",
|
||||
"serverNeedAuthentification":"Ce serveur a besoin d'une authentification",
|
||||
"forbiddenAccess":"Accès interdit",
|
||||
"userNotAllowtoCreate":"Pas d'autorisation de creation",
|
||||
"userNotAllowtoUpdate":"Pas d'autorisatiuon de mise à jour",
|
||||
"userNotAllowtoDelet":"Pas d'autorisation de suppression",
|
||||
"uuidNotFound":"Le paîen {{uuid}} n'existe pas dans la tribu {{tribeName}}",
|
||||
"useremailNotfound":"Email introuvable",
|
||||
"loginDoesNotExist":" Login introuvable",
|
||||
"checkCredentials":" Vérifier vos parametres d'accès"
|
||||
"wrongPassword":"Vérifier votre mot de passe",
|
||||
"invalidData":"Vérifier vos données",
|
||||
"pswToosimple":"Votre mot de passe est trop simple, doit contenir au moins 8 caractères avec des lettres majusculmes, minuscules des nombres et au moins un caractere special @! ...",
|
||||
"ERRemail":"Vérifier votre email",
|
||||
"ERRnewnewbisdiff":"Les 2 mots de passe ne sont pas identique",
|
||||
"uuiddesc":"Identifiant",
|
||||
"uuiddesclong":"Identifiant unique au format UUID.v4()",
|
||||
"uuidinfo":"<p> L'usage d'UUID v4 permet de générer un code unique sans centralisation, car il est basé sur un timestamp et une clé crypto ce qui donne un code du type 7d8291c0-e137-11e8-9f7b-1dc8e57bed33 </p>",
|
||||
"logindesc":"login",
|
||||
"logininfo":"<p>Le login doit être unique sur une instance d'apxtrib.</p><p> Pour échanger en dehors d'une instance apxtrib on utilise la clé public du user ou pour un humain login@trib.town§.nation.xx avec le nom du domaine qui heberge l'instance</p><p> Ou encore login@domain.xx tout domain.xx utilisé pour heberger un espace web client /tribeid/www/</p>",
|
||||
"biographydesc":"Vous en quelques mots",
|
||||
"publickeyinfo":"<p>Cette clé est générée par votre navigateur, garder précisuesement votre clé privée que seule vous connaissez. En cas de perte de cette clé tous vos actifs seront perdus.</p><p>Cette méthode nous permet de vous garantir un contrôle total décentralisé.</p>",
|
||||
"imgavatardesc":"Changer votren avatar",
|
||||
"imgavatarinfo":"Pour un meilleur rendu, une mage carré de 128pc en foat jpg",
|
||||
"emaildesc":"Email",
|
||||
"telephonedesc":"Tel",
|
||||
"familyNamedesc":"Nom",
|
||||
"givenNamedesc":"Prénom",
|
||||
"additionalNamedesc":"Pseudo",
|
||||
"additionalNamesinfo":"<p>Nom avec lequel vous souhaitez qu'on vous reconnaisse sur l'instance de l'apxtrib </p><p>Attention ce nom n'est unique que sur une instance d'apxtrib. Un même speudo peut-être utilisé sur un autre serveur pour garantir l'identité vérifié pseudo@ domaine de rattachement.</p>",
|
||||
"dtcreatedesc":"Date de creation",
|
||||
"dtupdatedesc":"Dernière mise à jour",
|
||||
"dtlastlogindesc":"Dernier accès au login",
|
||||
"accessrightsdesc":"Vos droits d'accès"
|
||||
}
|
||||
"aliasexist": "Cet alias {{data.alias}} existe",
|
||||
"emailerr": "Verifier votre email",
|
||||
"aliasorprivkeytooshort": "Vérifiez votre alias et votre clé privée",
|
||||
"aliasdoesnotexist": "Cet alias {{data.alias}} n'existe pas",
|
||||
"personexist": "Cette personne {{data.alias}} existe pour {{data.tribeid}}",
|
||||
"persondoesnotexist": "Cette personne {{data.alias}} n'existe pas pour {{data.tribeid}}",
|
||||
"successfullcreate": "La création de cette identité {{data.alias}} a été un succès. {{#data.withemail}} Un email a été envoyé à {{data.email}}, si vous ne le recevez pas, veuillez télécharger vos clés avant de quitter cette page.{{/data.withemail}}",
|
||||
"successfulcreatewithoutemail": "La creation de data.alias}} a été un succès. Aucun email ,'a été envoyé, verifier bien que vos clés sont bien sauvegardé de votre coté",
|
||||
"successfulluppdate": "Votre alias en tant que Personne est maintenant mis à jour dans {{data.tribeid}}",
|
||||
"errcreate": "Desolé, un probléme inconnu empeche la creation",
|
||||
"logout": "Votre token a été supprimé du server"
|
||||
}
|
||||
|
3
api/models/lg/Tribes_fr.json
Normal file
3
api/models/lg/Tribes_fr.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"actionmissing":"L'action {{data.action}} n'existe pas pour la tribut {{data.tribe}}."
|
||||
}
|
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"errrequest": "Backend seems not available",
|
||||
"missingheader": "Some header miss to have a valid request: {{#data}} {{.}} {{/data}}",
|
||||
"tribeiddoesnotexist": "Header xtribe: {{data.xtribe}} does not exist in this town",
|
||||
"tribeiddoesnotexist": "Header xtribe: {{data.xtribe}} does not exist in this town you cannot access",
|
||||
"authenticated": "Your alias{{{data.xalias}}} is authenticated",
|
||||
"notauthenticated": "Your alias: {{data.xalias}} is not authenticated {{^data.aliasexists}} and this alias does not exist !{{/data.aliasexists}}",
|
||||
"forbiddenAccessright": "Pagan {{data.xalias}} has not access right to act {{data.action}} onto object {{data.object}} for tribe {{mor.xworkon}}"
|
||||
"forbiddenAccessright": "Alias {{data.xalias}} has not access right to act {{data.action}} onto object {{data.object}} for tribe {{mor.xworkon}}"
|
||||
}
|
10
api/models/lg/middlewares_fr.json
Normal file
10
api/models/lg/middlewares_fr.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"errrequest": "Le serveur ne semble pas répondre",
|
||||
"unconsistentpgp": "Vos clés ne sont pas conforme {{err}}",
|
||||
"missingheader": "Certains en-têtes manquent pour avoir une requête valide : {{#data}} {{.}} {{/data}}",
|
||||
"tribeiddoesnotexist": "L'en-tête xtribe : {{data.xtribe}} n'existe pas dans cette ville, vous ne pouvez pas y accéder",
|
||||
"authenticated": "Votre alias {{{data.xalias}}} est authentifié",
|
||||
"notauthenticated": "Votre alias : {{data.xalias}} n'est pas authentifié {{^data.aliasexists}} et cet alias n'existe pas !{{/data.aliasexists}}",
|
||||
"forbiddenAccessright": "L'alias {{data.xalias}} n'a pas le droit d'agir {{data.action}} sur l'objet {{data.object}} pour la tribu {{mor.xworkon}}",
|
||||
"signaturefailled": "Desolé votre signature n'est pas valide pour cet alias."
|
||||
}
|
@@ -3,6 +3,7 @@ Unit testing
|
||||
*/
|
||||
const assert = require("assert");
|
||||
const Checkjson = require("../Checkjson.js");
|
||||
const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
|
||||
const ut = { name: "Checkjson" };
|
||||
|
||||
@@ -17,115 +18,120 @@ const schema = {
|
||||
},
|
||||
};
|
||||
const testproperties = [
|
||||
{
|
||||
name: "test0",
|
||||
data: { totest: true },
|
||||
properties: { totest: { type: "boolean" } },
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
name: "test0",
|
||||
data: { totest: true },
|
||||
properties: { totest: { type: "boolean" } },
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test1",
|
||||
data: { totest: "blabla" },
|
||||
properties: { totest: { type: "string" } },
|
||||
status: 200
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test2",
|
||||
data: { totest: 123 },
|
||||
properties: { totest: { type: "string" } },
|
||||
status: 417
|
||||
status: 417,
|
||||
},
|
||||
{
|
||||
name: "test3",
|
||||
data: { totest: 123.13 },
|
||||
properties: { totest: { type: "integer" } },
|
||||
status: 417
|
||||
status: 417,
|
||||
},
|
||||
{
|
||||
name: "test4",
|
||||
data: { totest: 123 },
|
||||
properties: { totest: { type: "number" } },
|
||||
status: 200
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test5",
|
||||
data: { totest: 12312 },
|
||||
properties: { totest: { type: "number" } },
|
||||
status: 200
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test6",
|
||||
data: { totest: 12.313 },
|
||||
properties: { totest: { type: "float" } },
|
||||
status: 200
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test7",
|
||||
data: { totest: "blablab sfde" },
|
||||
data: { totest: "blablab sfde" },
|
||||
properties: { totest: { type: "string", minLength: 1111 } },
|
||||
status: 417
|
||||
status: 417,
|
||||
},
|
||||
{
|
||||
name: "test8",
|
||||
data: { totest: "blablab sfde" },
|
||||
properties: { totest: { type: "string", minLength: 4, maxLength: 128} },
|
||||
status: 200
|
||||
data: { totest: "blablab sfde" },
|
||||
properties: { totest: { type: "string", minLength: 4, maxLength: 128 } },
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test9",
|
||||
data: { totest: 12 },
|
||||
properties: { totest: { type: "integer", multipleOf:3} },
|
||||
status: 200
|
||||
data: { totest: 12 },
|
||||
properties: { totest: { type: "integer", multipleOf: 3 } },
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test10",
|
||||
data: { totest: 9 },
|
||||
properties: { totest: { type: "number", minimum:-10, exclusiveMaximum:10} },
|
||||
status: 200
|
||||
data: { totest: 9 },
|
||||
properties: {
|
||||
totest: { type: "number", minimum: -10, exclusiveMaximum: 10 },
|
||||
},
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test11",
|
||||
data: { totest: 10 },
|
||||
properties: { totest: { type: "number", minimum:-10, exclusiveMaximum:10} },
|
||||
status: 417
|
||||
data: { totest: 10 },
|
||||
properties: {
|
||||
totest: { type: "number", minimum: -10, exclusiveMaximum: 10 },
|
||||
},
|
||||
status: 417,
|
||||
},
|
||||
{
|
||||
name: "test12",
|
||||
data: { totest: "gfhrtabcdgfr" },
|
||||
properties: { totest: { type: "string", pattern:/.*abc.*/} },
|
||||
status: 200
|
||||
data: { totest: "gfhrtabcdgfr" },
|
||||
properties: { totest: { type: "string", pattern: /.*abc.*/ } },
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test13",
|
||||
data: { totest: "toto@google.com" },
|
||||
properties: { totest: { type: "string", format:"email"} },
|
||||
status: 200
|
||||
data: { totest: "toto@google.com" },
|
||||
properties: { totest: { type: "string", format: "email" } },
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test14",
|
||||
data: { totest: "Aze123@0" },
|
||||
properties: { totest: { type: "string", format:"password"} },
|
||||
status: 200
|
||||
data: { totest: "Aze123@0" },
|
||||
properties: { totest: { type: "string", format: "password" } },
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test15",
|
||||
data: { totest: "value1" },
|
||||
properties: { totest: { type: "string", enum:["value1","value2","value3"]} },
|
||||
status: 200
|
||||
data: { totest: "value1" },
|
||||
properties: {
|
||||
totest: { type: "string", enum: ["value1", "value2", "value3"] },
|
||||
},
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
name: "test16",
|
||||
data: { totest: ["t1","t2"] },
|
||||
properties: { totest: { type: ["string", "number"] }},
|
||||
status: 417
|
||||
}
|
||||
,
|
||||
data: { totest: ["t1", "t2"] },
|
||||
properties: { totest: { type: ["string", "number"] } },
|
||||
status: 417,
|
||||
},
|
||||
{
|
||||
name: "test17",
|
||||
data: { totest: 12 },
|
||||
properties: { totest: { type: ["string", "number"] }},
|
||||
status: 200
|
||||
}
|
||||
properties: { totest: { type: ["string", "number"] } },
|
||||
status: 200,
|
||||
},
|
||||
];
|
||||
|
||||
ut.testproperties = (options) => {
|
||||
@@ -134,10 +140,10 @@ ut.testproperties = (options) => {
|
||||
schema.properties = t.properties;
|
||||
const res = Checkjson.schema.data(schema, t.data);
|
||||
if (res.status != t.status) {
|
||||
msg = (msg == "") ? "Unconsistent testproperties() name list: " : `${msg},`;
|
||||
msg = msg == "" ? "Unconsistent testproperties() name list: " : `${msg},`;
|
||||
if (options.verbose) {
|
||||
console.log(t)
|
||||
console.log(res);
|
||||
console.log(t);
|
||||
console.log(res);
|
||||
}
|
||||
msg += res.err.map((e) => ` ${t.name} ${e.info}`);
|
||||
}
|
||||
|
@@ -2,71 +2,42 @@
|
||||
Unit testing
|
||||
*/
|
||||
const assert = require("assert");
|
||||
const fs=require('fs-extra');
|
||||
const path= require('path');
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const Odmdb = require("../Odmdb.js");
|
||||
const {generemdp} = require('../../nationchains/socialworld/contracts/toolsbox.js');
|
||||
const { generemdp } = require("../toolsbox.js");
|
||||
const conf = require(`${process.env.dirtown}/conf.json`);
|
||||
|
||||
const ut = { name: "Odmdb" };
|
||||
/*
|
||||
We test only search and indexation here
|
||||
Create Update Read and Delete are unit testing with specificities of each Object.
|
||||
|
||||
To do that we create in tmp a dummy data folder for a dummy schema object
|
||||
Test crud process for any object
|
||||
*/
|
||||
const schema = {
|
||||
$schema: "http://json-schema.org/schema#",
|
||||
title: "Dummy schema to test Checkjson.js",
|
||||
description: "Checkjson is use on server as well as into a browser",
|
||||
$comment: "We change schema type on the fly to simplify the test",
|
||||
type: "object",
|
||||
properties: {
|
||||
uuid: {
|
||||
type:"string",
|
||||
format:"uuid",
|
||||
default:"=uuid.v4()"
|
||||
},
|
||||
dtcreate:{
|
||||
type:"string",
|
||||
format:"datetime",
|
||||
default:"=date.now()"
|
||||
},
|
||||
tag:{
|
||||
type:"string",
|
||||
enum:["t1","t2","t3"],
|
||||
default:"t1"
|
||||
},
|
||||
info:{
|
||||
type:"string",
|
||||
minLength: 10,
|
||||
default:"=generemdp(255,'ABCDEFGHIJKLM 12340')"
|
||||
}
|
||||
},
|
||||
required:["uuid"],
|
||||
apxprimarykey:"uuid",
|
||||
apxuniquekey:["info"],
|
||||
apxsearchindex:{
|
||||
"uuid":{"list":[],"taginfo":['tag','info'],"all":""},
|
||||
"info":{"uuid":['uuid']}
|
||||
}
|
||||
};
|
||||
ut.crud = (objectPathname, itm, profils) => {
|
||||
//
|
||||
// test if exist
|
||||
// if not test create
|
||||
// test to read
|
||||
// test update
|
||||
// test delete
|
||||
const res = { status: 200, err: [] };
|
||||
return res;
|
||||
};
|
||||
|
||||
const obj={tag:"t1",info:"Lorem ipsum A"}
|
||||
const testvar={alias:"tutu", passphrase:"",privatekey:"", publickey:""}
|
||||
|
||||
ut.createanobject=(schema,obj)=>{
|
||||
const testitms=[
|
||||
{objectPathname:`${conf.dirapi}/nationchains/pagans`,
|
||||
itm:{alias:'toutou', publickey:}}
|
||||
]
|
||||
|
||||
const res={status:200,err:[]}
|
||||
return res
|
||||
}
|
||||
|
||||
ut.run = (options) => {
|
||||
const objectPath=path.resolve(__dirname,'../../tmp/testobjects');
|
||||
const schemaPath=path.resolve(__dirname,'../../tmp/testschema');
|
||||
if (!fs.existsSync(objectPath)) fs.ensureDirSync(objectPath);
|
||||
if (!fs.existsSync(schemaPath)) fs.ensureDirSync(schemaPath);
|
||||
const createenvobj=Odmdb.setObject(schemaPath,objectPath,"objtest",schema,{},"en");
|
||||
assert.deepEqual(createenvobj,{status:200},JSON.stringify(createenvobj));
|
||||
const checkschema= Odmdb.schema(schemaPath,"objtest",true)
|
||||
assert.deepEqual(checkschema.status,200,JSON.stringify(checkschema))
|
||||
};
|
||||
module.exports = ut;
|
||||
|
||||
ut.run = (options) => {
|
||||
let msg=""
|
||||
testitms.forEach(i=>{
|
||||
ut.crud(i)
|
||||
//si erreur add msg+++
|
||||
})
|
||||
assert.deepEqual(msg, "", msg);
|
||||
};
|
||||
module.exports = ut;
|
||||
|
@@ -6,7 +6,6 @@ const Nations = require( '../models/Nations.js' );
|
||||
// Middlewares
|
||||
const checkHeaders = require( '../middlewares/checkHeaders' );
|
||||
const isAuthenticated = require( '../middlewares/isAuthenticated' );
|
||||
const hasAccessrighton = require( '../middlewares/hasAccessrighton' );
|
||||
const router = express.Router();
|
||||
|
||||
/*
|
||||
|
@@ -7,32 +7,98 @@ const Odmdb = require("../models/Odmdb.js");
|
||||
// Middlewares
|
||||
const checkHeaders = require("../middlewares/checkHeaders");
|
||||
const isAuthenticated = require("../middlewares/isAuthenticated");
|
||||
const hasAccessrighton = require("../middlewares/hasAccessrighton");
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
* @api {get} /odmdb/rebuildidx/:objectname
|
||||
* @apiName Rebuild all index for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
* @objectname {string} Mandatory
|
||||
*
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
router.get(
|
||||
"/rebuildidx/:objectname",
|
||||
checkHeaders,
|
||||
isAuthenticated,
|
||||
(req, res) => {
|
||||
console.log("reindex");
|
||||
// check validity and accessright
|
||||
const objectPathname = conf.api.nationObjects.includes(
|
||||
req.params.objectname
|
||||
)
|
||||
? `${conf.dirapi}/nationchains/${req.params.objectname}`
|
||||
: `${conf.dirtown}/tribes/${req.session.header.xtribe}/${req.params.objectname}`;
|
||||
//console.log(objectPathname);
|
||||
if (!fs.existsSync(objectPathname)) {
|
||||
res.status(404).json({
|
||||
status: 404,
|
||||
ref: "Odmdb",
|
||||
msg: "pathnamedoesnotexist",
|
||||
data: { indexpath: objectPathname },
|
||||
});
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
conf.api.nationObjects.includes(req.params.objectname) &&
|
||||
!req.session.header.xprofils.includes("mayor")
|
||||
) {
|
||||
res.status(403).json({
|
||||
status: 403,
|
||||
ref: "Odmdb",
|
||||
msg: "profilnotallow",
|
||||
data: { profils: "mayor" },
|
||||
});
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
!conf.api.nationObjects.includes(req.params.objectname) &&
|
||||
!req.session.header.xprofils.includes("druid")
|
||||
) {
|
||||
res.status(403).json({
|
||||
status: 403,
|
||||
ref: "Odmdb",
|
||||
msg: "profilnotallow",
|
||||
data: { profils: "druid" },
|
||||
});
|
||||
return false;
|
||||
}
|
||||
const reindex = Odmdb.idxfromitm(objectPathname, "I", {}, {}, [], {});
|
||||
res.status(reindex.status).json(reindex);
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @api {get} /odmdb/idx/:indexname
|
||||
* @apiName Get index file for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
* @objectname {string} Mandatory
|
||||
* @apiParam {String} indexname Mandatory if in conf.nationObjects then file is into nationchains/ else in /nationchains/tribes/xtribe/objectname/idx/indexname indexname contains the ObjectName .*_ (before the first _)
|
||||
*
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
router.get(
|
||||
"/:objectname/idx/:indexname",
|
||||
checkHeaders,
|
||||
isAuthenticated,
|
||||
(req, res) => {
|
||||
/**
|
||||
* @api {get} /odmdb/idx/:indexname
|
||||
* @apiName Get index file for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
* @objectname {string} Mandatory
|
||||
* @apiParam {String} indexname Mandatory if in conf.nationObjects then file is into nationchains/ else in /nationchains/tribes/xtribe/objectname/idx/indexname indexname contains the ObjectName .*_ (before the first _)
|
||||
*
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
console.log("pzasse");
|
||||
console.log("passe");
|
||||
// indexname = objectname_key_value.json
|
||||
let objectLocation = "../../nationchains/";
|
||||
if (!conf.api.nationObjects.includes(req.params.objectname)) {
|
||||
@@ -51,29 +117,29 @@ router.get(
|
||||
}
|
||||
}
|
||||
);
|
||||
/**
|
||||
* @api {get} /odmdb/itm/:objectname/:primaryindex
|
||||
* @apiName Get index file for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname name Mandatory if in conf.nationObjects then file is into nationchains/ else in /nationchains/tribes/xtribe/objectname
|
||||
* @apiParam {String} primaryindex the unique id where item is store
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
// indexname = objectname_key_value.json
|
||||
router.get(
|
||||
"/:objectname/itm/:primaryindex",
|
||||
checkHeaders,
|
||||
isAuthenticated,
|
||||
(req, res) => {
|
||||
/**
|
||||
* @api {get} /odmdb/item/:objectname/:primaryindex
|
||||
* @apiName Get index file for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname name Mandatory if in conf.nationObjects then file is into nationchains/ else in /nationchains/tribes/xtribe/objectname
|
||||
* @apiParam {String} primaryindex the unique id where item is store
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
// indexname = objectname_key_value.json
|
||||
const objectName = req.params.objectname;
|
||||
const objectId = req.params.primaryindex;
|
||||
let objectLocation = "../../nationchains/";
|
||||
@@ -97,7 +163,8 @@ router.get(
|
||||
);
|
||||
|
||||
router.post(":objectname/itm", checkHeaders, isAuthenticated, (req, res) => {
|
||||
// Create an item of an object
|
||||
// Create an item of an object with no specificities
|
||||
// if specificities then create a route / model that import odmdb
|
||||
});
|
||||
router.get(
|
||||
"/searchitems/:objectname/:question",
|
||||
@@ -126,24 +193,24 @@ router.get(
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* @api {get} /odmdb/schema/:objectname
|
||||
* @apiName GetSchema
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname Mandatory if headers.xworkon == nationchains then into ./nationchains/ else into ./tribes/xworkon/
|
||||
*
|
||||
* @apiError (404) {string} status a key word to understand not found schema
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains schema requested
|
||||
*
|
||||
*/
|
||||
router.get("schema/:objectname", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {get} /odmdb/schema/:objectname
|
||||
* @apiName GetSchema
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname Mandatory if headers.xworkon == nationchains then into ./nationchains/ else into ./tribes/xworkon/
|
||||
*
|
||||
* @apiError (404) {string} status a key word to understand not found schema
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains schema requested
|
||||
*
|
||||
*/
|
||||
const fullpath = path.resolve(
|
||||
`${__dirname}/tribes/${req.session.header.xworkon}/schema/${req.params.pathobjectname}.json`
|
||||
);
|
||||
|
@@ -1,210 +1,226 @@
|
||||
const express = require("express");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
|
||||
// Classes
|
||||
const Pagans = require("../models/Pagans.js");
|
||||
const Notifications = require("../models/Notifications.js");
|
||||
|
||||
// Middlewares
|
||||
const checkHeaders = require("../middlewares/checkHeaders");
|
||||
const isAuthenticated = require("../middlewares/isAuthenticated");
|
||||
const hasAccessrighton = require("../middlewares/hasAccessrighton");
|
||||
|
||||
const router = express.Router();
|
||||
/*
|
||||
models/Pagans.js
|
||||
Managed:
|
||||
/data/tribee/client-Id/users/uuid.json
|
||||
/searchindex/emails.json {email:uuid}
|
||||
/login.json {login:uuid}
|
||||
/uids.json {uuid;[[
|
||||
login,
|
||||
email,
|
||||
encrypted psw,
|
||||
accessrights]}
|
||||
/**
|
||||
* /api/models/Pagans.js
|
||||
*
|
||||
* Managed:
|
||||
|
||||
ACCESSRIGHTS = {
|
||||
app:{"tribeid:appname":"profil"},
|
||||
data:{"tribeid":{object:"CRUDO"}}
|
||||
}
|
||||
ACCESSRIGHTS is store into the token and is load into req.session.header.accessrights by hasAccessrighton() middleware
|
||||
|
||||
appname is a website space object /sitewebsrc/appname
|
||||
website live is strored into /dist source in /src
|
||||
|
||||
This can be managed by maildigitcreator or not.
|
||||
apxtrib/sitewebs/webapp is the webinterface of apxtrib
|
||||
|
||||
profil: admin / manager / user are key word to give specific access to data into model. Any kind of other profil can exist. It is usefull to manage specific menu in an app.
|
||||
It is also possible to authorize update a field's object depending of rule into dataManagement/object/
|
||||
{ field:X
|
||||
nouserupdate: "!(['admin','manager'].includes(contexte.profil))",
|
||||
}
|
||||
|
||||
data allow a user to access tribeid with Create Read Update Delete Own (CRUDO) on each object of a tribeid independantly of any app.
|
||||
|
||||
Create allow to create a new object respecting rules defined into /referentials/dataManagement/object/name.json
|
||||
Update idem
|
||||
Delete idem
|
||||
Owner means it can be Write/Delete if field OWNER contain the UUID that try to act on this object. Usefull to allow someone to fully manage its objects.
|
||||
|
||||
*/
|
||||
/**
|
||||
* @api {get} /pagans/alias/:alias
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @param {string} alias a alias that exist or not
|
||||
* @apiSuccess (200) {object} {ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
|
||||
* @apiError (404) {object} {ref:"pagans",msg:"aliasdoesnotexist",data: { alias} }
|
||||
*
|
||||
**/
|
||||
router.get("/alias/:alias", (req, res) => {
|
||||
/**
|
||||
* @api {get} /pagans/alias/:alias
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @param {string} alias a alias that exist or not
|
||||
* @apiSuccess (200) {object} {ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
|
||||
* @apiError (404) {object} {ref:"pagans",msg:"aliasdoesnotexist",data: { alias} }
|
||||
*
|
||||
**/
|
||||
res.send(Pagans.getalias(req.params.alias));
|
||||
const getalias = Pagans.getalias(req.params.alias);
|
||||
res.status(getalias.status).send(getalias);
|
||||
});
|
||||
router.get("/person/:alias", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {get} /pagans/person:alias
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
* @param {string} alias that exist
|
||||
* @param {string} tribeId that exist with a person alias
|
||||
* @apiSuccess (200) {ref:"pagans",msg:"personexist",data: { person } }
|
||||
* @apiError (404) {ref:"pagans",msg:"persondoesnotexist",data: { person } }
|
||||
*
|
||||
* @todo check accessright for req.session.header.xalias to see if jhe can get person data
|
||||
* if req.param.alias == req.session.header.xalias => Owner
|
||||
* else need accessright to on person set at R
|
||||
* */
|
||||
res.send(Pagans.getperson(req.params.alias, req.session.header.xtribe));
|
||||
/**
|
||||
* @api {get} /pagans/logout
|
||||
* @apiName Remove token
|
||||
* @apiGroup Pagans
|
||||
*
|
||||
*/
|
||||
router.get("/logout", checkHeaders, isAuthenticated, (req, res) => {
|
||||
console.log(req.session.header);
|
||||
const logout = Pagans.logout(
|
||||
req.session.header.xalias,
|
||||
req.session.header.xtribe,
|
||||
req.session.header.xdays,
|
||||
req.session.header.xhash
|
||||
);
|
||||
res.status(logout.status).json(logout);
|
||||
});
|
||||
|
||||
/**
|
||||
* @api {get} /pagans/person:alias
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
* @param {string} alias that exist
|
||||
* @param {string} tribeId that exist with a person alias
|
||||
* @apiSuccess (200) {ref:"pagans",msg:"personexist",data: { person } }
|
||||
* @apiError (404) {ref:"pagans",msg:"persondoesnotexist",data: { person } }
|
||||
*
|
||||
* @todo check accessright for req.session.header.xalias to see if jhe can get person data
|
||||
* if req.param.alias == req.session.header.xalias => Owner
|
||||
* else need accessright to on person set at R
|
||||
* */
|
||||
router.get("/person/:alias", checkHeaders, isAuthenticated, (req, res) => {
|
||||
const getperson = Pagans.getperson(
|
||||
req.session.header.xtribe,
|
||||
req.params.alias,
|
||||
{ xprofils: req.session.header.xprofils, xalias: req.session.header.xalias }
|
||||
);
|
||||
res.status(getperson.status).send(getperson);
|
||||
});
|
||||
|
||||
/**
|
||||
* @api {get} /pagans/isauth
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiError (400) {object} status missingheaders / xalias does not exist / signaturefailled
|
||||
* @apiError (401) {object} alias anonymous (not authenticated)
|
||||
* @apiError (404) {string} tribe does not exist
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
router.get("/isauth", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {get} /pagans/isauth
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiError (400) {object} status missingheaders / xalias does not exist / signaturefailled
|
||||
* @apiError (401) {object} alias anonymous (not authenticated)
|
||||
* @apiError (404) {string} tribe does not exist
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
res.send({
|
||||
res.status(200).send({
|
||||
status: 200,
|
||||
ref: "headers",
|
||||
msg: "authenticated",
|
||||
data: {
|
||||
xalias: req.session.header.xalias,
|
||||
xprofils: req.session.header.xprofils,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* @api {post} /pagans
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* Create a pagan account from alias, publickey, if trusted recovery =>
|
||||
* Create a person in xtribe/person/xalias.json with profil.auth={email,privatekey, passphrase}
|
||||
* Middleware isAuthenticated check that:
|
||||
* - xhash is well signed from private key linked to the publickey of alias
|
||||
* - check that alias does not already exist (if yes then verifiedsigne would be false)
|
||||
* Need to wait next block chain to be sure that alias is register in the blokchain
|
||||
*/
|
||||
router.post("/", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {post} /pagans
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* Create a pagan account from alias, publickey, if trusted recovery =>
|
||||
* Create a person in xtribe/person/xalias.json with profil.auth={email,privatekey, passphrase}
|
||||
* Middleware isAuthenticated check that:
|
||||
* - xhash is well signed from private key linked to the publickey of alias
|
||||
* - check that alias does not already exist (if yes then verifiedsigne would be false)
|
||||
* Need to wait next block chain to be sure that alias is register in the blokchain
|
||||
*/
|
||||
//console.log("pass ici", req.body);
|
||||
const feedback = { alias: req.body.alias, publickey: req.body.publickey };
|
||||
const newpagan = Pagans.create(req.body.alias, req.body.publickey);
|
||||
const objpagan = { alias: req.body.alias, publickey: req.body.publickey };
|
||||
const newpagan = Pagans.create(objpagan, {
|
||||
xalias: req.session.header.xalias,
|
||||
xprofils: req.session.header.xprofils,
|
||||
});
|
||||
if (newpagan.status == 200) {
|
||||
if (req.body.email) {
|
||||
feedback.withemail = true;
|
||||
feedback.email = req.body.email;
|
||||
feedback.privatekey = req.body.privatekey;
|
||||
feedback.passphrase = req.body.passphrase;
|
||||
Notifications.send({
|
||||
type: "email",
|
||||
from: "",
|
||||
dest: [req.body.email],
|
||||
tpl: "registeremail",
|
||||
tribe: req.session.header.xtribe,
|
||||
data: feedback,
|
||||
});
|
||||
const emailsent = Pagans.sendmailkey(
|
||||
req.body.alias,
|
||||
req.body.privatekey,
|
||||
req.session.header.xtribe,
|
||||
req.body.passphrase,
|
||||
req.body.publickey,
|
||||
req.body.email
|
||||
);
|
||||
}
|
||||
if (req.body.trustedtribe) {
|
||||
if (req.app.locals.tribeids.includes(req.body.trustedtribe)) {
|
||||
delete feedback.withemail;
|
||||
const persondata = { recovery: feedback };
|
||||
const persoup = Pagans.personupdate(req.body.alias, req.body.trustedtribe, persondata)
|
||||
res.status(persoup.status).json(persoup)
|
||||
/*res.send(
|
||||
Pagans.personupdate(req.body.alias, req.body.trustedtribe, persondata)
|
||||
);*/
|
||||
} else {
|
||||
res.status(404).json({
|
||||
status:404,
|
||||
ref: "Pagans",
|
||||
msg: "tribedoesnotexist",
|
||||
data: { tribe: req.body.trustedtribe },
|
||||
});
|
||||
/*res.send({
|
||||
status: 404,
|
||||
ref: "Pagans",
|
||||
msg: "tribedoesnotexist",
|
||||
data: { tribe: req.body.trustedtribe },
|
||||
});*/
|
||||
}
|
||||
} else {
|
||||
newpagan.data = feedback;
|
||||
const personup = Pagans.personupdate(
|
||||
req.body.alias,
|
||||
req.body.trustedtribe,
|
||||
{
|
||||
recoveryauth: {
|
||||
email: req.body.email,
|
||||
privatekey: req.body.privatekey,
|
||||
publickey: req.body.publickey,
|
||||
passphrase: req.body.passphrase,
|
||||
},
|
||||
}
|
||||
);
|
||||
if (personup.status !== 200)
|
||||
console.log("Warning no recovery registration", personup);
|
||||
}
|
||||
if (emailsent && emailsent.status != 200) {
|
||||
newpagan.msg = "successfulcreatewithoutemail";
|
||||
res.status(newpagan.status).json(newpagan);
|
||||
//res.send(newpagan);
|
||||
}
|
||||
} else {
|
||||
//error to create pagan
|
||||
res.send(newpagan);
|
||||
//error to create pagan certaily already exist
|
||||
res.status(newpagan.status).json(newpagan);
|
||||
}
|
||||
});
|
||||
router.put("/person", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {put} /pagans/person
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* add/update a person = alias + tribe with specific accessright and specific schema link to tribe
|
||||
* @todo add tribe/schema/person.json
|
||||
*/
|
||||
/**
|
||||
* @api {post} /pagans/person
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* add a person = alias + tribe with specific accessright and specific schema link to tribe
|
||||
* @todo add tribe/schema/person.json
|
||||
*/
|
||||
router.post("/person", checkHeaders, isAuthenticated, (req, res) => {
|
||||
//console.log(req.body);
|
||||
const persoup = Pagans.personupdate(req.body.alias, req.session.header.xtribe, req.body);
|
||||
const persoad = Pagans.personcreate(
|
||||
req.session.header.xtribe,
|
||||
req.body.alias,
|
||||
req.body,
|
||||
{ xprofils: req.session.header.xprofils, xalias: req.session.header.xalias }
|
||||
);
|
||||
res.status(persoad.status).json(persoad);
|
||||
});
|
||||
|
||||
/**
|
||||
* @api {put} /pagans/person
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* update a person = alias + tribe with specific accessright and specific schema link to tribe
|
||||
* @todo add tribe/schema/person.json
|
||||
*/
|
||||
router.put("/person", checkHeaders, isAuthenticated, (req, res) => {
|
||||
//console.log(req.body);
|
||||
const persoup = Pagans.personupdate(
|
||||
req.session.header.xtribe,
|
||||
req.body.alias,
|
||||
req.body,
|
||||
{ xprofils: req.session.header.xprofils, xalias: req.session.header.xalias }
|
||||
);
|
||||
res.status(persoup.status).json(persoup);
|
||||
});
|
||||
router.delete("/:alias", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {delete} /pagans/:alias
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
* */
|
||||
|
||||
/**
|
||||
* @api {delete} /pagans/alias/:alias
|
||||
* @apiName Is register check xalias and xhash
|
||||
* @apiGroup Pagans
|
||||
* @apiUse apxHeader
|
||||
* */
|
||||
router.delete("/alias/:alias", checkHeaders, isAuthenticated, (req, res) => {
|
||||
console.log(`DELETE pagans nationchains/pagans/${req.params.alias}.json`);
|
||||
const result = Pagans.delete(req.params.id, req.session.header);
|
||||
const result = Pagans.deletealias(req.params.id, req.session.header);
|
||||
res.status(result.status).send(result.data);
|
||||
});
|
||||
router.delete("/person/:alias", checkHeaders, isAuthenticated, (req, res) => {
|
||||
console.log(`DELETE pagans nationchains/pagans/${req.params.alias}.json`);
|
||||
const result = Pagans.deleteperson(req.params.id, req.session.header);
|
||||
res.status(result.status).send(result.data);
|
||||
});
|
||||
|
||||
/**
|
||||
* @api {get} /pagans/keyrecovery/tribe/email
|
||||
* @apiName apxtrib
|
||||
* @apiGroup Pagans
|
||||
*
|
||||
*
|
||||
*
|
||||
* @apiError (400) {object} status missingheaders / xalias does not exist / signaturefailled
|
||||
* @apiError (401) {object} alias anonymous (not authenticated)
|
||||
* @apiError (404) {string} tribe does not exist
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
router.get("/keyrecovery/:tribeid/:email", checkHeaders, (req, res) => {
|
||||
/**
|
||||
* @api {get} /pagans/keyrecovery/tribe/email
|
||||
* @apiName apxtrib
|
||||
* @apiGroup Pagans
|
||||
*
|
||||
*
|
||||
*
|
||||
* @apiError (400) {object} status missingheaders / xalias does not exist / signaturefailled
|
||||
* @apiError (401) {object} alias anonymous (not authenticated)
|
||||
* @apiError (404) {string} tribe does not exist
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
res.send(Pagans.keyrecovery(req.params.tribeId, req.params.email));
|
||||
});
|
||||
module.exports = router;
|
||||
|
@@ -7,7 +7,6 @@ const Notifications = require("../models/Notifications.js");
|
||||
// Middlewares
|
||||
const checkHeaders = require("../middlewares/checkHeaders");
|
||||
const isAuthenticated = require("../middlewares/isAuthenticated");
|
||||
const hasAccessrighton = require("../middlewares/hasAccessrighton");
|
||||
const router = express.Router();
|
||||
/*
|
||||
|
||||
|
@@ -8,11 +8,10 @@ const Tribes = require( '../models/Tribes.js' );
|
||||
// Middlewares
|
||||
const checkHeaders = require( '../middlewares/checkHeaders' );
|
||||
const isAuthenticated = require( '../middlewares/isAuthenticated' );
|
||||
const hasAccessrighton = require( '../middlewares/hasAccessrighton' );
|
||||
const router = express.Router();
|
||||
|
||||
|
||||
router.get('www', checkHeaders,isAuthenticated,hasAccessrighton('www','R'),(req,res)=>{
|
||||
router.get('www', checkHeaders,isAuthenticated,(req,res)=>{
|
||||
/**
|
||||
* @api {get} /tribes/www/:tribeId
|
||||
* @apiName Get list of www object (space web)
|
||||
@@ -38,6 +37,19 @@ router.get('www', checkHeaders,isAuthenticated,hasAccessrighton('www','R'),(req,
|
||||
|
||||
//router.delete('www/:tribeId/:app)
|
||||
|
||||
router.post('/actionanonyme',checkHeaders,(req,res)=>{
|
||||
if (!fs.existsSync(`${conf.dirtown}/tribes/${req.session.header.xtribe}/actions/${req.body.action}.js`)){
|
||||
res.status(403).send({status:403,msg:"actionmissing",ref:"Tribes", data:{action:req.body.action,tribe:req.session.header.xtribe}})
|
||||
}
|
||||
const action = require(`${conf.dirtown}/tribes/${req.session.header.xtribe}/actions/${req.body.action}.js`)
|
||||
const resaction= action.run(req.body,req.session.header);
|
||||
res.status(resaction.status).send(resaction);
|
||||
})
|
||||
|
||||
router.post('/action',checkHeaders,isAuthenticated,(req,res)=>{
|
||||
|
||||
})
|
||||
|
||||
|
||||
router.get( '/clientconf/:tribeid', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
/*
|
||||
@@ -151,9 +163,9 @@ router.put( '/sendjson', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
} else {
|
||||
if( fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.body.object}/${req.body.path}` ) ) {
|
||||
// exist so can be update check accessright update on this
|
||||
hasAccessrighton( req.body.object, "U" );
|
||||
//A REVOIR hasAccessrighton( req.body.object, "U" );
|
||||
} else {
|
||||
hasAccessrighton( req.body.object, "C" );
|
||||
// AREVOIRhasAccessrighton( req.body.object, "C" );
|
||||
}
|
||||
fs.outputJsonSync( dest, req.body.data );
|
||||
res.status( 200 )
|
||||
@@ -224,7 +236,7 @@ router.delete( '/file', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
.send( { info: [ 'deleteerror' ], models: "Tribes", moreinfo: "your del req need a src" } )
|
||||
return;
|
||||
};
|
||||
hasAccessrighton( req.query.src.split( '/' )[ 0 ], "D" );
|
||||
// A REVOIR hasAccessrighton( req.query.src.split( '/' )[ 0 ], "D" );
|
||||
console.log( 'Remove file', `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` )
|
||||
console.log( req.body )
|
||||
fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` );
|
||||
|
@@ -6,7 +6,7 @@ const Wwws = require("../models/Wwws.js");
|
||||
// Middlewares
|
||||
const checkHeaders = require("../middlewares/checkHeaders");
|
||||
const isAuthenticated = require("../middlewares/isAuthenticated");
|
||||
const hasAccessrighton = require("../middlewares/hasAccessrighton");
|
||||
|
||||
const router = express.Router();
|
||||
/**
|
||||
* To manage an nginx conf
|
||||
|
Reference in New Issue
Block a user