diff --git a/.gitignore b/.gitignore index 6cde7b6..855c84e 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,9 @@ * # Except those folder !README.md +!LICENSE !.gitignore +!backup.sh !package.json !apxtri/ !apxtri/** @@ -11,4 +13,6 @@ !schema/ !schema/** !index.js +!nginx/ +!nginx/nginx.*cf diff --git a/README.md b/README.md index 06795c4..ac112a5 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,94 @@ -# adminapi +# apXtri + +## Context + +**A community of SAAS Hosters** (Software As A Service), a full solution to create a decentralized economy for Sys Admin, web dev (back/front/fullstack), entrepreneur, producer, consummer. + +**apXtri a DAO** (Decentralize Autonomous Organization), synchronize many nodes (server) called "town" that respects set of rules (called "nation"). A user called a "pagan" has a unique alias anonymous (or not) identity link to a PGP key pair (public/private) that allow him to sign any action to proof the ownership of an alias. + +A node (town) can host group of people called a tribe. Each tribe has a web space (public or private) to host web app or page as well as api endpoint. Any user (Pagan) can join a tribe and becoming a person into a tribe define by the tribe owner. + +**Rules management** works like russian dolls, a tribe define its onw rules that respect by design the towns rules, that respect the nation rules. Any user actions under rules are store into a blockchain. The blockchain is manage by each town owner. + +## Tech in actions + +apXtri, use open source tools to be easily auditable and respect the RESTFull api concept. 
 + +- ubuntu LTS (desktop https://ubuntu.com/desktop for dev, server https://ubuntu.com/server for production) +- nginx to serve https web pages and proxy api services https://nginx.org/ +- node.js / express.js to provide endpoints in a routes/models structure https://expressjs.com/ +- JSON schema https://json-schema.org/specification to define some item collections + +## Get support + +The adminapi endpoints are documented here https://antsnation.mooo.com/apidoc/index.html + +Training is a work in progress, for entrepreneurs that have engineering skills but not IT skills, to help them become autonomous in managing an apXtri node as well as to dev some web interfaces and endpoints to build a full SAAS. + +Contact support@need-data.com for any help. + +## Installation + +adminapi is a special tribe that rules all tribes to provide a SAAS solution easy to customize as a node.js dev. + +### Tech prerequisites + +#### For dev: +Install an ubuntu desktop machine https://ubuntu.com/desktop +``` +$ sudo apt update +$ sudo apt upgrade +$ sudo apt install git vim libcap2-bin p7zip-full p7zip-rar curl nginx +``` +In the next step replace {apxuser} by the login user you used to install ubuntu + +**Only if you want to create a specific user** in your desktop : +``` +$ sudo useradd -s /bin/bash -m -d /home/{apxuser} -c "{apxuser}" {apxuser} +$ sudo passwd {apxuser} +$ sudo usermod -aG sudo {apxuser} +# Switch to the new user: +$ su {apxuser} +``` + +In any case continue with: +``` +$ sudo visudo +# Add the following line to the file: +$ {apxuser} ALL=(ALL) NOPASSWD: ALL +$ mkdir -p ~/apxtowns/dev-ants +$ echo "127.0.0.1 dev-ants" | sudo tee -a /etc/hosts +# Install the latest version of nvm (check the website for the latest version: https://github.com/nvm-sh/nvm) +$ curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.38.0/install.sh | bash +# Close and re-open the terminal under the apxuser +$ nvm --version +# Then recheck the nvm version +$ nvm --version +# Install Node.js, npm, and yarn. 
Follow the online instructions to update the configuration properly. +$ nvm install node +$ node --version # to check the Node.js version +$ npm install --global yarn +$ yarn --version # to check the yarn version +$ mkdir ~/apxtowns # if it does not exist +$ mkdir ~/apxtowns/dev-ants +$ cd ~/apxtowns/dev-ants +$ sudo chown {apxuser}:root /etc/nginx/nginx.conf +$ git clone https://gitea.ndda.fr/apxtri/adminapi.git +$ yarn install +$ yarn dev +``` +Open http://dev-ants in your browser after installation. you are ready to dev in adminapi or in your tribe by following the web interface. + +You can create a git project of your tribe, we add a default .gitignore to not ignore data store in object. + +If you want to contribute fixing bug, add new features in adminapi please push in your branch and send an email to support@need-data.com. + +#### For production + + + +step + + + -Start gate to apxtri \ No newline at end of file diff --git a/apxtri/apxchat.js b/apxtri/apxchat.js new file mode 100644 index 0000000..f614709 --- /dev/null +++ b/apxtri/apxchat.js @@ -0,0 +1,46 @@ +const express = require('express'); +const app = express(); +const http = require('http'); +const server = http.createServer(app); +const { Server } = require("socket.io"); +const io = new Server(server); +const nodepgp = require('node-pgp'); // ou une autre bibliothèque PGP +const openpgp = require("openpgp"); +// ... (initialisation de nodepgp) + +io.on('connection', (socket) => { + // Gestion de la connexion d'un utilisateur + // ... + + socket.on('message', async (data) => { + // Vérifier si le destinataire est connecté + // ... 
+ + // Générer une nouvelle clé de session + const sessionKey = await nodepgp.generateKey({ + bits: 2048, + type: 'rsa' + }); + + // Chiffrer le message avec la clé publique du destinataire et la clé de session + const encryptedMessage = await nodepgp.encrypt({ + message: data.message, + publicKeys: [destinataire.publicKey], + signingKeys: [sessionKey.privateKey] + }); + + // Envoyer la clé de session chiffrée avec la clé publique du destinataire + // et le message chiffré au destinataire + socket.to(destinataire.id).emit('message', { + message: encryptedMessage, + sessionKey: await nodepgp.encrypt({ + message: sessionKey.publicKey, + publicKeys: [destinataire.publicKey] + }) + }); + }); +}); + +server.listen(3030, () => { + console.log('apxchat listening on *:3030'); +}); \ No newline at end of file diff --git a/apxtri/apxtri.js b/apxtri/apxtri.js new file mode 100755 index 0000000..ea2f690 --- /dev/null +++ b/apxtri/apxtri.js @@ -0,0 +1,315 @@ +//const { argv } = require("process"); +const fs = require("fs-extra"); +//const mustache = require("mustache"); +const bodyParser = require("body-parser"); +const glob = require("glob"); +const path = require("path"); +const Mustache = require("mustache"); +const hosts = require("hosts"); +const cors = require("cors"); +const express = require("express"); +const process = require("process"); + +/******************************************* +SEE README.md to start +********************************************/ +const apxtri = {}; + +apxtri.main = async () => { + if (!fs.existsSync("/etc/nginx/nginx.conf")) { + console.log( + "\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available, install then rerun yarn command." 
+ ); + process.exit(0); + } + //console.log(path.resolve("../adminapi/objects/tribes/itm/adminapi.json")) + if ( + fs.existsSync("../adminapi/objects/tribes/idx/tribes_dns.json") && + fs.existsSync("../adminapi/objects/tribes/itm/adminapi.json") + ) { + apxtri.runexpress( + fs.readJsonSync(`../adminapi/objects/tribes/idx/tribes_dns.json`), + fs.readJSONSync("../adminapi/objects/tribes/itm/adminapi.json") + ); + } else { + await apxtri.setup(); + } + //const conf = require(path.resolve(`../itm/adminapi.json`)); + //const conf = fs.readJsonSync(`../adminapi/objects/tribes/idx/tribes_dns.json`); + //let doms = conf.dns; // only dns of town during the init process + //let tribelist; + /*if (!fs.existsSync(`../adminapi/objects/tribes/idx/tribes_dns.json`)) { + console.log("ERROR,Missing ../adminapi/objects/tribes/idx/tribes_dns.json"); + process.exit(0); + } + */ + /* const tribesdns = fs.readJsonSync(`../adminapi/objects/tribes/idx/tribes_dns.json`); + let doms=[] + for (const tribe in tribedns){ + tribedns[tribe].forEach(d=>{ + if (!doms.includes(d)) doms.push(d); + }) + } +*/ + //apxtri.runexpress(fs.readJsonSync(`../adminapi/objects/tribes/idx/tribes_dns.json`)); +}; + +apxtri.setup = async () => { + console.log("Warning, this is a first install"); + const initadminapi = fs.readJsonSync("../adminapi/apxtri/initadminapi.json"); + try { + initadminapi.townpath = __dirname.replace("/adminapi/apxtri", ""); + const townnation = initadminapi.townpath.split("/").slice(-1)[0].split("-"); + initadminapi.townId = townnation[0]; + initadminapi.nationId = townnation[1]; + } catch (err) { + console.log("Your town folder must be something townid-nation"); + } + initadminapi.sudoUser = process.env.USER; + // Loop in initadminapi.urlinit until one answer + /*const headers = { + xtrkversion: 1, + xtribe: "adminapi", + xapp: "apx", + xlang: "fr", + xalias: "anonymous", + xhash: "anonymous", + xdays: 0, + xuuid: "0", + }; + // add in crontab each day + // find 
/home/phil/apxtowns/dev-ants/adminapi/objects -type d -name "nations" -o -name "towns" -o -name "pagans" -o -name "tplstring" -o -name "wwws" | tar -cvzf /home/phil/dev-ants/adminapi/objects/wwws/cdn/share/setupobjects.tar.gz + for (url of initadminapi.urlinit) { + if (!fs.existsSync("../objects/nations")) { + const urlinit = `${url}/api/adminapi/wwws/updatelocaldbanonymous/adminapi/apx/blockchain/0`; + const getdata = await fetch(urlinit, { headers: headers }); + console.log(getdata) + if (getdata.ok) { { + "name": "tribes_dns", + "keyval": "tribeId", + "type": "view", + "objkey": [ + "dns" + ], + "filter": "" + } + const data = await getdata.json(); + console.log(Object.keys(data.data.itms)); + Object.keys(data.data.itms).forEach((o) => { + //idxname contain the real name of the index to use + fs.outputJSONSync( + `../objects/${o}/idx/${initadminapi.idxname[o]}s.json`, + data.data.itms[o] + ); + // save each itm to init data + Object.keys(data.data.itms[o]).forEach((i) => { + fs.outputJSONSync( + `../objects/${o}/itms/${i}.json`, + data.data.itms[o][i] + ); + }); + }); + } + } + }*/ + if (!fs.existsSync("../adminapi/objects/nations")) { + console.log( + `Sorry, check setup.sh process that was not able to init your adminapi/objects ` + ); + process.exit(0); + } + fs.outputJSONSync( + "../adminapi/objects/tribes/itm/adminapi.json", + initadminapi, + { space: 2 } + ); + fs.outputJSONSync("../adminapi/objects/tribes/conf.json", { + name: "tribes", + schema: "adminapi/schema/tribes.json", + lastupdate: 0, + }); + fs.outputJSONSync("../adminapi/objects/tribes/idx/lst_tribeId.json", [ + "adminapi", + ]); + fs.outputJSONSync("../adminapi/objects/tribes/idx/tribes_dns.json", { + adminapi: initadminapi.dns, + }); + + const idxadminapi = { + adminapi: { + tribeId: "adminapi", + dns: initadminapi.dns, + status: initadminapi.status, + nationId: initadminapi.nationId, + townId: initadminapi.townId, + }, + }; + fs.outputJSONSync("../adminapi/objects/tribes/idx/tribes.json", 
idxadminapi, { + space: 2, + }); + // check nginx conf and eventually adapt + let etcnginx = fs.readFileSync("/etc/nginx/nginx.conf") + const etcnginxlines=etcnginx.slipt("\n"); + if (etcnginx.slipt("\n")[0] !== `user ${initadminapi.sudoUser};`){ + const nginxmain = fs.readFileSync("../adminapi/apxtri/setup/nginx.maincf"); + fs.outputFileSync("/etc/nginx/nginx.conf",Mustache.render(nginxmain,initadminapi),{adAdmin:true}) + + } + + const nginxapx = fs.readFileSync("./nginx/nginx.wwwscf"); + fs.outputSync( + `/etc/nginx/nginx.conf`, + Mustache.render(nginxmain, initadminapi, "utf-8") + ); + fs.outputSync( + `../nginx/adminapi-apx.conf`, + Mustache.render(nginxapx, initadminapi, "utf-8") + ); + if (!hosts.exists("127.0.0.1", "apx.adminapi")) { + hosts.add("127.0.0.1", "apx.adminapi"); + } + const { exec } = require("child_process"); + exec(initadminapi.nginx.restart, (error, stdout, stderr) => { + if (error) { + console.log("\x1b[42m", error, stdout, stderr, "x1b[0m"); + process.exit(0); + } else { + console.log( + `\x1b[42m###################################################################\x1b[0m\n\x1b[42mWellcome into apxtri, you can now 'yarn dev' for dev or 'yarn startpm2' for prod or \n'yarn unittest' for testing purpose. Access to your town here \x1b[0m\x1b[32mhttp://apx.adminapî\x1b[0m \x1b[42m \n\x1b[0m\n\x1b[42m###########################################################################################\x1b[0m` + ); + } + }); +}; + +apxtri.runexpress = async (tribesdns, conf) => { + const Odmdb = require(path.resolve("./apxtri/models/Odmdb.js")); + let tribeIds = Object.keys(tribesdns); + // context is store in /itm/tribename.json ={contexte:{routes:[],models:[{model:,tplstringslg:[]}]} + // routes={url,route} check how to add plugin tribe route later + // keep only the 2 last part (.) 
of domain name to validate cors with it (generic domain) + let routes = []; + let doms = []; + tribeIds.forEach((t) => { + tribesdns[t].forEach((d) => { + const dm = d.split(".").slice(-2).join("."); + if (!doms.includes(dm)) doms.push(dm); + //reindex database attention check dev-ants/.. a bug was fixed + glob.sync(`../${t}/objects/*`).forEach((o) => { + console.log(t, o); + Odmdb.runidx(o); + }); + }); + const context = {}; + const pathtr = path.resolve(`../${t}`); + context.routes = []; + tribroutes = glob.sync(`${pathtr}/apxtri/routes/*.js`).map((f) => { + const rt = `/${t}/${path.basename(f, ".js")}`; + context.routes.push(rt); + return { url: rt, route: f }; + }); + context.models = glob.sync(`${pathtr}/apxtri/models/*.js`).map((f) => { + const modname = `${path.basename(f, ".js")}`; + return { + model: modname, + tplstrings: glob + .sync(`${pathtr}/objects/tplstrings/${modname}_*.json`) + .map((l) => path.basename(l, ".json").split("_")[1]), + }; + }); + console.log(context.routes); + console.log(context.models); + //const conft = `../itm/${t}.json`; + //const ctx = fs.readJsonSync(conft); + //ctx.context = context; + //fs.outputJSONSync(conft, ctx, { spaces: 2 }); + routes = routes.concat(tribroutes); + }); + const app = express(); + // load express parameter from conf + Object.keys(conf.api.appset).forEach((p) => { + app.set(p, conf.api.appset[p]); + }); + // To set depending of data form or get size to send + app.use(bodyParser.urlencoded(conf.api.bodyparse.urlencoded)); + app.use(bodyParser.json(conf.api.bodyparse.json)); + // To set depending of post put json data size to send + app.use(express.json(conf.api.json)); + app.disable("x-powered-by"); // for security + app.locals.tribeids = tribeIds; + const currentmod = "Odmdb"; + const log = conf.api.activelog + ? 
conf.api.activelog.includes(currentmod) + : false; + + console.log( + currentmod, + " Allowed DOMs to access to this apxtri server:", + JSON.stringify(doms) + ); + console.log(currentmod, " app.locals.tribeids", app.locals.tribeids); + + // Cors management + let originlst = "test"; + doms.forEach((d) => { + originlst += `|${d.replace(/\./g, "\\.")}`; + }); + const regtxt = `^http.?:\/\/(${originlst})`; + let cor = false; + const regorigin = new RegExp(regtxt); + app.use((req, res, next) => { + if (req.headers.origin == undefined) { + cor = true; + } else { + cor = regorigin.test(req.headers.origin); + } + if (!cor) + console.log( + `The domain name ${req.headers.origin} is not allow to access for CORS settings, add it in itm/tribename.json in dns` + ); + cors({ + origin: cor, + allowedHeaders: conf.api.exposedHeaders, + exposedHeaders: conf.api.exposedHeaders, + credentials: true, + preflightContinue: false, + optionsSuccessStatus: 204, + }); + next(); + }); + + // Routers add any routes from /routes and /plugins + let logroute = "Routes available on this apxtri instance: \n"; + routes.forEach((r) => { + try { + logroute += r.url.padEnd(30, " ") + r.route + "\n"; + app.use(r.url, require(r.route)); + } catch (err) { + logroute += " (err check it module.exports=router;? 
or ...)\n======\n "; + console.log("raise err-:", err); + } + }); + if (log) { + console.log(currentmod, logroute); + if (process.env.NODE_MODE == "dev") + console.log( + `\x1b[42m############################################################################################\x1b[0m\n\x1b[42mThis is dev conf accessible in http://dev-ants to switch this as production, you must run:\n 1 - 'yarn dev nationId:ants townId:dev dns:dev-ants' to conf your town and check it.\n 2 - 'yarn startpm2'\n Where:\n\x1b[42m * nationId have to exist in the nationchains\n * townId new or if exist must have the same current dns,\n * dns domaine that has to redirect 80/443 into this server.\n Check README's project to learn more.\x1b[0m\n To work with apxweb for the front use http://dev-ants/apxwebapp/www/websitename/src/index.html to use the api during dev process\n\x1b[42m############################################################################################\x1b[0m` + ); + } + + //Listen event file for each tribe + // @TODO à ajouter ici + + app.listen(conf.api.port, () => { + let webaccess = `api waits request on port:${conf.api.port} for`; + conf.dns.forEach((u) => { + webaccess += `${u}/api/ `; + }); + if (log) console.log(currentmod, webaccess); + }); + console.log( + "\x1b[42m\x1b[37m", + "Made with love for people's freedom, enjoy !!!", + "\x1b[0m" + ); +}; +apxtri.main(); diff --git a/apxtri/middlewares/checkHeaders.js b/apxtri/middlewares/checkHeaders.js new file mode 100755 index 0000000..ca69c73 --- /dev/null +++ b/apxtri/middlewares/checkHeaders.js @@ -0,0 +1,115 @@ +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const currentmod='checkHeaders'; +const log = conf.api.activelog.includes(currentmod) + /** + * @api {get} / - CheckHeaders + * @apiGroup Middlewares + * @apiName CheckHeaders + * @apiDescription a list of headers are mandatory to access apxtri see in your space town /conf.json.exposedHeaders + * + * @apiHeader {string} xalias 
'anonymous' or unique alias + * @apiHeader {string} xapp name of the webapp store in tribe/tribeid/www/{xapp} + * @apiHeader {string} xlang the 2 letter request langage (if does not exist then return en = english). + * @apiHeader {string} xtribe unique tribe name where xapp exist + * @apiHeader {string} xdays a timestamp 0 or generate during the authentifyme process + * @apiHeader {string} xhash anonymous or signature of message: xalias_xdays created by alias private key during authentifyme process + * @apiHeader {array[]} xprofils list of string profil apply into xtribe for xapp + * @apiHeader {string} xuuid a unique number uuid.v4 created the fisrt time a domain is visited on a device + * @apiHeader {integer} xtrkversion a version number link to tracking system + * @apiHeaderExample {json} Header-Example for anonymous: + * { + * "xtrkversion":1, + * "xtribe":"smatchit", + * "xapp":"presentation", + * "xlang":"fr", + * "xalias":"anonymous", + * "xhash":"anonymous", + * "xdays":0 + * } + * @apiHeaderExample {json} Header-Example: + * { + * Cache-Control: "no-cache", + * Expires: 0, Pragma:"no-cache", + * xalias:"jojo", + * xapp:"presentation", + * xdays:1700733068298 + * xhash:"LS0tLS1CRUdJTiBQR1AgU0lHTkVEIE1FU1NBR0UtLS0tLQpIYXNoOiBTSEE1MTIKCmpvam9fMTcwMDczMzA2ODI5OAotLS0tLUJFR0lOIFBHUCBTSUdOQVRVUkUtLS0tLQoKd25VRUFSWUtBQ2NGZ21WZklJd0prTmFVQ0daRHVUYnBGaUVFTjZlc1ZMSWdURmtPRGFVaDFwUUlaa081Ck51a0FBR09MQVA5OS96c21YeEd0b0VuYnpnekppZDJMcDA3YlBNZ1gwNUdhOUFVWjlCQm91Z0VBOVlYVworYjZIM2JHWHVhbEVOc3BrdUk1alNlTFNUWGNkSStjTExTZk5OQTg9Cj1uVjhNCi0tLS0tRU5EIFBHUCBTSUdOQVRVUkUtLS0tLQo=", + * xlang:"fr", + * xprofils:["anonymous", "pagans"], + * xtribe:"smatchit", + * xtrkversion:1, + * xuuid:"ea1cf73f-27f5-4c69-ab53-197a0feab9b2" + * } + * @apiErrorExample {json} Error-Response: + * HTTP/1/1 400 Not Found + * { + * status:400, + * ref:"middlewares", + * msg:"missingheaders", + * data:["headermissing1"] + * } + * @apiErrorExample {json} Error-Response: + * HTTP/1/1 404 Not Found + * { 
+ * status:404, + * ref:"middlewares" + * msg:"tribeiddoesnotexist", + * data: {xalias} + * } + */ +const checkHeaders = (req, res, next) => { + req.session = {}; + const header = {}; + if (!req.header("xlang") && req.header("Content-Language")) + req.params.xlang = req.header("Content-Language"); + let missingheader = []; + if (log) console.log(currentmod," req.headers", req.headers); + for (const h of conf.api.exposedHeaders) { + if (log) console.log(currentmod, h, req.header( h ) ) + if (req.params[h]) { + header[h] = req.params[h]; + } else if (req.header(h)) { + header[h] = req.header(h); + } else { + missingheader.push(h); + } + } + if (log) console.log( currentmod, ' pass header', header ) + // store in session the header information + req.session.header = header; + // Each header have to be declared + if (missingheader.length>0) { + // bad request + return res.status(400).json({ + status:400, + ref: "middlewares", + msg: "missingheader", + data: {missingheader}, + }); + } + //console.log( req.app.locals.tribeids ) + // xtribe == "town" is used during the setup process + // xtribe == "adminapi" is used to access /adminapi + if ( + !( + ["town","adminapi"].includes(header.xtribe) || req.app.locals.tribeids.includes(header.xtribe) + ) + ) { + return res.status(404).json({ + status:404, + ref: "middlewares", + msg: "tribeiddoesnotexist", + data: { xtribe: header.xtribe }, + }); + } + if (!conf.api.languages.includes(header.xlang)) { + const info="warning language requested does not exist force to english"; + if (log) console.log(currentmod, info); + console.log(Date.now(),currentmod, req.header("xtribe"),info); + header.xlang = "en"; + } + //set anonymous profil + req.session.header.xprofils=["anonymous"] + next(); +}; +module.exports = checkHeaders; diff --git a/apxtri/middlewares/footer.md b/apxtri/middlewares/footer.md new file mode 100644 index 0000000..b219490 --- /dev/null +++ b/apxtri/middlewares/footer.md @@ -0,0 +1 @@ +Documentation Best practices \ No 
newline at end of file diff --git a/apxtri/middlewares/header.md b/apxtri/middlewares/header.md new file mode 100644 index 0000000..144d7f5 --- /dev/null +++ b/apxtri/middlewares/header.md @@ -0,0 +1,123 @@ +## api users and backend developers + +api documentation for routes and middleware has to respect apidoc's rules [https://apidocjs.com/](https://apidocjs.com) + +To update this doc accessible in [https://wal-ants.ndda.fr/apidoc](https://wal-ants.ndda.fr/cdn/apidoc) : + + `$ tribe=adminapi yarn apidoc` + +For api tribe's doc accessible in [https://admin.smatchit.io/apidoc](https://smatchit.io/cdn/apidoc) [:](https://smatchit.io/cdn/apidoc:) + +`$ tribe=smatchit yarn apidoc` + +To get overview check README.md project and the package.json [https://gitea.ndda.fr/apxtri/apxtri](https://gitea.ndda.fr/apxtri/apxtri) + +A special tribe call adminapi in any towns (node), it works the same than all the other tribe except that all their data are synchronize with a blockchain + +Objects manage by adminapi are: pagans (numerique id =alias/public key / private key), notifications (cypher message betxeen alias) , nations (rules apply to all towns belonging to a nations), towns ( a server that host IT ressources disk space, ram, bandwith and rules aplly to all tribe belonging to a town), tribes (( a sharing space to store data as well as api with rules to any person that use it), wwws (web space, dns) + +## Object management (Odmdb) + +An object has a name and is defined by a schema that contain properties key. + +A propertie has a name and a list of caracteristics (type, pattern,format,...) that have to be validate to be accepted. 
+All properties respect the rules [https://json-schema.org/draft/2020-12/schema,](https://json-schema.org/draft/2020-12/schema,) some extra "format" can be added to mutualise recurrent regex patterns + +To access a schema [https://wall-ants.ndda.fr/api/adminapi/schema/tribename/schamname.json](https://wall-ants.ndda.fr/nationchains/schema/nations.json) and language is set by the header in xlang + +A checkjson.js is available to manage all specific formats [https://wall-ants.ndda.fr/Checkjson.js](https://wall-ants.ndda.fr/Checkjson.js) see **Odmdb - schema Checkjson** + +**required**: an array of required properties + +**Additional properties that do not exist in 2020-12/schema :** + +**apxid**: the property used as a unique id + +**apxuniquekey**: array of unique properties + +**apxidx** : array of index definitions + +**apxaccessrights**: object with key profilname and accessrights on properties {profilname:{C:\[properties array\],R:\[properties array\],U:\[\],D:\[\]}} + +Items of an object are stored in files into : + +```plaintext +tribename/objectnames/idx/keyval_objkey.json +tribename/objectnames/itm/uniqueid.json +``` + +## Accessrights: + +An alias is just an identity; to access a tribe, a person must exist with an authenticated alias into /tribes/{tribename}/objects/persons/itm/{alias}.json + +A person has a property profils with a list of profilenames, common profiles are : anonymous (no identity) / pagan (an identity) / person (an identity with access rights into a tribe) / druid (the administrator of a tribe) / mayor (administrator of a town/server) / and any profil can be defined for a tribe + +Each object has an apxaccessrights that is a list of profils and CRUD access per object key. + +Example: an owner on this object can create or delete an item he owns, can read a list of properties and update only some. 
 + +```plaintext +"owner": { + "C" : [], + "D": [], + "R": ["alias","owner","profils","firstname","lastname","dt_birth"], + "U": ["firstname","lastname","dt_birth"] + } +``` + +## api pre-request + +API Endpoint url: **/api/{tribename}/{routename}/xxx** + +Domain name can be an adminapi domain name as well as any tribe's domain name. Check nginx conf in /tribename/nginx + +**Valid header see Middlewares** + +App uses the openpgp.js lib to sign xalias\_xdays (xdays a timestamp integer in milliseconds from Unix Epoch) with a privatekey and store it in xhash. + +/middlewares/isAuthenticated.js checks if (xhash) is a valid signature of the public key; a xhash is valid for 24 hours + +See Pagans models that contain the authentication process + +**api Return in 3 data structures:** + +A - data file from a classical get [https://wall-ants.ndda.fr/Checkjson.js](https://smatchit.io/Checkjson.js) + +B - a json single answer **{status, ref,msg,data}:** + +* status: http code returned +* ref: model/route name reference where the message comes from +* msg: a message template key stored into models/lg/name\_lg.json (where lg is 2 letters language) +* data: an object data used to render the value of the message key. 
+ +C - a json multi answer **{status,multimsg:\[{ref,msg,data}\]}** + + Each {ref,msg,data\] work the same way than B + +To show feedback context message in a language lg => get /api/adminapi/objects/tplstrings/{{model}}\_{{lg}}.json +This contain a json {msg:"mustache template string to render with data"} + +## Add tribe's api: + +Accessible with https://dns/api/tribename/routename/ + +```plaintext +/tribes/tribename/apxtri/routes +/tribes/tribename/apxtri/middlewares +/tribes/tribename/apxtri/models +/tribes/tribename/schema +/tribes/tribename/schema/lg +``` + +```plaintext +// Example of a route +const tribe="smatchit"; +const conftrib = require(`../../../adminapi/objects/tribes/itm/${tribe}.json`); +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const express = require(`../../../adminapi/apxtri/node_modules/express`); +const fs = require(`../../../adminapi/apxtri/node_modules/fs-extra`); +const Nofications = require(`../../../adminapi/apxtri/models/Notifications.js`); +const Appscreens = require(`../models/Appscreens`); +const router=express.Router(); +module.exports=router; +``` \ No newline at end of file diff --git a/apxtri/middlewares/isAuthenticated.js b/apxtri/middlewares/isAuthenticated.js new file mode 100755 index 0000000..12ce180 --- /dev/null +++ b/apxtri/middlewares/isAuthenticated.js @@ -0,0 +1,207 @@ +const fs = require("fs-extra"); +const dayjs = require("dayjs"); +//const path=require('path'); +const glob = require("glob"); +// To debug it could be easier with source code: +// const openpgp = require("/media/phil/usbfarm/apxtri/node_modules/openpgp/dist/node/openpgp.js"); +const openpgp = require("openpgp"); + +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const currentmod='isAuthenticated'; +const log = conf.api.activelog.includes(currentmod) +/** + * @api {get} / - isAuthenticated + * @apiGroup Middlewares + * @apiName isAuthenticated + * @apiDescription - valid if exist 
xalias_xdays_xhash.substr(20,200) in town/tmp/tokens/ + * - if not, + * - valid if xhash signature sign xalias_xdays with alias's publickey. + * - if not valid => not allowed + * - If valid => + * - store a xalias_xdays_xhash.substr (20,200) into /tmp/tokens with xprofils array from person. + * - update header.xprofils from this token + * + * apxtri profils are anonymous, pagans, mayor (on a node server), druid (on a tribe like smatchit). + * + * pagan identity is independant of domain (tribe), by default profils are :['anonymous','pagans']. if this alias exist in a tribe domain as a person then his profils come from /tribes/{tribeId}/objects/person/itm/{alias}.json profils:['anonymous','pagans','person','seeker'] any profils allowed to act on tribe objects. + * + * Each profil have CRUD accessright on object managed in schema in apxaccessrights:{owner,profil:{"C":[],"R":[properties],"U":[properties],"D":[]}}, see Odmdb for details. + * + * A process run once each day to clean up all xhash tmp/tokens oldest than 24 hours. 
+ * + **/ +const isAuthenticated = async (req, res, next) => { + /* + console.log(__dirname) + console.log(path.resolve('../tmp/tokens')) + if (fs.existsSync('../tmp/tokens')) console.log('pass A') + if (fs.existsSync('../tmp/tokens')) console.log('pass B') + */ + const currentday = dayjs().date(); + fs.ensureDirSync(`../tmp/tokens`); + let menagedone = fs.existsSync( + `../../tmp/tokens/menagedone${currentday}` + ); + if (menagedone) console.log(Date.now(),`menagedone${currentday} was it done today?:${menagedone}`); + if (!menagedone) { + // clean oldest + const tsday = dayjs().valueOf(); // now in timestamp format + glob.sync(`../../tmp/tokens/menagedone*`).forEach((f) => { + fs.removeSync(f); + }); + glob.sync(`../../tmp/tokens/*.json`).forEach((f) => { + const fsplit = f.split("_"); + const elapse = tsday - parseInt(fsplit[2]); + //24h 86400000 milliseconde 15mn 900000 + if (elapse && elapse > 86400000) { + fs.remove(f); + } + }); + //clean tmp + glob.sync(`../../tmp/*.txt`).forEach((f) => { + fs.remove(f); + }); + fs.outputFile( + `../../tmp/tokens/menagedone${currentday}`, + "done by middleware/isAUthenticated" + ); + } + //Check register in tmp/tokens/ + if (log) console.log( currentmod," isAuthenticate?", req.session.header, req.body); + + const resnotauth = { + ref: "middlewares", + msg: "notauthenticated", + data: { + xalias: req.session.header.xalias, + xaliasexists: true, + }, + }; + if ( + req.session.header.xalias == "anonymous" || + req.session.header.xhash == "anonymous" + ) { + if (log) console.log(currentmod,"alias anonymous means not auth"); + resnotauth.status = 401; + return res.status(resnotauth.status).json(resnotauth); + } + + let tmpfs = `../../tmp/tokens/${req.session.header.xalias}_${req.session.header.xtribe}_${req.session.header.xdays}`; + //max filename in ext4: 255 characters + tmpfs += `_${req.session.header.xhash.substring( + 150, + 150 + tmpfs.length - 249 + )}.json`; + + const bruteforcepenalty = async (alias, action) => { + const 
sleep = (ms) => { + return new Promise((resolve) => setTimeout(resolve, ms)); + }; + const failstamp = `../../tmp/tokens/${alias}.json`; + if (action == "clean") { + //to reinit bruteforce checker + if (log) console.log(currentmod, "try to clean penalty file ", failstamp); + fs.remove(failstamp, (err) => { + if (err) console.log(Date.now(),currentmod,"Check forcebrut ", err); + }); + } else if (action == "penalty") { + const stamp = fs.existsSync(failstamp) + ? fs.readJSONSync(failstamp) + : { lastfail: dayjs().format(), numberfail: 0 }; + stamp.lastfail = dayjs().format(); + stamp.numberfail += 1; + fs.outputJSON(failstamp, stamp); + if (log) console.log(currentmod,"penalty:", stamp); + await sleep(stamp.numberfail * 100); //increase of 0,1 second the answer time per fail + if (log) console.log(currentmod,"time out penalty"); + } + }; + if (!fs.existsSync(tmpfs)) { + // need to check detached sign + let publickey = ""; + const aliasinfo = `../objects/pagans/itm/${req.session.header.xalias}.json`; + if (fs.existsSync(aliasinfo)) { + publickey = fs.readJsonSync(aliasinfo).publickey; + } else if (req.body.publickey) { + resnotauth.data.xaliasexists = false; + publickey = req.body.publickey; + } + if (publickey == "") { + if (log) console.log(currentmod,"header xalias unknown:",req.session.header.xalias); + resnotauth.status = 404; + resnotauth.data.xaliasexists = false; + return res.status(resnotauth.status).send(resnotauth); + } + if (log) console.log(currentmod,"publickey", publickey); + if (publickey.substring(0, 31) !== "-----BEGIN PGP PUBLIC KEY BLOCK") { + console.log(Date.now(),currentmod,"Publickey is not valid as armored key:", publickey); + await bruteforcepenalty(req.session.header.xalias, "penalty"); + resnotauth.status = 404; + return res.status(resnotauth.status).send(resnotauth); + } + const clearmsg = Buffer.from(req.session.header.xhash, "base64").toString(); + if (clearmsg.substring(0, 10) !== "-----BEGIN") { + if (log) console.log(currentmod,"xhash 
conv is not valid as armored key:", clearmsg); + await bruteforcepenalty(req.session.header.xalias, "penalty"); + resnotauth.status = 404; + return res.status(resnotauth.status).send(resnotauth); + } + if (log) console.log(currentmod, "clearmsg", clearmsg); + let signedMessage="" + const pubkey = await openpgp.readKey({ armoredKey: publickey }); + try{ + signedMessage = await openpgp.readCleartextMessage({ + cleartextMessage: clearmsg, + }); + }catch(err){ + return res.status(422).send({status:422,ref:"Middleware",msg:"unconsistentcleartextmessage",data:{xhash:req.session.header.xhash,clearmsg}}) + } + const verificationResult = await openpgp.verify({ + message: signedMessage, + verificationKeys: pubkey, + }); + if (log) console.log(currentmod,verificationResult); + if (log) console.log(currentmod,verificationResult.signatures[0].keyID.toHex()); + try { + await verificationResult.signatures[0].verified; + if ( + verificationResult.data != + `${req.session.header.xalias}_${req.session.header.xdays}` + ) { + resnotauth.msg = "signaturefailled"; + if (log) console.log(currentmod,`message recu:${verificationResult.data} , message attendu:${req.session.header.xalias}_${req.session.header.xdays}` + ); + await bruteforcepenalty(req.session.header.xalias, "penalty"); + resnotauth.status = 401; + return res.status(resnotauth.status).send(resnotauth); + } + } catch (e) { + resnotauth.msg = "signaturefailled"; + if (log) console.log(currentmod,"erreur", e); + await bruteforcepenalty(req.session.header.xalias, "penalty"); + resnotauth.status = 401; + return res.status(resnotauth.status).send(resnotauth); + } + // authenticated then get person profils (person = pagan for a xtrib) + const person = `../../${req.session.header.xtribe}/objects/persons/itm/${req.session.header.xalias}.json`; + if (log) console.log(currentmod,"Profils tribe/app management"); + if (log) console.log(currentmod,"person", person); + if (fs.existsSync(person)) { + const infoperson = 
fs.readJSONSync(person); + if (log) console.log(currentmod,"infoperson",infoperson); + infoperson.profils.forEach((p) => { + if (!req.session.header.xprofils.includes(p)) req.session.header.xprofils.push(p); + }) + }else{ + if (!req.session.header.xprofils.includes('pagans')) req.session.header.xprofils.push("pagans"); + } + fs.outputJSONSync(tmpfs, req.session.header.xprofils); + } else { + //tmpfs exist get profils from identification process + req.session.header.xprofils = fs.readJSONSync(tmpfs); + } + bruteforcepenalty(req.session.header.xalias, "clean"); + if (log) console.log(currentmod,`${req.session.header.xalias} Authenticated`); + next(); +}; +module.exports = isAuthenticated; diff --git a/apxtri/models/Checkjson.js b/apxtri/models/Checkjson.js new file mode 100755 index 0000000..fa26e8f --- /dev/null +++ b/apxtri/models/Checkjson.js @@ -0,0 +1,381 @@ +/* +This module have to be use in back as well front +can be include in project with + - >into a browser : + - into a node.js : const Checkjson = require( `../nationchains/socialworld/contracts/Checkjson.js`); +*/ +// --## + +const Checkjson = {}; +Checkjson.schema = {}; +Checkjson.schema.properties = {}; +Checkjson.schema.properties.type = {}; +Checkjson.schema.properties.type.string = (str) => typeof str === "string"; +Checkjson.schema.properties.type.array = (val) => Array.isArray(val); +Checkjson.schema.properties.type.object = (val) => + typeof val === "object" && val !== null && !Array.isArray(val); +Checkjson.schema.properties.type.number = (n) => typeof n === "number"; +Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean"; +Checkjson.schema.properties.type.integer = (n) => + n !== "" && !isNaN(n) && Math.round(n) === n; +Checkjson.schema.properties.type.float = (n) => + n !== "" && !isNaN(n) && Math.round(n) !== n; //not yet in json schema +Checkjson.schema.properties.minLength = (str, min) => + typeof str === "string" && str.length >= parseInt(min); 
+Checkjson.schema.properties.maxLength = (str, max) => + typeof str === "string" && str.length <= parseInt(max); +Checkjson.schema.properties.multipleOf = (n, val) => + typeof n === "number" && + typeof val === "number" && + parseFloat(n) / parseFloat(val) - + Math.round(parseFloat(n) / parseFloat(val)) < + 0.0000001; +Checkjson.schema.properties.range = ( + n, + minimum, + exclusiveMinimum, + maximum, + exclusiveMaximum +) => { + //console.log(minimum,exclusiveMinimum,maximum, exclusiveMaximum,n) + if (typeof n !== "number") return false; + if (minimum && parseFloat(n) < parseFloat(minimum)) return false; + if (exclusiveMinimum && parseFloat(n) <= parseFloat(exclusiveMinimum)) + return false; + if (maximum && parseFloat(n) > parseFloat(maximum)) return false; + if (exclusiveMaximum && parseFloat(n) >= parseFloat(exclusiveMaximum)) + return false; + return true; +}; +Checkjson.schema.properties.pattern = (str, pattern) => { + try { + pattern = new RegExp(pattern); + } catch (e) { + console.log("err pattern in checkjon", pattern); + return false; + } + return pattern.test(str); +}; +Checkjson.schema.properties.enum = (str, enumvalues) => { + if (Array.isArray(enumvalues)) { + return typeof str === "string" && enumvalues.includes(str); + } else if (tribeId) { + //enumvalues is a reference of objectname.key + const { tribeId, obj, keyid } = enumvalues.split("."); + return fs.existsSync(`../../../${tribeId}/schema/${obj}/itm/${keyid}.json`); + } else { + return true; + } +}; +// to check a value for a pattern +// Checkjson.schema.properties.pattern(value, properties[p].pattern) +/** + * + * @param {string} str to test + * @param {string} format keyworkd existing in Checkjson.schema.properties.format + * @return null if format does not exist, true or false + */ +Checkjson.testformat = (str, format) => { + if (!Checkjson.schema.properties.format[format]) { + return null; + } + return Checkjson.schema.properties.pattern( + str, + Checkjson.schema.properties.format[format] 
+ ); +}; +// see format https://json-schema.org/understanding-json-schema/reference/string.html#format +// to check a just value with a format use Checkjson.testformat=(value, format) +Checkjson.schema.properties.format = { + "date-time": /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/, + stringalphaonly: /^[A-Za-z0-9]{3,}$/, + time: /[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/, + timehhmm: /^(0[0-9]|1[0-9]|2[0-3]):[0-5][0-9]$/, + date: /\d{4}-[01]\d-[0-3]\d/, + duration: / /, + email: + /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/, + "idn-email": / /, + uuid: /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/, + uri: / /, + url: /^(?:(?:https?|ftp):\/\/)(?:\w+(?::\w+)?@)?(?:(?:[a-z0-9-\.]+\.[a-z]{2,})(?:[-a-z0-9+\._\%\!\\[\]\(\)\,\*\?\&\=\:]*){1,})|(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9][0-9]?))(?:[:\/#][^#]*)?$/, + "uri-reference": / /, + iri: / /, + hostname: / /, + "idn-hostname": / /, + ipv4: /^([0–9]{1,3}.){3}.([0–9]{1,3})$/, + ipv6: 
/^((([0–9A-Fa-f]{1,4}:){7}[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){6}:[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){5}:([0–9A-Fa-f]{1,4}:)?[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){4}:([0–9A-Fa-f]{1,4}:){0,2}[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){3}:([0–9A-Fa-f]{1,4}:){0,3}[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){2}:([0–9A-Fa-f]{1,4}:){0,4}[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){6}((b((25[0–5])|(1d{2})|(2[0–4]d)|(d{1,2}))b).){3}(b((25[0–5])|(1d{2})|(2[0–4]d)|(d{1,2}))b))|(([0–9A-Fa-f]{1,4}:){0,5}:((b((25[0–5])|(1d{2})|(2[0–4]d)|(d{1,2}))b).){3}(b((25[0–5])|(1d{2})|(2[0–4]d)|(d{1,2}))b))|(::([0–9A-Fa-f]{1,4}:){0,5}((b((25[0–5])|(1d{2})|(2[0–4]d)|(d{1,2}))b).){3}(b((25[0–5])|(1d{2})|(2[0–4]d)|(d{1,2}))b))|([0–9A-Fa-f]{1,4}::([0–9A-Fa-f]{1,4}:){0,5}[0–9A-Fa-f]{1,4})|(::([0–9A-Fa-f]{1,4}:){0,6}[0–9A-Fa-f]{1,4})|(([0–9A-Fa-f]{1,4}:){1,7}:))$/, + telephonefr: /^0[1-9][0-9]{8}$/, + imgbase64: /data:image\/(png|jpg|jpeg|svg);base64,(?:[A-Za-z0-9+\/]+)/, + telephoneinter: /^\+*(\d{3})*[0-9,\-]{8,}/, + password: + /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/, + postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/, + pgppublickey: + /^-----BEGIN PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)?$/gm, + pgpprivatekey: + /^-----BEGIN PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)?$/gm, +}; +Checkjson.schema.properties.default; +Checkjson.schema.validation = (schema) => { + /*validate a schema structure*/ + const multimsg = []; + const res = {}; + if (schema.properties) { + Object.keys(schema.properties).forEach((p) => { + const properties = schema.properties; + if ( + properties[p].type && + typeof properties[p].type === "string" && + !Checkjson.schema.properties.type[properties[p].type] + ) { + multimsg.push({ + ref: "Checkjson", + msg: "schemaerrtypedoesnotexist", + data: { propertie: p, type: 
properties[p].type }, + }); + } + if (properties[p].type && typeof properties[p].type === "object") { + if (properties[p]["$ref"]) { + //This is manage by Odmdb.schema to load recursively complex schema + multimsg.push({ + ref: "Checkjson", + msg: "externalrefnotload", + data: { propertie: p, ref: properties[p]["$ref"] }, + }); + } + //case type=="object" with properties + if (properties[p].properties) { + const checksub = Checkjson.schema.validation(properties[p]); + if (checksub.status != 200) { + multimsg = multimsg.concat(checksub.multimsg); + } + } + // if not $ref or no properties then any object is accepted + } + + if ( + properties[p].format && + !Checkjson.schema.properties.format[properties[p].format] + ) { + multimsg.push({ + ref: "Checkjson", + msg: "schemaerrformatdoesnotexist", + data: { propertie: p, format: properties[p].format }, + }); + } + if (properties[p].enum && !Array.isArray(properties[p].enum)) { + multimsg.push({ + ref: "Checkjson", + msg: "schemaerrenumnotarray", + data: { propertie: p, enum: properties[p].enum }, + }); + } + }); + } + // 406 means not acceptable + if (multimsg.length > 0) { + res.status = 406; + res.multimsg = multimsg; + } else { + res.status = 200; + res.ref = "Checkjson"; + res.msg = "validcheck"; + } + return res; +}; +/** + * Check data with a schema + * + * @param {object} schema a json schema + * @param {*} data some data to check using schema + * @param {*} withschemacheck boolean that force a schema check (usefull on modification schema) + * @returns {status: 200, ref:"Checkjson", msg:"validcheck", data:{itm:object}} + * {status:417, multimsg:[{re,msg,data}],data:{itm:object}} + */ +Checkjson.schema.data = (schema, data, withschemacheck) => { + /* validate a data set with a schema in a context ctx */ + /* + console.log('#################') + console.log(schema); + console.log('---------') + console.log(data) + */ + const propertiescheck = (properties, subdata) => { + // properties 
={prop1:{type,format},prop2:{type:object,...}} + // subdata={prop1,prop2} + // Return [] => no error, else 1 item per error {msg,ref:checkjson,data} + const propertielist = Object.keys(properties); + console.log(propertielist); + Object.keys(subdata).forEach((kdata) => { + if (!propertielist.includes(kdata)) { + delete subdata[kdata]; + } + }); + let multimsg = []; + propertielist.forEach((p) => { + //type is mandatory in a propertie + if (subdata.hasOwnProperty(p)) { + if (properties[p].properties) { + //means it is a subobject + multimsg = multimsg.concat( + propertiescheck(properties[p].properties, subdata[p]) + ); + } + //type can be a list of string; number, array, boolean, object, null + //console.log(p,properties[p].type ) + const typlist = + properties[p].type && typeof properties[p].type === "string" + ? [properties[p].type] + : properties[p].type; + let valid = false; + typlist.forEach((typ) => { + // at least one test have to be valid + if (Checkjson.schema.properties.type[typ](subdata[p])) valid = true; + }); + if (!valid) + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { key: p, value: subdata[p] }, + }); + if ( + properties[p].minLength && + !Checkjson.schema.properties.minLength( + subdata[p], + properties[p].minLength + ) + ) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { + key: p, + value: subdata[p], + minLength: properties[p].minLength, + }, + }); + } + if ( + properties[p].maxLength && + !Checkjson.schema.properties.maxLength( + subdata[p], + properties[p].maxLength + ) + ) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { + key: p, + value: subdata[p], + maxLength: properties[p].maxLength, + }, + }); + } + if ( + properties[p].multipleOf && + !Checkjson.schema.properties.multipleOf( + subdata[p], + properties[p].multipleOf + ) + ) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { + key: p, + value: subdata[p], + multipleOf: 
properties[p].multipleOf, + }, + }); + } + if ( + properties[p].minimum || + properties[p].maximum || + properties[p].exclusiveMinimum || + properties[p].exclusiveMaximum + ) { + // test range + if ( + !Checkjson.schema.properties.range( + subdata[p], + properties[p].minimum, + properties[p].exclusiveMinimum, + properties[p].maximum, + properties[p].exclusiveMaximum + ) + ) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { + key: p, + value: subdata[p], + minimum: properties[p].minimum, + maximum: properties[p].maximum, + exclusiveMinimum: properties[p].exclusiveMinimum, + exclusiveMaximum: properties[p].exclusiveMaximum, + }, + }); + } + } + if ( + properties[p].enum && + !Checkjson.schema.properties.enum(subdata[p], properties[p].enum) + ) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { key: p, value: subdata[p], enumlst: properties[p].enum }, + }); + } + if (properties[p].format) { + properties[p].pattern = + Checkjson.schema.properties.format[properties[p].format]; + } + if ( + properties[p].pattern && + !Checkjson.schema.properties.pattern( + subdata[p], + properties[p].pattern + ) + ) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertie", + data: { key: p, value: subdata[p], pattern: properties[p].pattern }, + }); + } + } else if (schema.required && schema.required.includes(p)) { + multimsg.push({ + ref: "Checkjson", + msg: "dataerrpropertierequired", + data: { key: p, required: true }, + }); + } + }); + return multimsg; + }; //end propertiescheck() + + if (withschemacheck) { + const validschema = Checkjson.schema.validation(schema); + if (validschema.status != 200) return validschema; + } + let multi = propertiescheck(schema.properties, data); + const res = {}; + + if (multi.length > 0) { + res.status = 417; + res.multimsg = multi; + } else { + res.status = 200; + res.ref = "Checkjson"; + res.msg = "validcheck"; + } + if (schema.apxid) { + res.data = { apxid: data[schema.apxid], itm: data }; 
+ } + return res; +}; +if (typeof module !== "undefined") module.exports = Checkjson; diff --git a/apxtri/models/Contracts.js b/apxtri/models/Contracts.js new file mode 100755 index 0000000..0d14f16 --- /dev/null +++ b/apxtri/models/Contracts.js @@ -0,0 +1,113 @@ +const fs = require( 'fs-extra' ); +const glob = require( 'glob' ); +const moment = require( 'moment' ); +const axios = require( 'axios' ); + +const conf=require(`../conf.json`) + +/* +Model that will process actions plan for each client like sending email campain, or anything that +are plan in /tribes/tribeid/actions/todo +*/ +const Cards = {}; //require('../../models/Cards'); +const Contracts = {}; +/* +Send if envoicampain a liste of email in param.msg.destperso with param.headers +if not envoicampain, it just return a test about what to send +@param = {headers, msg:{destperso}} +*/ +Contracts.sendcampain = async ( param, envoicampain ) => { + if( envoicampain ) { + // Carefull w the action post outputs/msg just wait the feedback of the 1st message + const retcampain = await axios.post( 'https://mail.maildigit.fr/outputs/msg', param.msg, { + headers: param.headers + } ); + if( retcampain.status !== 200 ) { + console.log( "err", retcampain.payload.moreinfo ); + fs.appendFileSync( `${conf.tribes}/log_erreurglobal.txt`, moment( new Date() ) + .format( 'YYYYMMDD HH:mm:ss' ) + ' - IMPOSSIBLE TO SEND CAMPAIN TODO for :' + param.tribeid + ' -- ' + retcampain.payload.moreinfo + '\n', 'utf-8' ); + }; + return retcampain; + } else { + // permet de tester ce qu'il y a à envoyer + let premieremail = ""; + for( let i = 0; i < param.msg.destperso.length; i++ ) { + premieremail += param.msg.destperso[ 0 ].email + ","; + } + return { + status: 201, + payload: { + info: [ 'simplecomptage' ], + model: 'Contracts', + moreinfo: "#email: " + param.msg.destperso.length + " - 5 1st emails: " + premieremail + } + }; + } +} +Contracts.initActiontodo = async ( envoie ) => { + const datedeb = moment( new Date() ) + .format( 
'YYYYMMDD HH:mm:ss' ); + let todo, actiondone; + let log = { + nbaction: 0, + nbactionexec: 0, + nbactionerr: 0, + actionlist: "" + }; + const listclient = fs.readJsonSync( `${conf.tribes}/tribeids.json` ); + for( let clid in listclient ) { + console.log( listclient[ clid ] ); + let listaction = glob.sync( `${conf.tribes}/${listclient[clid]}/actions/todo/*.json` ); + for( let action in listaction ) { + console.log( listaction[ action ] ) + log.nbaction++; + todo = fs.readJsonSync( listaction[ action ] ); + let passdate = true; + // currentdate doit etre après la startDate si existe et avant valideuntilDate si existe + // console.log('test now est avant date start ', moment() < moment(todo.startDate, 'YYYYMMDD HH:mm:ss').toDate()); + if( todo.startDate && ( moment() < moment( todo.startDate, 'YYYYMMDD HH:mm:ss' ) + .toDate() ) ) { + passdate = false; + }; + // currentdate ne doit pas depasser la date de validité de la tache + // console.log('test now est après la date de validite ', moment() > moment(todo.validuntilDate, 'YYYYMMDD HH:mm:ss').toDate()); + if( todo.valideuntilDate && ( moment() > moment( todo.validuntilDate, 'YYYYMMDD HH:mm:ss' ) + .toDate() ) ) { + passdate = false; + }; + // currentdate + if( passdate && todo.action && todo.error == "" ) { + log.nbactionexec++; + const actiondone = await Contracts[ todo.action ]( todo, envoie ); + todo.datesRun.push( moment( new Date() ) + .format( 'YYYYMMDD HH:mm:ss' ) ); + //console.log("actiondone" + log.actionlist += "STATUS:" + actiondone.status + " -- " + listaction[ action ] + "\n"; + if( actiondone.status == 200 ) { + todo.error = ""; + } else { + log.nbactionerr++; + todo.error += "status : " + actiondone.status + ' ' + actiondone.payload.moreinfo; + }; + if( parseInt( todo.maxnumberoftime ) && todo.maxnumberoftime != "999" && ( todo.datesRun.length >= parseInt( todo.maxnumberoftime ) ) ) { + //archive en done this triggeraction + fs.outputJsonSync( listaction[ action ].replace( '/todo/', '/done/' ), todo, 
{ + spaces: 2 + } ); + fs.unlinkSync( listaction[ action ] ); + } else { + fs.outputJsonSync( listaction[ action ], todo, { + spaces: 2 + } ); + } + } else { + log.actionlist += "STATUS : not executed " + listaction[ action ] + "\n"; + }; + }; + }; + const trace = "###################### LOGS ####################\nSTART:" + datedeb + " END:" + moment( new Date() ) + .format( 'YYYYMMDD HH:mm:ss' ) + "\n nombre d'actions analysées : " + log.nbaction + " dont executées : " + log.nbactionexec + " dont en erreur: " + log.nbactionerr + "\n" + log.actionlist; + fs.appendFileSync( `${conf.tribes}/log.txt`, trace, 'utf-8' ); + return "done"; +} +module.exports = Contracts; diff --git a/apxtri/models/Nations.js b/apxtri/models/Nations.js new file mode 100755 index 0000000..caf7119 --- /dev/null +++ b/apxtri/models/Nations.js @@ -0,0 +1,260 @@ +const bcrypt = require("bcrypt"); +const fs = require("fs-extra"); +const glob = require("glob"); +const jwt = require("jwt-simple"); +const axios = require("axios"); +const path = require("path"); +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const Odmdb = require("./Odmdb.js"); +// lowercase 1st letter is normal +const towns = require("./Towns.js"); +const pagans = require("./Pagans.js"); +/* +Blockchain manager +* Manage network directory of nations and towns +* read Blockchain and search, +* submit a transaction (now) or contract (futur) to store from userA.pubkey to userB.pubkey a number of AXESS +* mine to be able to register a block and create AXESS +* manage APIXP rules 20 M APIXP 1AXESS = 1 block validation +* manage contract = action if something appened validate by a proof of work +*/ +const Nations = {}; +Nations.init = () => { + console.group("init Nations"); +}; + +Nations.chaintown = (nationId, townId) => { + /** + * if not already exist Add a requested town into conf.towns.push({ "townId": "wall", "nationId": "ants", "dns": "wall-ants.ndda.fr" }) + */ +}; + 
/**
 * For each apxtri object collection, compare the local conf.json
 * lastupdate with every town in dnstownlist and pull the freshest index.
 * @param {array} dnstownlist towns [{townId,dns},...] to query
 * @param {object} objectidx objectname -> idx file, e.g. {pagans:"alias_all.json"}
 * @returns {object} {status:200,...} immediately.
 * NOTE(review): the axios calls are fire-and-forget (no await); the 200
 * answer does NOT mean the updates completed — confirm callers accept that.
 */
Nations.updateobjectsfromfreshesttown = (dnstownlist, objectidx) => {
  const localversion = {};
  const objlist = Object.keys(objectidx);
  objlist.forEach((o) => {
    // create a default conf.json for the collection when missing
    let objconf = {
      name: o,
      schema: `../../schema/${o}.jsons`,
      lastupdate: -1,
    };
    if (fs.existsSync(`../../${o}/conf.json`)) {
      objconf = fs.readJsonSync(`../../${o}/conf.json`);
    } else {
      fs.outputJsonSync(`../../${o}/conf.json`, objconf);
    }
    localversion[o] = [conf.dns[0], objconf.lastupdate];
  });
  for (let t = 0; t < dnstownlist.length; t++) {
    if (conf.townId == dnstownlist[t].townId) continue; // never query itself
    const promiseconf = [];
    const objecttotest = [];
    objlist.forEach((o) => {
      objecttotest.push(o);
      promiseconf.push(
        axios.get(`https://${dnstownlist[t].dns}/nationchains/${o}/conf.json`)
      );
    });
    Promise.all(promiseconf)
      .then((reps) => {
        const promiseidx = [];
        const objecttoupdate = [];
        const objlastupdate = [];
        for (let i = 0; i < objecttotest.length; i++) {
          // only fetch collections fresher than our local copy
          if (
            parseInt(reps[i].data.lastupdate, 10) >
            parseInt(localversion[reps[i].data.name][1], 10)
          ) {
            objecttoupdate.push(objecttotest[i]);
            objlastupdate.push(reps[i].data.lastupdate);
            promiseidx.push(
              axios.get(
                `https://${dnstownlist[t].dns}/nationchains/${
                  reps[i].data.name
                }/idx/${objectidx[reps[i].data.name]}`
              )
            );
          }
        }
        Promise.all(promiseidx)
          .then((rets) => {
            for (let j = 0; j < objecttoupdate.length; j++) {
              Odmdb.updatefromidxall(
                objecttoupdate[j],
                objectidx[objecttoupdate[j]],
                rets[j].data,
                objlastupdate[j]
              );
            }
          })
          .catch((err) => {
            console.log("ERR get idx data");
            console.log(err);
          });
      })
      .catch((err) => {
        console.log("ERR get conf lastupdate");
        console.log(err);
      });
  }
  return { status: 200, ref: "Nations", msg: "updated", data: {} };
};
/**
 * Legacy town synchronisation (production only): push the local node
 * state to every known instance and merge whatever they answer.
 * FIX: glob.Sync -> glob.sync (TypeError), the node file is JSON so it is
 * read with readJsonSync (readFileSync returned a string that silently
 * ignored property assignments), `newdata` was an implicit global, and
 * both error paths referenced an out-of-scope `data`.
 */
Nations.synchronizeold = () => {
  if (process.env.NODE_ENV != "prod") {
    return {}; // not concerned outside production
  }
  const initcurrentinstance = {
    fixedIP: "",
    lastblocknumber: 0,
    firsttimeupdate: 0,
    lastimeupdate: 0,
    positifupdate: 0,
    negatifupdate: 0,
    pubkeyadmin: "",
    tribeids: [],
    logins: [],
    knowninstance: [],
  };
  let currentinstance = initcurrentinstance;
  try {
    currentinstance = fs.readJsonSync(
      `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${conf.rootURL}`
    );
  } catch (err) {
    console.log("first init");
  }
  const loginsglob = fs.readJsonSync(`${conf.tmp}/loginsglob.json`, "utf-8");
  currentinstance.logins = Object.keys(loginsglob);
  currentinstance.tribeids = [...new Set(Object.values(loginsglob))];
  currentinstance.instanceknown = glob.sync(
    `${conf.tribes}/${conf.mayorId}/nationchains/nodes/*`
  );
  // save the refreshed local state before contacting the other nodes
  fs.outputJsonSync(
    `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${conf.rootURL}`,
    currentinstance
  );
  // proof of work / rerun logic kept as in the original comments
  currentinstance.instanceknown.forEach((u) => {
    if (u == conf.rootURL) return;
    const nodefile = `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`;
    // send currentinstance info and get back the state of the remote node
    axios
      .post(`https://${u}/nationchains/push`, currentinstance)
      .then((rep) => {
        // NOTE(review): was rep.payload.moreinfo — axios exposes the
        // response body as rep.data; confirm the remote answer shape.
        const newdata = rep.data.moreinfo;
        fs.readJson(nodefile, (err, data) => {
          if (err) {
            // FIX: on read error `data` is undefined; start a fresh record
            data = { ...initcurrentinstance };
            data.negatifupdate += 1;
            data.lastimeupdate = Date.now();
          } else {
            data.positifupdate += 1;
            data.lastimeupdate = Date.now();
            data.tribeids = newdata.tribeids;
            data.logins = newdata.logins;
            data.lastblocknumber = newdata.lastblocknumber;
            newdata.knowninstance.forEach((k) => {
              if (!data.knowninstance.includes(k)) {
                data.knowninstance.push(k);
                // init the domain record for the next update round
                initcurrentinstance.firsttimeupdate = Date.now();
                fs.outputJson(
                  `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${k}`,
                  initcurrentinstance,
                  "utf-8"
                );
              }
            });
          }
          fs.outputJson(nodefile, data);
        });
      })
      .catch((err) => {
        // remote node not available: record the failure
        // FIX: the original referenced an out-of-scope `data` here
        let data = { ...initcurrentinstance };
        if (fs.existsSync(nodefile)) data = fs.readJsonSync(nodefile);
        data.negatifupdate += 1;
        data.lastimeupdate = Date.now();
        fs.outputJson(nodefile, data);
      });
  });
};

/**
 * Create a town in a nation from a townSetup conf
 * {object, nationId, townId, dns}.
 * FIX: ObjectKeys -> Object.keys; the Odmdb.add call and the final paths
 * and messages referenced undeclared objectpath/townId/nationId.
 * @returns {object} {status,info}
 */
Nations.create = (conf) => {
  const res = {};
  if (conf.object == "towns") {
    Odmdb.create("nationchains/socialworld/objects", "towns", conf);
  }
  const nations = fs.readJsonSync(
    "./nationchains/nations/idx/nationId_all.json"
  );
  if (!Object.keys(nations).includes(conf.nationId)) {
    res.status = 404;
    res.info = `your nationId ${conf.nationId} does not exist you have to choose an existing one`;
    return res;
  }
  // NOTE(review): "opjects" looks like a typo'd path but is kept as-is —
  // confirm against the actual directory layout before changing it.
  const towns = fs.readJsonSync(
    "./tribes/adminapi/opjects/towns/idx/townId_all.json"
  );
  if (towns[conf.nationId].includes(conf.townId)) {
    res.status = 409;
    res.info = `This conf.townId already exist you have to find a unique town name`;
    return res;
  }
  const towndata = {
    uuid: conf.townId,
    nationid: conf.nationId,
    url: `${conf.townId}.${conf.nationId}.${conf.dns}`,
    status: conf.dns == "unchain" ? "unchain" : "tochain",
  };
  const metatown = fs.readJsonSync(
    "./nationchains/socialworld/metaobject/towns.json"
  );
  Odmdb.add("nationchains/socialworld/objects", towns, metatown, towndata);
  fs.outputJsonSync(
    `./nationchains/socialworld/objects/towns/${conf.townId}.json`,
    towndata
  );
  res.status = 200;
  res.info = `${conf.townId} create for ${conf.nationId} nation`;
  return res;
};

module.exports = Nations;

// ======================================================================
// apxtri/models/Notifications.js (separate new file in this patch)
// Communication between pagans/persons: email, sms, volatile notifications
// ======================================================================
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const axios = require("axios");
const dayjs = require("dayjs");
const Mustache = require("mustache");
const Checkjson = require(`./Checkjson.js`);
//const smtp = require("smtp-client");
const nodemailer = require("nodemailer");
const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`);
const currentmod = "Notifications";
const log = conf.api.activelog.includes(currentmod);

const Notifications = {};

/**
 * Return the stored notifications of an alias for a tribe.
 * FIX: the original read req.params.* although no `req` exists in this
 * scope (ReferenceError); the function parameters are used instead.
 * @param {string} alias
 * @param {string} tribeId
 */
Notifications.get = (alias, tribeId) => {
  const notiffile = `../../${tribeId}/notifications/${alias}.json`;
  const msg = fs.existsSync(notiffile) ? fs.readJSONSync(notiffile) : {};
  return {
    status: 200,
    ref: "Notification",
    msg: "Messagelist",
    data: { notif: [{ tribeId, msg }] },
  };
};
/**
 * Statistics of registered emails/phones per mailing list for a tribe;
 * also dumps a csv to ../../{tribe}/mailinglst.csv (temporary storage
 * until a proper back-end exists).
 */
Notifications.statmaillist = (tribe) => {
  const statinfo = {};
  let csv = "email/phone;name;srckey\n";
  const src = `../../${tribe}/objects/maillinglists/*.json`;
  if (log) console.log(currentmod, path.resolve(src));
  glob.sync(src).forEach((f) => {
    const name = path.basename(f, ".json");
    const mlst = fs.readJSONSync(f);
    Object.keys(mlst).forEach((c) => {
      csv += `"${c}";"${name}";"${mlst[c].srckeys.join("-")}"\n`;
      mlst[c].srckeys.forEach((s) => {
        if (!statinfo[s]) statinfo[s] = {};
        if (!statinfo[s][name]) statinfo[s][name] = 0;
        statinfo[s][name]++;
      });
    });
  });
  fs.outputFileSync(`../../${tribe}/mailinglst.csv`, csv, "utf-8");
  return {
    status: 200,
    ref: "Notifications",
    msg: "statistics",
    data: statinfo,
  };
};

/**
 * Register an email (typekey "email") or phone (typekey "telephone") into
 * a tribe's mailing list file, tracking source keys and uuids.
 */
Notifications.registertolist = (key, typekey, tribe, mlist, srckey, uuid) => {
  key = key.toLowerCase();
  typekey = typekey == "telephone" ? "telephonefr" : typekey;
  if (!Checkjson.testformat(key, typekey))
    return {
      status: 400,
      ref: "Notifications",
      msg: "formaterror",
      data: { fielderr: typekey, format: typekey },
    };
  const destin = `../../${tribe}/objects/maillinglists/${typekey}_${mlist}.json`;
  if (!fs.existsSync(destin)) {
    // the list file must be created (empty {}) by hand: write attempts to
    // unknown lists are refused on purpose
    console.log(
      `######## Attention tentative d'ecriture non autorisé,le fichier n'existe pas ${destin} créer le à la main vide {}`
    );
    return {
      status: 406,
      ref: "Notifications",
      msg: "destinnotallow",
      data: { destin },
    };
  }
  // existence was checked above, so read directly
  const filestorage = fs.readJsonSync(destin);
  if (filestorage[key]) {
    filestorage[key].dt_update = dayjs().toISOString();
    if (!filestorage[key].srckeys.includes(srckey))
      filestorage[key].srckeys.push(srckey);
    if (!filestorage[key].uuids.includes(uuid))
      filestorage[key].uuids.push(uuid);
  } else {
    filestorage[key] = {
      dt_create: dayjs().toISOString(),
      srckeys: [srckey],
      uuids: [uuid],
    };
  }
  fs.outputJSONSync(destin, filestorage);
  return {
    status: 200,
    ref: "Notifications",
    msg: "registersuccess",
    data: { key, typekey, tribe, mlist, srckey, uuid },
  };
};
/**
 * Unsubscribe an email or phone from a mailing list for a tribe.
 * TODO: not implemented yet (the original body only lowercases the key).
 */
Notifications.unregisterfromlist = (key, typekey, tribe, mlist) => {
  key = key.toLowerCase();
};
/**
 * Message to send to an alias from an anonymous or not.
 * TODO: not implemented yet.
 */
Notifications.sendcontact = (body, header) => {};
/**
 * Send an SMS through the configured provider (mailjet-style API, see
 * https://dev.mailjet.com/sms/guides/send-sms-api/#authentication).
 * conf.sms = {url, Token, From}; a tribe itm file may override it.
 * @param {object} data {To:"+336...", Text:"..."}
 * @param {string} tribeId tribe whose sms conf overrides the global one
 */
Notifications.sendsms = async (data, tribeId) => {
  if (!conf.sms) {
    return {
      status: 412,
      ref: "Notifications",
      msg: "missingconf",
      data: { tribe: tribeId },
    };
  }
  // FIX: was `let missingk = [][("To","Text")].forEach(...)` which throws
  // (indexing [] with a comma expression), plus a `lenght` typo below.
  const missingk = [];
  ["To", "Text"].forEach((k) => {
    if (!data[k]) missingk.push(k);
  });
  if (missingk.length > 0) {
    return {
      status: 428,
      ref: "Notifications",
      msg: "missingdata",
      data: { missingk },
    };
  }
  let confsms = conf.sms;
  // FIX: was req.session.header.xtribe — no req in scope; use tribeId
  const conftribfile = `../../adminapi/objects/tribes/itm/${tribeId}.json`;
  if (fs.existsSync(conftribfile)) {
    const conftrib = fs.readJSONSync(conftribfile);
    if (conftrib.sms) confsms = conftrib.sms;
  }
  data.From = confsms.From;
  // FIX: axios.post(url, data, config) — the headers and body were
  // previously packed into the 2nd argument and sent AS the request body.
  // NOTE(review): conf documents {url,Token,From} but the original read
  // MJ_TOKEN; both are accepted pending confirmation.
  const sendsms = await axios.post(confsms.url, data, {
    headers: {
      Authorization: `Bearer ${confsms.Token || confsms.MJ_TOKEN}`,
      "Content-Type": "application/json",
    },
  });
  if (sendsms.status == 200) {
    return {
      status: 200,
      ref: "Notifications",
      msg: "successfullsentsms",
      data: {},
    };
  }
  return {
    status: sendsms.status,
    ref: "Notifications",
    msg: "errsendsms",
    data: { err: sendsms.data },
  };
  /* successful provider answer example:
  { "From":"MJPilot","To":"+33600000000","Text":"...","MessageId":"...",
    "SmsCount":1,"Cost":{"Value":0.0012,"Currency":"EUR"},
    "Status":{"Code":2,"Name":"sent","Description":"Message sent"} }
  */
};

/**
 * Email publipostage: render the template for each address of
 * data.emailsto and hand it to Notifications.sendmail.
 * data may also contain Cc, Bcc (arrays of emails) and attachments
 * (array of server file names).
 * NOTE(review): sends happen inside async callbacks, so the returned
 * status reflects input validation only, never actual delivery — the
 * original behaved the same way; confirm whether callers rely on it.
 */
Notifications.manageemail = (data, template, tribe) => {
  if (!data.emailsto || data.emailsto.length == 0) {
    return {
      status: 406,
      ref: "Notifications",
      msg: "emailstomissing",
      data: data,
    };
  }
  if (typeof data.emailsto === "string") data.emailsto = [data.emailsto];
  if (!fs.existsSync(path.resolve(template))) {
    return {
      status: 404,
      ref: "Notification",
      msg: "templatenotfound",
      data: { template: path.resolve(template) },
    };
  }
  const tplemail = require(path.resolve(template));
  let sendit = { status: 200, ref: "Notifications", msg: "successfullsend" };
  data.emailsto.forEach(async (e, i) => {
    if (Checkjson.testformat(e, "email")) {
      const dat = {};
      dat.to = e;
      dat.subject = Mustache.render(tplemail.subject, data);
      dat.html = Mustache.render(tplemail.html, data);
      dat.text = Mustache.render(tplemail.text, data);
      dat.Cc = tplemail.Cc;
      dat.Bcc = tplemail.Bcc;
      /* @TODO issue with data.Cc need to test
      if (data.Cc){ dat.Cc+=","+data.Cc.join(',') }
      if (data.Bcc){ dat.Bcc+=","+data.Bcc.join(',') }
      */
      // FIX: extra attachments were pushed into the REQUIRED template
      // module (accumulating across calls because require caches) and
      // were never attached to the message; copy them onto the message.
      dat.attachments = [...(tplemail.attachments || [])];
      if (data.attachments) dat.attachments.push(...data.attachments);
      const ret = await Notifications.sendmail(dat, tribe);
      if (i == 0 && ret.status != 200) sendit = ret;
    } else {
      // badly formatted email @todo add a log file to follow it
    }
  });
  return sendit;
};
Mustache.render(tplemail.html, data); + dat.text = Mustache.render(tplemail.text, data); + dat.Cc=tplemail.Cc + dat.Bcc=tplemail.Bcc + /* @TODO issue with data.Cc need to test + if (data.Cc){ + dat.Cc+=","+data.Cc.join(',') + } + if (data.Bcc){ + dat.Bcc+=","+data.Bcc.join(',') + } + */ + if (data.attachments){ + data.attachments.forEach(a=>tplemail.attachments.push(a)) + } + if (i == 0) { + sendit = await Notifications.sendmail(dat, tribe); + if (sendit.status != 200) return {status:406,ref:"Notifications",msg:"emailnotsent"}; + } else { + Notifications.sendmail(dat, tribe); + } + } else { + // not a well formated email @todo add a log file to follow it + } + }); + return sendit; +}; + +Notifications.sendmail = async (data, tribe) => { + /** + * * in conf global or in /itm/{tribe}.json must have valid parameter emailcontact must be authorized by the smtp + * "emailcontact": "noreply@smatchit.io", + * "smtp": { + * "host": "smtp-relay.brevo.com", + * "port": 587, + * "secure": false, + * "auth": { + * "user": "xx", + * "pass": "yy" + * } + * } + * See https://nodemailer.com/message/ for available fields to add + * @param {string} [data.from] an email authorized by smtp used priority from header xtribe + * @param {string} data.to list of email separate by , + * @param {string} data.subject + * @param {string} data.html + * @param {string} data.text + * @param {string} [data.Cc] list of email in copy + * @param {string} [data.Bcc] list of email in hidden copy + * @param {string} [data.attachments] array of + * {filename:'filename.txt',content:'txt'}, + * {filename:'img.svg',path:"https://....svg", contentType:'image/svg'} + * {filename:'img.svg',path:"https://....svg", contentType :'text/plain'} + * {filename:'img.png',path:"data:text/svg;base64.aGVsbG8gd29ybGQ="} + * + * @example data + * {"to":"wall-ants@ndda.fr", + * "subject":"Test", + * "html":"
+
+ Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking + |
+
+
+ Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking + |
+
PACK JOB SEARCH
",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32934,1108,,,0, +32935,BK_F2F_B_JOB_10x1H-02,"My Job Search Workshop Session N°2 - 1H","PACK JOB SEARCH
/**
 * Convert a CSV string to an array of JSON records, one per data line.
 *
 * Separators appearing inside double-quoted cells are protected via the
 * CARSEPARATOR placeholder (utils.replacecarbtweendblquote) then restored.
 * Headers must all belong to options.champs; headers listed in options.array
 * collect repeated values into arrays (optionally split by options.arraysplitsep);
 * headers listed in options.numericfield must parse as numbers.
 *
 * @param {string} csv - raw CSV content
 * @param {object} options - {retln, sep, champs, array, arraysplitsep,
 *                            numericfield, replacespecialcarCsv2Json}
 * @param {Function} callback - callback(errorMessage, null) on failure,
 *                              callback(null, result) on success
 * @returns {*} whatever callback returns
 */
utils.csv2json = (csv, options, callback) => {
  // If a stubborn Excel file misbehaves: open it in LibreOffice Calc and save
  // as csv utf8 with ";" separator and '"' quotes, "content as shown".
  console.log("\n--------------- CSV2JSON ---------------\n");
  // default CSV options
  if (!options.retln) options.retln = "\n";
  if (csv.indexOf("\n\r") > -1) options.retln = "\n\r";
  if (!options.sep) options.sep = ";";
  // protect separators that appear inside double-quoted cells
  csv = utils.replacecarbtweendblquote(csv, options.sep, "CARSEPARATOR");
  if (!options.replacespecialcarCsv2Json) {
    options.replacespecialcarCsv2Json = [];
  } else if (typeof options.replacespecialcarCsv2Json === "string") {
    // SECURITY(review): eval of a caller-supplied string to build regexes —
    // only acceptable with fully trusted option sources; consider replacing
    // with a declarative [pattern, flags, replacement] descriptor format.
    options.replacespecialcarCsv2Json = eval(
      options.replacespecialcarCsv2Json
    );
  }
  const result = [];
  const lines = csv.split(options.retln);
  const headers = utils.getHeaders(lines, options.sep);
  let unknownHeaders = "";
  headers.forEach((header) => {
    // any header absent from the declared field list is reported as unknown
    if (options.champs.indexOf(header) === -1) {
      unknownHeaders += `${header}, `;
    }
  });
  if (unknownHeaders !== "") {
    return callback(`CSV2JSON() - Champs inconnus : ${unknownHeaders}`, null);
  }
  // BUGFIX: classic for-loop instead of lines.forEach so that an error can
  // abort the conversion — previously `return callback(err, null)` only left
  // the forEach callback, and callback was later invoked AGAIN with a result.
  for (let index = 0; index < lines.length; index++) {
    const line = lines[index];
    // skip the headers line and empty lines
    if (index === 0 || line.replace(/\s/g, "").length === 0) continue;
    // origincsv keeps the source line for debugging
    const currentLineData = { origincsv: line, linenumber: index };
    const currentLine = line.split(options.sep);
    for (let j = 0; j < headers.length; j++) {
      if (!currentLine[j]) continue;
      let currentValue = currentLine[j].trim();
      // restore every protected separator
      // (BUGFIX: string .replace only handled the first occurrence)
      currentValue = currentValue.replace(/CARSEPARATOR/g, options.sep);
      options.replacespecialcarCsv2Json.forEach((re) => {
        currentValue = currentValue.replace(re[0], re[1]);
      });
      if (headers[j].includes("EMAIL")) {
        // strip every whitespace — BUGFIX: operate on the cleaned value,
        // the original re-read the raw cell and lost the restorations above
        currentValue = currentValue.replace(/\s/g, "");
      }
      if (options.numericfield && options.numericfield.includes(headers[j])) {
        // accept french decimal commas
        currentValue = currentValue.replace(/\,/g, ".");
        // BUGFIX: parseFloat never throws, so the old try/catch was dead
        // code — detect non-numeric values through NaN instead
        if (Number.isNaN(Number.parseFloat(currentValue))) {
          return callback(
            `${headers[j]} contiens la valeur -${currentValue}- et devrait être numerique`,
            null
          );
        }
      }
      if (currentValue) {
        if (options.array && options.array.indexOf(headers[j]) > -1) {
          // header declared as array: repeated occurrences accumulate
          if (!currentLineData[headers[j]]) {
            currentLineData[headers[j]] = [];
          }
          if (options.arraysplitsep) {
            currentValue.split(options.arraysplitsep).forEach((v) => {
              currentLineData[headers[j]].push(v);
            });
          } else {
            currentLineData[headers[j]].push(currentValue);
          }
        } else {
          // duplicated header that was not declared as an array is an error
          if (currentLineData[headers[j]]) {
            return callback(
              `Le champ ${headers[j]} est présent plusieurs fois alors qu'il n'est pas spécifié comme étant un array !`,
              null
            );
          }
          currentLineData[headers[j]] = currentValue;
        }
      }
    }
    result.push(currentLineData);
  }
  return callback(null, result);
};
} + if (!options.sep) { + options.sep = ";"; + } + if (!options.seplevel) { + options.seplevel = "__"; + } + if (!options.replacespecialcarCsv2Json) { + options.replacespecialcarCsv2Json = []; + } else { + if (typeof options.replacespecialcarCsv2Json == "string") { + //permet de passer des regex en string + options.replacespecialcarCsv2Json = eval( + options.replacespecialcarCsv2Json + ); + } + } + const lines = csv.split(options.retln); + for (let i = 0; i < lines.length; i++) { + const infol = lines[i].split(options.sep); + //console.log(infol) + if (infol[0].length > 4 && infol.length < 2) { + // si le 1er element à plus de 4 caractere et s'il y a moins de 3 colonnes c'est qu'il y a un pb + etat += `Erreur sur ${lines[i]} moins de 3 column separé par ${options.sep}`; + continue; + } + // On ajoute ici la gestion de tous les caracteres spéciaux + // reservées pour le csv ; ' etc..' + if (infol[1] && infol[1] + "" == infol[1]) { + options.replacespecialcarCsv2Json.forEach((re) => { + //console.log("gggggggggggggggggggg", infol[1]) + infol[1] = infol[1].replace(re[0], re[1]); + }); + // console.log(infol[1]) + infol[1] = infol[1].replace(/'|’/g, '"'); + //console.log(infol[1]) + if (infol[1].toLowerCase() === "true") { + infol[1] = true; + } else if (infol[1].toLowerCase() === "false") { + infol[1] = false; + } + } + console.log(infol[1]); + //supprime des lignes vides + if (infol[0] == "") continue; + if (infol[0].indexOf(options.seplevel) == -1) { + param[infol[0]] = infol[1]; + continue; + } else { + const arbre = infol[0].split(options.seplevel); + switch (arbre.length) { + case 1: + param[arbre[0]] = infol[1]; + break; + case 2: + if (arbre[1] != "ARRAY") { + if (!param[arbre[0]]) param[arbre[0]] = {}; + param[arbre[0]][arbre[1]] = infol[1]; + } else { + if (!param[arbre[0]]) param[arbre[0]] = []; + //console.log('aff', infol[1].substring(1, infol[1].length - 1).replace(/""/g, '"')) + eval("result=" + infol[1]); + //.substring(1, infol[1].length - 
1).replace(/""/g, '"')) + param[arbre[0]].push(result); + } + break; + case 3: + if (arbre[2] != "ARRAY") { + if (!param[arbre[0]]) param[arbre[0]] = {}; + if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {}; + param[arbre[0]][arbre[1]][arbre[2]] = infol[1]; + } else { + if (!param[arbre[0]]) param[arbre[0]] = {}; + if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = []; + //eval("result = \"test\""); + //console.log(result); + eval("result=" + infol[1]); + //.substring(1, infol[1].length - 1).replace(/""/g, '"')) + param[arbre[0]][arbre[1]].push(result); + } + break; + case 4: + if (arbre[3] != "ARRAY") { + if (!param[arbre[0]]) param[arbre[0]] = {}; + if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {}; + if (!param[arbre[0]][arbre[1]][arbre[2]]) + param[arbre[0]][arbre[1]][arbre[2]] = {}; + param[arbre[0]][arbre[1]][arbre[2]][arbre[3]] = infol[1]; + } else { + if (!param[arbre[0]]) param[arbre[0]] = {}; + if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {}; + if (!param[arbre[0]][arbre[1]][arbre[2]]) + param[arbre[0]][arbre[1]][arbre[2]] = []; + eval("result=" + infol[1]); + //.substring(1, infol[1].length - 1).replace(/""/g, '"')) + param[arbre[0]][arbre[1]][arbre[2]].push(result); + break; + } + default: + break; + } + } + } + // JSON.parse(JSON.stringify(param)) + console.log( + "kkkkkkkkkkkkkkkkkk", + param["catalogue"]["filtrecatalog"]["searchengine"] + ); + if (etat == "") { + return callback(null, JSON.parse(JSON.stringify(param))); + } else { + return callback(etat, null); + } +}; +utils.levenshtein = (a, b) => { + if (a.length === 0) return b.length; + if (b.length === 0) return a.length; + let tmp, i, j, prev, val, row; + // swap to save some memory O(min(a,b)) instead of O(a) + if (a.length > b.length) { + tmp = a; + a = b; + b = tmp; + } + row = Array(a.length + 1); + // init the row + for (i = 0; i <= a.length; i++) { + row[i] = i; + } + // fill in the rest + for (i = 1; i <= b.length; i++) { + prev = i; + 
for (j = 1; j <= a.length; j++) { + if (b[i - 1] === a[j - 1]) { + val = row[j - 1]; // match + } else { + val = Math.min( + row[j - 1] + 1, // substitution + Math.min( + prev + 1, // insertion + row[j] + 1 + ) + ); // deletion + } + row[j - 1] = prev; + prev = val; + } + row[a.length] = prev; + } + return row[a.length]; +}; +utils.testinarray = (array, arrayreferent) => { + // au moins un element de array existe dans arryreferent + let exist = false; + if (arrayreferent) { + //console.log('arrrrrrrrrrrrrrr', arrayreferent) + array.forEach((e) => { + //console.log(e) + if (arrayreferent.includes(e)) exist = true; + }); + } + return exist; +}; +/* +DIRECTORY +*/ +const isDirectory = (source) => fs.lstatSync(source).isDirectory(); +const getDirectories = (source) => + fs + .readdirSync(source) + .map((name) => path.join(source, name)) + .filter(isDirectory); + +module.exports = utils; diff --git a/apxtri/models/Towns.js b/apxtri/models/Towns.js new file mode 100644 index 0000000..75a02f7 --- /dev/null +++ b/apxtri/models/Towns.js @@ -0,0 +1,208 @@ +const bcrypt = require("bcrypt"); +const fs = require("fs-extra"); +const glob = require("glob"); +const moment = require("moment"); +const jwt = require("jwt-simple"); +const UUID = require("uuid"); +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const Checkjson = require(`./Checkjson.js`); +const Odmdb = require("./Odmdb.js"); + +const Towns = {}; + +Towns.create = () => { + // Create a new town from conf (generate in apxtrie.js if town not already exist in the server) + console.log( + `RUNNING A NEW SETUP with nation ${conf.nationId} and town ${conf.townId} to be accessible in dns http://${conf.dns}` + ); + const initconf = fs.readJSONSync( + `${conf.dirapi}/adminapi/www/adminapx/initconf.json` + ); + // Synchronize nationchains/ + const { updateobjectsfromfreshesttown } = require("./apxtri/models/Nations.js"); + updateobjectsfromfreshesttown(initconf.towns, { + pagans: "alias_all.json", + 
towns: "townId_all.json", + nations: "nationId_all.json", + }); + + initconf.dirapi = conf.dirapi; + initconf.dirtown = conf.dirtown; + initconf.nationId = conf.nationId; + initconf.townId = conf.townId; + initconf.sudoerUser = process.env.USER; + if (!initconf.dns.includes(conf.dns)) { + initconf.dns.push(conf.dns); + } + initconf.nginx.include.push(`${initconf.dirapi}/adminapi/www/nginx_*.conf`); + initconf.nginx.include.push(`${initconf.dirtown}/tribes/**/www/nginx_*.conf`); + initconf.nginx.logs = `${initconf.dirtown}/logs/nginx/adminapx`; + fs.ensureDirSync(`${initconf.dirtown}/logs/nginx`); + fs.ensureDirSync(`${initconf.dirtown}/tmp/tokens`); + + initconf.nginx.website = "adminapx"; + initconf.nginx.fswww = `${initconf.dirapi}/adminapi/www`; + initconf.nginx.pageindex = "index_en.html"; + const { exec } = require("child_process"); + exec( + `sudo chown -R ${process.env.USER}:${process.env.USER} /etc/nginx`, + (error, stdout, stderr) => { + if (error) { + console.log("\x1b[42m", error, stdout, stderr, "x1b[0m"); + console.log( + `impossible to change owner of /etc/nginx by ${initconf.sudoerUser}:${initconf.sudoerUser}` + ); + fs.removeSync(initconf.dirtown); + process.exit(); + } else { + console.log( + `successfull sudo chown -R ${process.env.USER}:${process.env.USER} /etc/nginx` + ); + } + } + ); + // create town env + fs.outputJsonSync(`${initconf.dirtown}/conf.json`, initconf, { space: 2 }); + const nginxconf = fs.readFileSync( + "./adminapi/www/adminapx/conf/nginx.conf.mustache", + "utf8" + ); + const proxyparams = fs.readFileSync( + "./adminapi/www/adminapx/conf/nginxproxyparams.mustache", + "utf8" + ); + const websiteconf = fs.readFileSync( + "./adminapi/www/adminapx/conf/nginxmodelwebsite.conf.mustache", + "utf8" + ); + + // saved and change nginx conf + if (!fs.existsSync("/etc/nginx/nginxconf.saved")) { + fs.moveSync("/etc/nginx/nginx.conf", "/etc/nginx/nginxconf.saved"); + console.log( + "your previous /etc/nginx/nginx.conf was backup in 
/etc/nginx/nginxconf.saved" + ); + } + fs.outputFileSync( + "/etc/nginx/nginx.conf", + mustache.render(nginxconf, initconf), + "utf8" + ); + fs.outputFileSync( + "/etc/nginx/proxy_params", + mustache.render(proxyparams, initconf), + "utf8" + ); + fs.outputFileSync( + `${initconf.dirapi}/adminapi/www/nginx_adminapx.conf`, + mustache.render(websiteconf, initconf), + "utf8" + ); + exec(initconf.nginx.restart, (error, stdout, stderr) => { + if (error) { + console.log("\x1b[42m", error, stdout, stderr, "x1b[0m"); + //@todo supprimer la derniere config nginx et relancer + fs.moveSync("/etc/nginx/nginxconf.saved", "/etc/nginx/nginx.conf"); + console.log("Restart yarn dev with correct parameter"); + // cleanup + fs.removeSync(initconf.dirtown); + } else { + //@TODO à finaliser en test sur machien pour creation de nouvelles villes + // add town in nationchains + const gettown = Odmdb.get(`${initconf.dirapi}/nationchains`, "towns", [ + initconf.townId, + ]); + if (gettown.data[initconf.townId] == "notfound") { + Odmdb.create( + `${initconf.dirapi}/nationschains`, + "towns", + { + townId: initconf.townId, + nationId: initconf.nationId, + dns: initconf.dns, + IP: "127.0.0.1", + status: "unchain", + tribes: [], + }, + false + ); + } else if (gettown.data[initconf.townId].dns !== initconf.dns) { + //reinstallation d'une town sur un autre serveur maj du dns , l'ip le status et les tribes se mettent à jour via l'interface + const updtown = Odmdb.update( + `${initconf.dirapi}/nationchains`, + "towns", + { dns: initconf.dns }, + initconf.townId + ); + } + console.log(`ready to use http://${initconf.dns}`); + } + }); +}; + +Towns.changeowner = async (newowner, requestby) => { + /** + * + */ + if (!fs.existsSync(`./tribes/adminapi/opjects/pagans/itm/${newowner}.json`)) { + return { + status: 404, + ref: "towns", + msg: "newownerdoesnotexist", + data: { alias: newowner }, + }; + } + if (!conf.mayorId || conf.mayorId == requestby) { + // update object town + town/conf.json + 
setup_xx.json + const gettown = Odmdb.get(`../nationchains`, "towns", [ + conf.townId, + ]); + console.log(`before town: ${conf.townId}`, gettown); + if (gettown.data[conf.townId] == "notfound") { + return { + status: 404, + ref: "towns", + msg: "townIdnotfound", + data: { townId: conf.townId }, + }; + } + gettown.data[conf.townId].mayorId = newowner; + const objup = await Odmdb.update( + `../nationchains`, + "towns", + gettown.data[conf.townId], + conf.townId + ); + //update the itm town + if (objup.status != 200) { + return objup; + } + console.log(`after town update: ${conf.townId}`, gettown); + + conf.mayorId = newowner; + fs.outputJsonSync(`../conf.json`, conf); + const setup = fs.readJSONSync( + `${conf.dirapi}/adminapi/www/adminapx/conf/setup_xx.json` + ); + conf.mayorId = newowner; + //update the setup file for webapp adminapi + fs.outputJsonSync( + `${conf.dirapi}/adminapi/www/adminapx/conf/setup_xx.json`, + setup + ); + return { + status: 200, + ref: "towns", + msg: "newownerchangesuccess", + data: { alias: newowner }, + }; + } + return { + status: 403, + ref: "towns", + msg: "notallow", + data: { newowner, currentowner: conf.mayorId }, + }; +}; + +module.exports = Towns; diff --git a/apxtri/models/Trackings.js b/apxtri/models/Trackings.js new file mode 100644 index 0000000..e14a97d --- /dev/null +++ b/apxtri/models/Trackings.js @@ -0,0 +1,29 @@ +const glob = require("glob"); +const path = require("path"); +const fs = require("fs-extra"); +const dayjs = require("dayjs"); +const axios = require("axios"); +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const Checkjson = require(`./Checkjson.js`); + +const Trackings = {} + +/** + * Tracking system management + * + * Data collection is done from nginx log system see routes/trackings.js for doc + */ + + +/** + * Process plan to run each night or on demand to collect log data and cleanup + */ +Trackings.logcollection=()=>{ + +} + +Trackings.dashboard=(graphname)=>{ + 
console.log('Process data to provide a specific graph') +} + +module.export = Trackings; \ No newline at end of file diff --git a/apxtri/models/Tribes.js b/apxtri/models/Tribes.js new file mode 100755 index 0000000..11bf68f --- /dev/null +++ b/apxtri/models/Tribes.js @@ -0,0 +1,367 @@ +const bcrypt = require( 'bcrypt' ); +const fs = require( 'fs-extra' ); +const path = require( 'path' ); +const glob = require( 'glob' ); +const Mustache = require( 'mustache' ); +const execSync = require( 'child_process' ) + .execSync; +const dnsSync = require( 'dns-sync' ); +const jwt = require( 'jwt-simple' ); +const moment = require( 'moment' ); +const UUID = require( 'uuid' ); +const Pagans = require( './Pagans.js' ); +const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`); +const Checkjson = require( `./Checkjson.js`); +/* +tribeid manager + +@TODO @STUDY + +To add a tribe in dirtown/tribes with a mayor phil +see man adduser and file reference call skelet directory to set an env for apxtri in /home/tribename/ +accessible by tribename/password +then add group group me to phil to allow phil to ate a symlink /dirtown/tribes/tribename => to /home/tribename + +At each reboot run a process to analyse /apxtri/routes and api/models whre only js can be exexuted are safe (only write data into /home/tribename, never outside) + +1- Create a user in linux with $ sudo useradd smatchit +2 => this create a user:group and a folder smatchit in /home/phil/dirtown/tribes/ +2 => add group smatchit to phil to allow phil to access file with a group accessright +3 set a password if needed "$sudo passwd smatchit" (sm@tchit) to smatchit to make it available from ssh on port 22 +4 +4 to delete a user sudo userdel smatchit (this keep folder smatchit to remove folder smatchit => sudo userdel --remove smacthit) + + +/tribes/tribeid +Manage a tribeid space +* create +* update by managing option and contract +* delete a tribeid +* check accountability and + +*/ +const Tribes = {}; 
+Tribes.init = () => { + console.group( 'init Tribes' ); + let tribeids = []; + let routes = glob.sync( './routes/*.js' ) + .map( f => { + return { url: `/${path.basename(f,'.js')}`, route: f } + } ); + let DOMs = []; + let appname = {}; + TribesGlobalConfig = glob.sync( `${conf.tribes}/**/clientconf.json` ) + .map( f => { + const conf = fs.readJSONSync( f ); + // check if plugins exist and add it in .plugins of each tribeid conf + conf.plugins = glob.sync( `${conf.tribes}/${conf.tribeid}/plugins/**/package.json` ) + .map( p => { + const pack = fs.readJsonSync( p, 'utf8' ); + routes.push( { url: `/${pack.name}`, route: `${conf.tribes}/${conf.tribeid}/plugins/${pack.name}/route.js` } ); + return pack; + } ); + //Add here any other info to get a global view and init + //... + tribeids.push( conf.tribeid ); + DOMs = [ ...new Set( [ ...DOMs, ...conf.allowedDOMs ] ) ]; + if( conf.website ) appname[ conf.tribeid ] = Object.keys( conf.website ) + return conf; + } ); + // store global conf fofs.existsSync( `${conf.tmp}/clientconfglob.json` )r sharing to other api + fs.outputJsonSync( `${conf.tmp}/clientconfglob.json`, TribesGlobalConfig, { + spaces: 2 + } ); + return { tribeids, routes, DOMs, appname } +} +Tribes.create = ( data ) => { + /* data = clientconf.json + { + "tribeid": "apxtri", + "genericpsw": "Trze3aze!", + "website": { + "presentation":"https://www.apxtri.org", + "webapp": "https://webapp.apxtri.org" + }, + "allowedDOMs": ["local.fr", "localhost:9002", "ndda.fr", "apxtri.org"], + "clientname": "apxtri", + "clientlogo": "", + "geoloc": [], + "useradmin": {PUBKEY:"",EMAIL:"",LOGIN:"adminapxtri",UUID:"adminapxtri"}, + "smtp": { + "emailFrom": "support@apxtri.org", + "emailcc": [], + "service": "gmail", + "auth": { + "user": "antonin.ha@gmail.com", + "pass": "Ha06110" + } + }, + "accepted-language": "fr,en", + "langueReferential": ["fr"] + } + What about: + "tribeid": same than the folder where all the client's file are stored + "genericpsw": a generic password 
for new user need upper lowercase number ans special char + "dnsname": a domain name belonging to the client + "subdns": "www", a sub domain subdns.dnsname give a public web access to + "website": { keywebsite:url}, give access to conf.tribes/tribeid/www/keywebsite/index.html, + "allowedDOMs": ["local.fr", "localhost:9002", "nnda.fr"], //for CORS, @TODO generate from prévious URL this allow this apxtri instance to be accessible + "clientname": Name of the organisation if any, + "clientlogo": logo of the organisation if any, + "geoloc": [], if any + "useradmin": { this is the 1st user create automaticaly to make gui available for the 1st user + "PUBKEY":public key to be authentify without an email, + "EMAIL":user email, we need at least one of authentification set up after the user can use both or only one + "LOGIN": login to use for access admintribeid, + "UUID": unique id normaly UUID but a uuid admintribeid is the same person in any apxtri instance so we use it by convention. + "xlang": lang used by this user + }, + "smtp": { smtp used to send email by nodemailer lib basic example with a google account + "emailFrom": "support@xx.fr", + "emailcc": [], + "service": "gmail", + "auth": { + "user": "antonin.ha@gmail.com", + "pass": "Ha06110" + } + }, + "accepted-language": "fr,en", list of accepted-language in terme of http request. 
+ "langueReferential": ["fr"], list of the text that have to be translate in referentials + } + */ + //update tmp/confglog.json + const dataclient = Tribes.init(); + //return in prod all instance apxinfo={tribeids:[],logins:[]} + // in dev return only local + //check tribeid name is unique + console.log( 'liste des tribeid', dataclient.tribeids ) + if( dataclient.tribeids.includes( data.tribeid ) ) { + return { status: 403, payload: { model: "client", info: [ 'tribeidalreadyexist' ] } } + } + //loginsglob = {login:tribeid} + let loginsglob = {}; + if( fs.existsSync( `${conf.tmp}/loginsglob.json`, 'utf-8' ) ) { + loginsglob = fs.readJsonSync( `${conf.tmp}/loginsglob.json`, 'utf-8' ); + } + const logins = Object.keys( loginsglob ); + if( logins.includes( data.useradmin.login ) ) { + return { status: 403, payload: { model: "client", info: [ 'loginalreadyexist' ] } } + } + fs.ensureDirSync( `${conf.tribes}/${data.tribeid}` ); + [ 'users', 'www', 'referentials', 'nationchains' ].forEach( r => { + fs.copySync( `${__dirapi}/setup/tribes/apxtri/${r}`, `${conf.tribes}/${data.tribeid}/${r}` ); + } ) + fs.outputJsonSync( `${conf.tribes}/${data.tribeid}/clientconf.json`, data ); + const confcli = JSON.parse( Mustache.render( fs.readFileSync( `${__dirapi}/setup/tribes/apxtri/clientconf.mustache`, 'utf8' ), data ) ); + fs.outputJsonSync( `${conf.tribes}/${data.tribeid}/clientconf.json`, confcli ); + + return Pagans.createUser( { + xpaganid: "setup", + xworkon: data.tribeid, + xlang: data.useradmin.xlang + }, data.useradmin ); +}; +Tribes.archive = ( tribeid ) => { + //A faire zip un repertoire tribeid dans + // remove tribeid de data ou devdata + try { + fs.moveSync( `${conf.tribes}/${tribeid}`, `${conf.archivefolder}/${tribeid}` ); + //update apxtrienv + Tribes.init(); + return { status: 200, payload: { info: [ 'deletetribeidsuccessfull' ], models: 'Tribes', moreinfo: "TODO see in Tribes.archive" } } + } catch ( err ) { + console.log( "Erreur d'archivage", err ) + return { 
status: 403, payload: { info: [ 'archiveerror' ], models: 'Tribes', moreinfo: err } } + } +} +////////////// Manage file for Tribes +Tribes.checkaccessfolder = ( folder, typeaccessrequested, useraccessrights, useruuid ) => { + // check folder right + + + +} + +Tribes.checkaccessfiles = ( listfile, typeaccessrequested, useraccessrights, useruuid ) => { + // @listfile to check accessright on file or folder + // @typeaccessrequested on files R read or download, U for pdate, D for delete , O for owned a Owner has all rights RUD on its files + // @useraccessright from its account /userd/uuid.json + // @useruuid public uuid user + // return {'ok':[file auhtorized],'ko':[files not authorized]} + + const checkauthlistfile = { 'ok': [], 'ko': [] } + let structf = [] + let inforep = { file: {}, dir: {} } + let done; + for( const f of listfile ) { + done = false; + if( !fs.existsSync( `${conf.tribes}/${f}` ) ) { + done = true; + checkauthlistfile.ko.push( f ) + console.log( `${f} file does not exist` ) + } else { + structf = f.split( '/' ); + } + //on ckeck tribeid existe / tribeid/object/ + if( !done && + useraccessrights.data[ structf[ 0 ] ] && + useraccessrights.data[ structf[ 0 ] ][ structf[ 1 ] ] && + useraccessrights.data[ structf[ 0 ] ][ structf[ 1 ] ].includes( typeaccessrequested ) ) { + done = true; + checkauthlistfile.ok.push( f ); + } else { + // check if in folder we have a.info.json .file[f].shared{useruuid:'CRUDO'} + console.log( 'structf', structf ) + if( fs.existsSync( `${conf.tribes}/${structf.slice(0,-1).join('/')}/.info.json` ) ) { + inforep = fs.readJsonSync( `${conf.tribes}/${structf.slice(0,-1).join('/')}/.info.json`, 'utf8' ) + } + console.log( `no accessrights for ${f} for ${useruuid} ` ) + } + if( !done && inforep.file[ f ] && inforep.file[ f ] && inforep.file[ f ].shared && inforep.file[ f ].shared[ useruuid ] && inforep.file[ f ].shared[ useruuid ].includes( typeaccessrequested ) ) { + done = true; + checkauthlistfile.ok.push( f ) + } + // If no 
authorization then ko + if( !done ) { + checkauthlistfile.ko.push( f ) + } + } // end loop for + //console.log( 'checkauthlistfile', checkauthlistfile ) + return checkauthlistfile; +} + +Tribes.dirls = ( tribeid, dir ) => { + /* + Return list of file into tribeid/dir + */ + let comment = { src: `${tribeid}/${dir}`, file: {}, dir: {} }; + if( fs.existsSync( `${conf.tribes}/${tribeid}/${dir}/.info.json` ) ) { + comment = fs.readJsonSync( `${conf.tribes}/${tribeid}/${dir}/.info.json`, 'utf-8' ); + } + const listfile = [] + const listdir = [] + glob.sync( `${conf.tribes}/${tribeid}/${dir}/*` ) + .forEach( f => { + //console.log( f ) + const stats = fs.statSync( f ); + // console.log( stats ) + if( stats.isFile() ) { + listfile.push( path.basename( f ) ) + if( !comment.file[ path.basename( f ) ] ) { + comment.file[ path.basename( f ) ] = { tags: [], info: "", thumbb64: "" }; + } + comment.file[ path.basename( f ) ].mtime = stats.mtime; + comment.file[ path.basename( f ) ].ctime = stats.ctime; + comment.file[ path.basename( f ) ].size = stats.size; + } + if( stats.isDirectory() ) { + listdir.push( path.basename( f ) ) + if( !comment.dir[ path.basename( f ) ] ) { + comment.dir[ path.basename( f ) ] = { tags: [], info: "", thumbb64: "" } + } + comment.dir[ path.basename( f ) ].nbfile = glob.sync( `${f}/*.*` ) + .length; + comment.dir[ path.basename( f ) ].mtime = stats.mtime; + comment.dir[ path.basename( f ) ].ctime = stats.mtime; + console.log( 'comment.dir', comment.dir ) + } + } ); + // on remove les file or dir that was deleted + Object.keys( comment.file ) + .forEach( f => { + if( !listfile.includes( f ) ) delete comment.file[ f ] + } ) + Object.keys( comment.dir ) + .forEach( d => { + if( !listdir.includes( d ) ) delete comment.dir[ d ] + } ) + //console.log( comment ) + fs.outputJson( `${conf.tribes}/${tribeid}/${dir}/.info.json`, comment, 'utf-8' ); + return { status: 200, payload: { info: [ 'succestogetls' ], models: 'Tribes', moreinfo: comment } } +}; 
+Tribes.addspaceweb = ( data ) => { + /* + To create a public spaceweb accessible from https://dnsname/pageindex + + input: + {dnsname:["archilinea.fr","www.archilinea.fr"], 1st is tha main dns other are just servername redirection + tribeid:"archilinea", from req.session.header.xworkon + website:"presentation", + pageindex:"app_index_fr.html" + mode:dev(local no ssl) | prod(IP + ssl) +} + output: + nginx conf and ssl to serve each https://dnsname to /{tribeid}/www/app/{website} + + + Carefull this action is executed with root and restart nginx + apxtri to work + */ + data.configdomain = conf.tribes; + data.porthttp = conf.porthttp; + console.assert( conf.loglevel == "quiet", 'data to create spaceweb:', data ); + // create spaceweb app for tribeid/www/app/website/pageindexname.html + if( !fs.existsSync( `${conf.tribes}/${data.tribeid}/www/app/${data.website}` ) ) { + fs.outputFileSync( `${conf.tribes}/${data.tribeid}/www/app/${data.website}/${data.pageindex}`, `https://dns.xx/trk/pathtofile?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=btnregister&version=1&lg=fr
+ *
+ * with headerhttps://dns.xx/trk/pathtofile?srckey=btnregister&version=1
+ *
+ * where pathtofile is a ressource accessible from https://dns.xx/pathtofile
+ * For dummy pathtofile for apxtri project, you have: < img src="static/img/photo.jpg" data-trksrckey="loadpage" data-version="1" >
+ *
+ * in js action:
+ *
+ *
+ *
+ * To hit an eventlistener axios.get("https://dns.xx/trk/cdn/empty.json?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=btnregister&version=1");
+ *
+ * If no js available (example:email or pdf document) < img src="https://dns.xx/trk/static/img/photo.jpg?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=loadpage&version=1"
+ *
+ *
+ *
+ * This will hit a tracker then redirect to the target url.
+ *
+ * **if you use apx.js** : in html add in < button >, < img >, < a > tag data-trksrc="srckey"
+ *
+ * < img src="https://dns.xx/static/img/photo.jpg" data-trkversion="1" data-trksrckey="registerform">
+ * < button data-trksrc="https://dns.xx/static/img/photo.jpg" data-trkversion="1" data-trksrckey="registerform">
+ *
+ * A lazyloader can also be tracked.
+ * < img lazysrc="trk/static/img/photo.jpg data-trksrckey="loadpage" data-version="1" >
+
+ *
+ * Tracking log are store into tribe/logs/nginx/tribe_appname.trk.log
+ * Src have to be manage in tribe/api/models/lg/src_en.json
+ * {"srckey":{"app":"presentation|app|apptest","title":"","description":""}}
+ *
+ * @apiParam {String} alias=anonymous if authenticated we get from headers
+ * @apiParam {String} uuid a uuid v4 generate the first time a web page is open on a browser
+ * @apiParam {String} srckey source action that trig this get
+ * @apiParam {integer} version=1 can be an int, date or any version of the src
+ * @apiParam {integer} [tm] a timestamp of action when it is not immediate (for offline app)
+ *
+ */
+
+
+
+
+module.exports=router;
\ No newline at end of file
diff --git a/apxtri/routes/tribes.js b/apxtri/routes/tribes.js
new file mode 100755
index 0000000..162781c
--- /dev/null
+++ b/apxtri/routes/tribes.js
@@ -0,0 +1,448 @@
+const express = require( 'express' );
+const fs = require( 'fs-extra' );
+const path = require( 'path' );
+const glob = require('glob');
+const conf = require(`../../../adminapi/objects/tribes/itm/adminapi.json`);
+
+// Classes
+const Tribes = require( '../models/Tribes.js' );
+// Middlewares
+const checkHeaders = require( '../middlewares/checkHeaders' );
+const isAuthenticated = require( '../middlewares/isAuthenticated' );
+const router = express.Router();
+
+
+/**
+* @api {get} adminapi/tribes/config/:tribe - tribe configuration
+* @apiName getconf
+* @apiDescription Get list of route and model for a town server
+* @apiGroup Tribes
+*
+* @apiSuccess (object) get tribes conf on this server
+* @apiSuccessExample {json}
+* HTTP/1.1 200 OK
+* {status:200,ref:"Tribes",msg:"tribesconf",data:{routes:[],modele:[{model:,tplstrings:[lg,lg]}]}}
+*/
+router.get('/config/:tribe', checkHeaders,isAuthenticated,(req,res)=>{
+ /*console.log("passe la")
+ AJOUTER gestion accessright
+ req.session.header.accessrights.data[ req.params.tribe ] &&
+ req.session.header.accessrights.data[ req.params.tribe ].tribeid &&
+ req.session.header.accessrights.data[ req.params.tribe ].tribeid.includes( 'R' )
+ */
+ if("authorize"=="authorize"){
+ const tribconf=`../../adminapi/objects/tribes/itm/${req.params.tribe}.json`
+ if (!fs.existsSync(tribconf)){
+ res.status(404).json({status:404,ref:"Tribes",msg:"tribedoesnotexist", data:{tribe:req.params.tribe}})
+ }else{
+ res.status(200).json({satatus:200,ref:"Tribes",msg:"tribconf",data:{conf:fs.readJsonSync(tribconf)}})
+ }
+ }else{
+ res.status( 403 )
+ .json( { msg: [ 'forbidenAccess' ], ref: 'Tribes' } )
+ .end();
+ }
+})
+
+/**
+* @api {get} adminapi/tribes/www/:tribeId - www space list
+* @apiName getlisttrib
+* @apiDescription Get list of www object (space web)
+* @apiGroup Tribes
+*
+* @apiParam {String} tribeId it identify an existing tribe*
+* @apiSuccess (object) listwww contains folder name in www for tribeId
+* @apiSuccessExample {json} listwww
+* HTTP/1.1 200 OK
+* {status:200,ref:"Tribes",msg:"listwww",data:{listwww}}
+*/
+router.get('/www', checkHeaders,isAuthenticated,(req,res)=>{
+ let listwww=[]
+ glob.sync(`${conf.dirtown}/tribes/${req.params.tribeId}/www/*`).forEach(d=>{
+ listwww.push(d.split("/").pop())
+ })
+ res.status(200).json({status:200,ref:"Tribes",msg:"listwww",data:{listwww}})
+})
+
+//router.post('www/') to create a webspace
+
+//router.put('www/:app') to update
+
+//router.delete('www/:tribeId/:app)
+
+router.post('/actionanonyme',checkHeaders,(req,res)=>{
+ if (!fs.existsSync(`${conf.dirtown}/tribes/${req.session.header.xtribe}/actions/${req.body.action}.js`)){
+ res.status(403).send({status:403,msg:"actionmissing",ref:"Tribes", data:{action:req.body.action,tribe:req.session.header.xtribe}})
+ }
+ const action = require(`${conf.dirtown}/tribes/${req.session.header.xtribe}/actions/${req.body.action}.js`)
+ const resaction= action.run(req.body,req.session.header);
+ res.status(resaction.status).send(resaction);
+})
+
+router.post('/action',checkHeaders,isAuthenticated,(req,res)=>{
+  // TODO: authenticated action runner (authenticated counterpart of /actionanonyme).
+  // Not implemented yet — no response is sent, so the request currently hangs.
+})
+
+
+router.get( '/clientconf/:tribeid', checkHeaders, isAuthenticated, ( req, res ) => {
+ /*
+ get a clientconf.json for a tribeid depending of user accessright
+ if tribeid == all and user is admin of apxtri => get /tmp/clientconfglob.json
+ req.session.header.accessrights, req.session.header.apixpaganid
+ */
+ console.log( `Tribes/clientconf for tribeid:${req.params.tribeid}` )
+ if( req.params.tribeid == "all" && req.session.header.accessrights.data.apxtri && req.session.header.accessrights.data.apxtri.tribeid && req.session.header.accessrights.data.apxtri.tribeid.includes( 'R' ) ) {
+ res.status( 200 )
+ .send( { moreinfo: fs.readJsonSync( `${config.tmp}/clientconfglob.json`, 'utf-8' ) } );
+ return;
+ }
+ if( req.session.header.accessrights.data[ req.params.tribeid ] &&
+ req.session.header.accessrights.data[ req.params.tribeid ].tribeid &&
+ req.session.header.accessrights.data[ req.params.tribeid ].tribeid.includes( 'R' ) &&
+ fs.existsSync( `${config.tribes}/${req.params.tribeid}/clientconf.json` ) ) {
+ // const conftribeid = { moreinfo: {} }
+ // conftribeid.moreinfo[ req.params.tribeid ] = fs.readJsonSync( `${config.tribes}/${req.params.tribeid}/clientconf.json`, 'utf-8' );
+ res.status( 200 )
+ .send( { moreinfo: [ fs.readJsonSync( `${config.tribes}/${req.params.tribeid}/clientconf.json`, 'utf-8' ) ] } );
+ return;
+ }
+ // if not authorized or does not exist return empty
+ // no specific message is send for security reason (check only log)
+ res.status( 403 )
+ .send( { info: [ 'forbidenAccess' ], models: 'Tribes' } )
+ .end();
+} )
+router.put( '/', checkHeaders, isAuthenticated, ( req, res ) => {
+ console.log( 'Create a new tribeid, with a useradmin' )
+ console.log( ' send data = clientconf.json with all parameter.' )
+ // !!!!! check for security any ; \n or so because data can be used into shell
+ const add = Tribes.create( req.body );
+ res.status( add.status )
+ .send( add.payload )
+} )
+router.delete( '/archivetribeid/:tribeid', checkHeaders, isAuthenticated, ( req, res ) => {
+ console.log( "request archive tribeid" )
+ const archive = Tribes.archive( req.params.tribeid );
+ res.status( archive.status )
+ .send( archive.payload )
+} );
+router.post( '/spaceweb', checkHeaders, isAuthenticated, ( req, res ) => {
+ // !!!!! check for security any ; \n or so because data can be used into shell
+ console.log( 'Create a new webapp for xworkon ' )
+ req.body.tribeid = req.session.header.xworkon;
+ const add = Tribes.addspaceweb( req.body )
+ res.status( add.status )
+ .send( add.payload )
+} )
+router.get( '/spaceweb/components/:tribeid/:website/:key', checkHeaders, ( req, res ) => {
+ // check if key is valid before continue
+ // exemple: get Tribes/spaceweb/components/ndda/mesa/123?rep=appmesatable/appsimpletable.mustache
+ const file = `${config.tribes}/${req.params.tribeid}/spacedev/${req.params.website}/src/ctatic/components/${req.query.path}`
+ console.log( `Request components file from ${file}` )
+ if( fs.existsSync( file ) ) {
+ res.sendFile( file );
+ } else {
+ res.send( `console.error("Missing components file in ${req.params.tribeid}/spacedev/${req.params.website}/src/ctatic/components/${req.query.path}");` );
+ }
+} )
+router.get( '/plugins/:tribeid/:pluginname/:key/:filename', ( req, res ) => {
+ // No accessright possible cause it is load on the fly
+ // @todo Check key to authorize access to the plugin (key comme from user ACCESSRIGHTS[tribeid plugin owner:pluginname]).key
+ // return a file into /:tribeid owner of plugin/plugins/:pluginname/components/:filename
+ // if not exist or invalid key then return console.error
+ const file = `${config.tribes}/${req.params.tribeid}/plugins/${req.params.pluginname}/components/${req.params.filename}`
+ console.log( 'Tribes/plugins/ ', file )
+ if( fs.existsSync( file ) ) {
+ res.sendFile( file );
+ } else {
+ res.send( `console.error("Missing plugin file in ${req.params.tribeid}/plugins/${req.params.pluginname}/components/${req.params.filename}");` );
+ }
+} );
+
+router.get( '/dirls', checkHeaders, isAuthenticated, ( req, res ) => {
+ // url adminapi/tribes/dirls?rep=referentials/dataManagement
+ // request information about a req.query.rep from header xworkon/
+ // return
+ // {file:[{}],dir:[{}]}
+ // @todo check if isAuthorized and exist
+
+ console.log( 'request dirls', `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` );
+ if( !fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` ) ) {
+ return res.status( 404 )
+ .send( { 'info': [ 'dirnotexist' ], model: 'Tribes' } );
+ }
+ const info = Tribes.dirls( req.session.header.xworkon, req.query.rep );
+ console.log( info )
+ res.status( info.status )
+ .send( info.payload );
+} )
+router.delete( '/ls', checkHeaders, isAuthenticated, ( req, res ) => {
+ // check Accessright with D or O on each
+ // url adminapi/tribes/ls
+ // req.body.files=[listfiles file to delete ]
+ const authfiles = Tribes.checkaccessfiles( req.body, 'D', req.session.header.accessrights, req.session.header.apixpaganid );
+ authfiles.ok.forEach( f => { fs.remove( `${config.tribes}/${f}` ); } )
+ res.status( 200 )
+ .send( { 'info': [ 'fileauthdeleted' ], models: 'Tribes', moreinfo: authfiles } )
+} );
+router.put( '/sendjson', checkHeaders, isAuthenticated, ( req, res ) => {
+ //req.body = {object:spacedev, path:website/src/data/tpldataname_lg.json, data:{...}}
+ //console.log( req.body )
+ const dest = `${config.tribes}/${req.session.header.xworkon}/${req.body.object}/${req.body.path}`;
+ console.log( `Send json to saved to ${dest}` );
+ if( !( req.body.object && fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.body.object}` ) ) ) {
+ res.status( 404 )
+ .send( { info: [ 'objectmissiong' ], models: 'Tribes', moreinfo: `object: ${req.body.object} does not exist req.body must {object, data, path} into data ${req.session.header.xworkon}/${req.body.object}` } )
+ } else {
+ if( fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.body.object}/${req.body.path}` ) ) {
+ // exist so can be update check accessright update on this
+ //A REVOIR hasAccessrighton( req.body.object, "U" );
+ } else {
+ // AREVOIRhasAccessrighton( req.body.object, "C" );
+ }
+ fs.outputJsonSync( dest, req.body.data );
+ res.status( 200 )
+ .send( { info: [ 'filesaved' ], models: 'Tribes' } )
+ }
+} );
+router.post( '/downloadls', checkHeaders, isAuthenticated, ( req, res ) => {
+ // midlleware hasAccessrighton.js is not apply here only to access/update/create information inside an object
+ // to get file a user need accessrights to data: object: R or to Own it
+ // or if exist a .info.json into folder get shared as R in uuid
+
+ //req.body contain list of path file or folder if only 1 file then download it, otherwise zip list and send zip file
+
+ const authfiles = Tribes.checkaccessfiles( req.body.files, 'R', req.session.header.accessrights, req.session.header.xpaganid );
+ if( authfiles.ok.length == 1 ) {
+ // bidouille en attendnat de faire un .zip binaire propre
+ if( !authfiles.ok[ 0 ].includes( '.xml' ) ) {
+ res.status( 200 )
+ .download( `${config.tribes}/${authfiles.ok[0]}`, authfiles.ok[ 0 ] );
+ } else {
+ fs.copySync( `${config.tribes}/${authfiles.ok[0]}`, `${config.tribes}/${config.mayorId}/www/app/webapp/static/tmp/${authfiles.ok[ 0 ]}` )
+ }
+ } else if( authfiles.ok.length > 1 ) {
+ // on zip et on envoie
+ //res.status( 200 )
+ // .download( `${config.tribes}/${authfiles.ok[0]}`, authfiles.ok[ 0 ])
+ res.status( 200 )
+ .attachment( `${config.tribes}/${authfiles.ok[0]}` );
+
+ } else {
+ // @todo req.body.filepond was referenced here but never used — confirm intent
+ res.status( 403 )
+ .send( 'Forbidden access' )
+ }
+} );
+
+
+router.post( '/upfilepond', checkHeaders, isAuthenticated, ( req, res, next ) => {
+ console.log( 'post adminapi/tribes/uploadfilepond' );
+ // Store file and return a unique id to save button
+ // that provide folder where to store it
+ const formidable = require( 'formidable' );
+ const form = formidable( { multiples: false } );
+ form.parse( req, ( err, fields, files ) => {
+ if( err ) { next( err ); return; }
+ //console.log( 'fields',fields);
+ // fileMetadaObject send
+ let context = JSON.parse( fields.filepond );
+ let idfile = files.filepond.path;
+ let name = files.filepond.name;
+ let subfolder = context.subfolder;
+ name = name.replace( /[ ,'"’]/g, "_" );
+ //console.log( 'files.filepond:', files.filepond );
+ console.log( idfile, `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` )
+ // On le supprime s'il existe deja
+ fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` );
+ // mv tmp
+ fs.moveSync( idfile, `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` );
+ //res.status(200).send({models:"Tribes",info:["Savedsuccess"],moreinfo:{id:file.filepond.path}})
+ //return for filepond
+ res.writeHead( 200, { 'Content-Type': 'text/plain' } );
+ res.end( idfile );
+ } )
+} );
+router.delete( '/file', checkHeaders, isAuthenticated, ( req, res ) => {
+ //src = objectfolder with accessright/...
+ //hasAccessrighton( "www", "D" ),
+ if( !req.query.src ) {
+ res.status( 404 )
+ .send( { info: [ 'deleteerror' ], models: "Tribes", moreinfo: "your del req need a src" } )
+ return;
+ };
+ // A REVOIR hasAccessrighton( req.query.src.split( '/' )[ 0 ], "D" );
+ console.log( 'Remove file', `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` )
+ console.log( req.body )
+ fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` );
+ res.status( 200 )
+ .send( { info: [ 'Successfullremove' ], models: "Tribes" } )
+} );
+router.post( '/uploadfile', checkHeaders, isAuthenticated, ( req, res ) => {
+ console.log( 'upload a file ' )
+ /* Authentification is needed to get a TOKEN
+ curl -X POST -H "xtribe: apxtri" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: 1" -H "xauth: 1" -H "xapp: pvmsaveurs:pvmsaveurs" -H "Content-Type: application/json" -d '{"LOGIN":"adminapxtri","PASSWORD":"Trze3aze!"}' http://pvmsaveurs.pvmsaveurs.fr/app/users/login
+ if exist replace xpaganidTOKEN with payload.TOKEN value
+
+ curl -H "xtribe: pvmsaveurs" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: adminapxtri" -H "xauth: xpressuuisToken" -H "xapp: pvmsaveurs:pvmsaveurs" -F 'data=@filename.xx' http://pvmsaveurs.pvmsaveurs.fr/app/Tribes/uploadfile
+ */
+ const formidable = require( 'formidable' );
+ const form = formidable( { multiples: false } );
+ form.parse( req, function ( err, fields, files ) {
+ //console.log( files.data )
+ var oldPath = files.data.path;
+ var newPath = `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].dest}`;
+ console.log( 'oldPath', oldPath )
+ console.log( 'newPath', newPath )
+ var rawData = fs.readFileSync( oldPath )
+ fs.outputFile( newPath, rawData, function ( err ) {
+ if( err ) {
+ console.log( err );
+ return res.status( 405 )
+ .send( { info: [ 'savederror' ], models: "Tribes", moreinfo: "your file was not able to be saved into the server" } )
+ } else {
+ return res.status( 200 )
+ .send( {
+ info: [ "successfullsent" ],
+ models: "Tribes"
+ } );
+ }
+ } )
+ } );
+} );
+router.post( '/uploadzip', checkHeaders, ( req, res ) => {
+ console.log( 'uploadzip a file ' )
+
+ /* no authentification to upload a zip filename into adminapi/tribes/${xworkon}/${clientconf.uploadzip[filename].dest}
+ unzip it using the password ${clientconf.uploadzip[filename].psw
+ if no error then run the callback ${clientconf.uploadzip[filename].callback
+ but a password to unzip
+ in clientconf.json need to be set
+ "uploadzip": {
+ "articlesTribespvm.zip": {
+ "comment": "unzip with overwrite if same name",
+ "psw": "azPI1209qtrse",
+ "dest": "importexport/tmp",
+ "unzipoption": "-aoa",
+ "callback": "importexport/integrationitem.js"
+ }
+ },
+ Example:
+ cd where zip file is stored
+ curl -H "xtribe: pvmsaveurs" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: adminapxtri" -H "xauth: 1" -H "xapp: pvmsaveurs:pvmsaveurs" -F 'data=@articlesTribespvm.zip' http://pvmsaveurs.pvmsaveurs.fr/app/Tribes/uploadzip
+
+ */
+ const clientconf = fs.readJSONSync( `${config.tribes}/${req.session.header.xworkon}/clientconf.json` )
+ if( !clientconf.uploadzip ) {
+ return res.status( 404 )
+ .send( { info: [ "missconf" ], models: "Tribes", moreinfo: `no uploadzip in clientconf for ${req.session.header.xworkon} please contact apxtri admin ` } );
+ };
+ const uploadzip = clientconf.uploadzip;
+ const formidable = require( 'formidable' );
+ const form = formidable( { multiples: false } );
+ form.parse( req, function ( err, fields, files ) {
+ //console.log( files.data )
+ var oldPath = files.data.path;
+ if( !Object.keys( clientconf.uploadzip )
+ .includes( files.data.name ) ) {
+ return res.status( 403 )
+ .send( { info: [ "notAllowed" ], models: "Tribes", moreinfo: `file ${files.data.name} not allowed to be upload` } )
+ } else {
+ console.log( "context:", clientconf.uploadzip[ files.data.name ] )
+ var newPath = `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].dest}`;
+ //console.log( 'oldPath', oldPath )
+ //console.log( 'newPath', `${newPath}/${files.data.name}` )
+ fs.moveSync( oldPath, `${newPath}/${files.data.name}`, { overwrite: true } );
+ const cp = require( 'child_process' );
+ //console.log( `7z e -p${clientconf.uploadzip[ files.data.name ].psw} ${newPath}/${files.data.name}` );
+ console.log( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
+ var newFiles = cp.spawnSync( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
+ console.log( newFiles.output.toString() )
+ if( newFiles.output.toString()
+ .includes( 'Everything is Ok' ) ) {
+ if( clientconf.uploadzip[ files.data.name ].callback ) {
+ const integ = require( `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].callback}` )
+ .run();
+ console.log( 'integration', integ )
+ return res.status( integ.status )
+ .send( integ.payload );
+ } else {
+ return res.status( 200 )
+ .send( {
+ info: [ "successfullsent" ],
+ models: "Tribes"
+ } );
+ }
+ } else {
+ return res.status( 400 )
+ .send( {
+ info: [ "zipfileerror" ],
+ models: "Tribes",
+ moreinfo: newFiles.output.toString()
+ } )
+ }
+ }
+ } )
+} );
+
+router.post( '/upload', checkHeaders, isAuthenticated, ( req, res, next ) => {
+ 1 // ACHANGER VIA usage sendjson
+ // url adminapi/tribes/upload?save=tmp&rep=referentials/dataManagement
+ // if save=tmp then store in a tmp file
+ // if save=ok then mv the tmp file to the folder
+ // midlleware hasAccessrighton.js is not apply here only to access/update/create information inside an object
+ // to upload a file a user need accessrights to data: object: C or to Own it
+ // or if dir.file exist a .info.json into folder get shared as C in uuid accessright
+ /*
+ to add in front
+
+ */
+ console.log( 'Envoie image' )
+ console.log( 'body', req.body );
+ console.log( 'params', req.params );
+ //const authfolder = Tribes.checkaccessfiles( req.params.rep, 'C', req.session.header.accessrights, req.session.header.xpaganid );
+ // cheack autorisation to create or replace a file for this accessrights user
+ const authfolder = { ok: "tt" }
+ if( authfolder.ok ) {
+ if( req.params.save == 'file' ) {
+ if( fs.existsSync( req.body.filepond ) ) {
+ fs.mv( req.body.filepond, req.params.rep );
+ }
+ };
+ // voir si c'est toujours pertinent car upload est géré par filepond pour les image
+ if( req.params.save == 'upload' ) {
+ const form = formidable( { multiples: false } );
+ form.parse( req, ( err, fields, files ) => {
+ if( err ) { next( err ); return; }
+ let thefile = files.filebond.path;
+ fs.outputFileSync()
+ console.log( 'thefile:' + thefile );
+ res.writeHead( 200, { 'Content-Type': 'text/plain' } );
+ res.end( thefile );
+ } )
+ }
+ } else {
+ res.status( 403 )
+ .send( 'forbiden access' );
+ }
+} );
+/*
+Manage tribeid into /data/tribee/tribeid
+client space dedicated
+
+@Todo
+clientconfglob copy cut from Referentials.clientconfglob
+clientconf.json copy cut from Referentials.clientconf
+list of tribeid copy cut from Referentials.
+Add a tribeid
+update clientconf
+
+
+*/
+
+
+
+module.exports = router;
diff --git a/apxtri/routes/wwws.js b/apxtri/routes/wwws.js
new file mode 100644
index 0000000..1526e22
--- /dev/null
+++ b/apxtri/routes/wwws.js
@@ -0,0 +1,156 @@
+const express = require("express");
+const path = require("path");
+const fs = require("fs-extra");
+// Classes
+const Wwws = require("../models/Wwws.js");
+// Middlewares
+const checkHeaders = require("../middlewares/checkHeaders");
+const isAuthenticated = require("../middlewares/isAuthenticated");
+
+const router = express.Router();
+// GET api/wwws/conf/:tribeId/:website
+// if profils accessright return the nginx conf in ${conf.dirtown}/tribes/${req.param.tribeId}/www/nginx_${req.params.tribeId}_${req.params.website}.conf
+router.get(
+ "/conf/:tribeId/:website",
+ checkHeaders,
+ isAuthenticated,
+ (req, res) => {
+ res.send(Wwws.configlist(req.params.tribeId));
+ }
+);
+router.post(
+ "/conf/:tribeId/:website",
+ checkHeaders,
+ isAuthenticated,
+ (req, res) => {
+ res.send(Wwws.create(req.params.tribeId));
+ }
+);
+
+/**
+ * @api {put} /adminapi/wwws/webcomponents - Get local web components
+ * @apiGroup Wwws
+ * @apiName getwco
+ * @apiDescription Get web component from backend to localstorage for development. This is anonymous but must be authenticated with accessright to other tribe to get their web component.
For production it will generate a unique id that store to add in updatelocaldb with in production space /js/uniqueid.js css/uniqueid.css pagename.html with link in it
+ *
+ * @apiBody {object} tribelistwco { wco:{tribe:[wconame]}, mode:"dev"|"prod"}
+*/
+router.put(
+ "/updatelocalwcoanonymous",
+ checkHeaders,
+ (req, res) => {
+ console.log("localstorage anonymous for web component", req.session.header.xalias);
+ req.session.header.xprofils = ["anonymous"];
+ console.log(req.session.header.xprofils);
+ //ajouter une detection de changement
+ const getlocal = Wwws.initlocalwco(
+ req.body,
+ req.session.header.xprofils,
+ req.session.header.xlang
+ );
+ res.status(getlocal.status).json(getlocal);
+ }
+);
+
+/**
+ * @api {get} /adminapi/wwws/updatelocaldbanonymous/:tribe/:appname/:pagename/:version - Get localdb for app anonymous only
+ * @apiGroup Wwws
+ * @apiName getappcontextforanonymous
+ * @apiDescription Get data base from backend to localstorage for anonymous (see Get app data model)
+ *
+ * @apiParams {string} tribe (adminapi,smatchit,..) to looking for
+ * @apiParams {string} appname agregate a full data referential to store localy
+ * @apiParams {string} pagename app page name
+ * @apiParams {integer} version the current version
+*/
+router.get(
+ "/updatelocaldbanonymous/:tribe/:appname/:pagename/:version",
+ checkHeaders,
+ (req, res) => {
+ console.log("pass localstorage anonymous", req.session.header.xalias);
+ req.session.header.xprofils = ["anonymous"];
+ console.log(req.session.header.xprofils);
+ //ajouter une detection de changement
+ const getlocal = Wwws.initlocaldata(
+ req.params.tribe,
+ req.params.appname,
+ req.params.pagename,
+ req.params.version,
+ req.session.header.xprofils,
+ req.session.header.xlang
+ );
+ res.status(getlocal.status).json(getlocal);
+ }
+);
+
+/**
+ * @api {get} /adminapi/wwws/updatelocaldb/:tribe/:appname/:pagename/:version - Get localdb for app
+ * @apiGroup Wwws
+ * @apiName getappcontext
+ * @apiDescription Get data base from backend to localstorage for authenticated user
+ *
+ * @apiParams {string} tribe (adminapi,smatchit,..) to looking for
+ * @apiParams {string} appname agregate a full data referential to store localy
+ * @apiParams {string} pagename app page name
+ * @apiParams {integer} version the current version
+ * @apiSuccess {object} contain new version data model for a local web app in a PWA logical in the language of the header or if no new version exist then return
+ * @apiSuccessExample {json} datamodelupdate
+ * {"status":200, "ref":"Wwws", "msg":"datamodelupdate", "data":{version,confpage,profils,schema,options,ref,tpl,tpldata}}
+ * @apiSuccessExample {json} datamodelnoupdate
+ * HTTP/1.1 200 OK
+ * {"status":200, "ref":"Wwws", "msg":"datamodelupdate", "data":{version,confpage,profils,schema,options,ref,tpl,tpldata}}
+ * @apiSuccessExample {json} pagedoesnotexist
+ * {status: 200,ref: "Wwws", msg: "pagedoesnotexist", data: { pagename } }
+ * @apiSuccessExample {json} forbidenaccess
+ * {status: 200,ref: "Wwws", msg: "forbidenaccess",data: { pagename, profils } }
+ */
+router.get(
+ "/updatelocaldb/:tribe/:appname/:pagename/:version",
+ checkHeaders, isAuthenticated,
+ (req, res) => {
+ console.log("pass localstorage", req.session.header.xalias);
+ console.log(req.session.header.xprofils);
+ //ajouter une detection de changement
+ const getlocal = Wwws.initlocaldata(
+ req.params.tribe,
+ req.params.appname,
+ req.params.pagename,
+ req.params.version,
+ req.session.header.xprofils,
+ req.session.header.xlang
+ );
+ res.status(getlocal.status).json(getlocal);
+ }
+);
+/**
+ * @api {get} /api/adminapi/wwws/buildpage/:tribe/:appname/:pagename - Create pagename
+ * @apiGroup Wwws
+ * @apiName createPagename
+ * @apiDescription Create a pagename from /appscreen/template/:pagename with
+ *
+ * @apiParams {string} tribe (adminapi,smatchit,..) to looking for
+ * @apiParams {string} appname agregate a full data referential to store localy
+ * @apiSuccess {object} contain cuurent version of the data model
+ * @apiSuccessExample {json} Success-Response:
+ * HTTP/1.1 200 OK
+ * {"status":200, "ref":"Odmdb", "msg":"datamodelversion", "data":{version}
+ */
+router.get("/buildpage/:tribe/:appname/:pagename", checkHeaders, (req, res) => {
+ console.log("pass get version localstorage");
+ const localdbf = `../../${req.params.tribe}/objects/wwws/itm/${req.params.appname}`;
+ if (!fs.existsSync(localdbf)) {
+ return res
+ .status(404)
+ .json({ status: 404, ref: "Wwws", msg: "localdbnotfound", data: {} });
+ }
+ res
+ .status(200)
+ .json({
+ status: 200,
+ ref: "Wwws",
+ msg: "lastversion",
+ data: { version: fs.readJSONSync(localdbf).version },
+ });
+});
+
+module.exports = router;
diff --git a/apxtri/setup/apidoc_adminapi.json b/apxtri/setup/apidoc_adminapi.json
new file mode 100644
index 0000000..a1a3cc1
--- /dev/null
+++ b/apxtri/setup/apidoc_adminapi.json
@@ -0,0 +1,36 @@
+ {
+ "comment": "specific api for tribes conf have to be in conf/apidoc/apidoc_{tribe}.json run: $ tribe=smatchit yarn apidoctribe",
+ "name": "apxtri.be",
+ "version": "1.0.0",
+ "title": "apiDoc for apxtri endpoint",
+ "description": "Core api documentation that manage nation objects (Pagans, Nations, Towns, Blockchain, Tracking, Notifications, Odmdb, Tribes (management), Wwws)",
+ "url": "https://testwall-ants.ndda.fr/api",
+ "outputcoment":"pour rappel l'option output ne fonctionne pas en fichier de config ajouté dans package.json",
+ "output":"../nationchains/tribes/adminapi/www/cdn/apidoc/",
+ "input":[
+ "./middlewares",
+ "./routes"
+ ],
+ "order": [
+ "Middlewares",
+ "Odmdb",
+ "Nationchains",
+ "Pagans",
+ "Notifications"
+ ],
+ "template": {
+ "forceLanguage": "en",
+ "showRequiredLabels": true,
+ "withCompare": true,
+ "withGenerator": true,
+ "aloneDisplay": false
+ },
+ "header": {
+ "title": "Introduction",
+ "filename": "header.md"
+ },
+ "footer": {
+ "title": "Best practices",
+ "filename": "footer.md"
+ }
+ }
diff --git a/apxtri/setup/initadminapi.json b/apxtri/setup/initadminapi.json
new file mode 100644
index 0000000..1a21210
--- /dev/null
+++ b/apxtri/setup/initadminapi.json
@@ -0,0 +1,52 @@
+{
+ "tribeId": "adminapi",
+ "urlinit":["http://apx.adminapi","https://testwall-ants.ndda.fr"],
+ "idxname":{"towns":"towns","nation":"nations","pagans":"alias"},
+ "dns": [
+ "apx.adminapi"
+ ],
+ "status": "unchain",
+ "nationId": "ants",
+ "townId": "newdev",
+ "sudoUser": "phil",
+ "api": {
+ "port": 3021,
+ "activelog": [],
+ "languages": [
+ "en"
+ ],
+ "exposedHeaders": [
+ "xdays",
+ "xhash",
+ "xalias",
+ "xlang",
+ "xtribe",
+ "xapp",
+ "xuuid"
+ ],
+ "appset": {
+ "trust proxy": true
+ },
+ "json": {
+ "limit": "10mb",
+ "type": "application/json",
+ "rawBody": true
+ },
+ "bodyparse": {
+ "urlencoded": {
+ "limit": "50mb",
+ "extended": true
+ },
+ "json": {
+ "limit": "500mb"
+ }
+ }
+ },
+ "nginx": {
+ "restart": "sudo systemctl restart nginx",
+ "worker_connections": 1024
+ },
+ "emailcontact": "",
+ "smtp": {},
+ "sms": {}
+}
\ No newline at end of file
diff --git a/apxtri/setup/nginx.maincf b/apxtri/setup/nginx.maincf
new file mode 100644
index 0000000..1a7544e
--- /dev/null
+++ b/apxtri/setup/nginx.maincf
@@ -0,0 +1,42 @@
+user {{sudoUser}};
+worker_processes auto;
+error_log /var/log/nginx/error.log notice;
+pid /var/run/nginx.pid;
+#include /etc/nginx/modules-enabled/*.conf;
+
+events {
+ worker_connections 1024;
+}
+http {
+ include /etc/nginx/mime.types;
+ default_type application/octet-stream;
+
+ log_format main '$time_iso8601###$status###$request';
+
+ log_format tracker escape=json
+ '{"time":"$time_iso8601","alias":"$arg_alias","uuid":"$arg_uuid",'
+ '"lg":"$arg_lg","consentcookie":"$arg_consentcookie","version":"$arg_version",'
+ '"srckey":"$arg_srckey","request_filename":"$request_filename",'
+ '"remoteaddr":"$remote_addr","httpxforwardedfor":"$http_x_forwarded_for",'
+ '"httpreferer":"$http_referer","httpuseragent":"$http_user_agent","request":"$request"}';
+
+ log_format mainold '$remote_addr - $remote_user [$time_local] "$request" '
+ '$status $body_bytes_sent "$http_referer" '
+ '"$http_user_agent" "$http_x_forwarded_for"';
+
+ sendfile on;
+ keepalive_timeout 65;
+ gzip on;
+ gzip_vary on;
+ gzip_proxied any;
+ gzip_comp_level 6;
+ gzip_buffers 4 32k;
+ gzip_http_version 1.1;
+ gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript application/x-font-ttf application/javascript font/eot font/opentype image/svg+xml image/x-icon text/plain;
+
+ ##
+ # Virtual Host Configs
+ ##
+ include /etc/nginx/conf.d/*.conf;
+ include {{townpath}}/*/*/nginx/*.conf;
+}
diff --git a/apxtri/setup/nginx.wwwscf b/apxtri/setup/nginx.wwwscf
new file mode 100644
index 0000000..f8d7344
--- /dev/null
+++ b/apxtri/setup/nginx.wwwscf
@@ -0,0 +1,98 @@
+server {
+server_name {{website}}.{{tribe}};
+add_header 'Access-Control-Allow-Origin' '*' always;
+add_header 'Access-Control-Allow-Credentials' 'true' always;
+add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+add_header 'Access-Control-Allow-Headers' '*' always;
+# usefull to debug nginx conf 3 next line:
+access_log {{townpath}}/{{tribe}}/logs/nginx/{{tribe}}_{{website}}.access.log main;
+error_log {{townpath}}/{{tribe}}/logs/nginx/{{tribe}}_{{website}}.error.log debug;
+add_header xdebug "testmsg debug: $uri - $request - liste args: $args - url:$arg_url - alias:$arg_alias " always;
+
+set $trackme 0;
+if ( $uri ~ ^/trk/ ){
+ set $trackme 1;
+}
+access_log {{townpath}}/{{tribe}}/logs/nginx/{{tribe}}_{{website}}.trk.log tracker if=$trackme ;
+location ~* /trk/ {
+ if ( $uri ~ ^/trk/redirect ){
+ return 301 $arg_url;
+ }
+ rewrite ^/trk/(.*)$ /$1;
+}
+location /adminapi/Checkjson.js {
+ alias {{townpath}}/adminapi/apxtri/models/Checkjson.js;
+}
+location ~* /adminapi/objects/tplstrings/ {
+ rewrite /adminapi/objects/tplstrings/(.*$) /$1 break;
+ root {{townpath}}/adminapi/objects/tplstrings/;
+}
+location ~* /adminapi/schema/ {
+ rewrite /adminapi/schema/(.*$) /$1 break;
+ root {{townpath}}/adminapi/schema/;
+}
+location ~* /{{tribe}}/objects/tplstrings/ {
+ rewrite /{{tribe}}/objects/tplstrings/(.*$) /$1 break;
+ root {{townpath}}/{{tribe}}/objects/tplstrings/;
+}
+location ~* /{{tribe}}/schema/ {
+ rewrite /{{tribe}}/schema/(.*$) /$1 break;
+ root {{townpath}}/{{tribe}}/schema/;
+}
+location ~* /objectimg/.*/img/ {
+ rewrite /objectimg/(.*) /$1 break;
+ root {{townpath}}/{{tribe}}/objects/;
+}
+
+location /cdn/ {
+ expires 1y;
+ add_header Cache-Control "public";
+ rewrite /cdn/(.*$) /$1 break;
+ root {{townpath}}/{{tribe}}/objects/wwws/cdn/;
+}
+
+location /api/ {
+ rewrite /api/(.*$) /$1 break;
+ proxy_pass http://localhost:3020;
+ proxy_redirect off;
+ include proxy_params;
+}
+
+location /socket/ {
+ proxy_pass http://127.0.0.1:3030;
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+}
+
+location /apxwebapp/ {
+ rewrite /apxwebapp/(.*$) /$1 break;
+ root /media/phil/usbfarm/apxtowns/apxwebapp;
+ index index.html index_fr.html index_en.html;
+}
+location /apidoc/ {
+ root {{townpath}}/{{tribe}}/objects/wwws/apidoc/;
+ index index.html;
+}
+location /appbuilder/{
+ root {{townpath}}/ndda/objects/wwws/;
+}
+location /src/ {
+rewrite /src/(.*$) /$1 break;
+root {{townpath}}/{{tribe}}/objects/wwws/{{website}}/src/;
+index index.html index_fr.html;
+}
+location / {
+root {{townpath}}/{{tribe}}/objects/wwws/{{website}}/dist/;
+index index.html index_fr.html;
+}
+error_page 404 /404.html;
+# redirect server error pages to the static page /50x.html
+#
+error_page 500 502 503 504 /50x.html;
+location = /50x.html {
+root /usr/local/nginx/html;
+}
+}
\ No newline at end of file
diff --git a/backup.sh b/backup.sh
new file mode 100644
index 0000000..9c0a8e7
--- /dev/null
+++ b/backup.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# backup each night
+# param $1 path to town name
+# param $2 tribe name
+# param $3 mainfolder that contains folderlist; stored as tribe/backups/prefix.mainfolder.tar.gz
+# param $4 prefix special keyword dayname is convert to $(date +%A)
+# param $5 folderlist split by a space in doublequote "folder1 folder2"
+#
+# Exemple:
+# For setup
+#. /media/phil/usbfarm/apxtowns/dev-ants/adminapi/backup.sh /media/phil/usbfarm/apxtowns/dev-ants adminapi objects setup. "nations options pagans towns tplstrings wwws"
+#. /media/phil/usbfarm/apxtowns/dev-ants/adminapi/backup.sh /media/phil/usbfarm/apxtowns/dev-ants adminapi apxtri setup. "middlewares models routes apidoc_adminapi.json apxchat.js apxtri.js package.json"
+# For backup
+#. /media/phil/usbfarm/apxtowns/dev-ants/adminapi/backup.sh /media/phil/usbfarm/apxtowns/dev-ants adminapi objects dayname "persons"
+#
+#
+# add in crontab each day at 2h00 with echo "0 2 * * * ${pathtown}/${town}/adminapi/backup.sh ${pathtown}/${town} ${tribe} objects setup. \"list of object to backup\"" | crontab -
+# add in crontab each day at 2h10 with echo "10 2 * * * ${pathtown}/${town}/adminapi/backup.sh ${pathtown}/${town} ${tribe} objects $(date +%A) \"list of object to backup\"" | crontab -
+# add it for setup at least
+
+pathtown=$1
+tribe=$2
+mainfolder=$3
+prefix=$4
+if [ "$prefix" = "dayname" ]; then
+ prefix=$(date +%A)
+fi
+folderlist=$5
+# setup adminapi/objects
+declare -a folders=($folderlist)
+cd ${pathtown}
+lstfile=""
+for folder in "${folders[@]}"; do
+ lstfile="${lstfile} ${tribe}/${mainfolder}/${folder}"
+done
+tar -czf ${tribe}/backups/${prefix}.${mainfolder}.tar.gz ${lstfile}
diff --git a/package.json b/package.json
new file mode 100755
index 0000000..03fbdcf
--- /dev/null
+++ b/package.json
@@ -0,0 +1,98 @@
+{
+ "name": "adminapi",
+ "version": "1.0.0",
+ "logo": {
+ "file": "objects/wwws/cdn/share/logo/logocarredark.png"
+ },
+ "description": "apxtri Decentralized Autonomous Organization",
+ "main": "./apxtri/apxtri.js",
+ "repository": {
+ "url": "https://gitea.ndda.fr/apxtri/adminapi.git",
+ "type": "git"
+ },
+ "scripts": {
+ "startapx": "pm2 start apxtri.js --log-date-format 'DD-MM HH:mm:ss.SSS'",
+ "restartapx": "pm2 restart apxtri.js --log-date-format 'DD-MM HH:mm:ss.SSS'",
+ "dev": "NODE_MODE=dev node ./apxtri/apxtri.js",
+ "unittest": "node unittest.js",
+ "tar2prod": "rm ../objects/wwws/cdn/share/apxtriV* && tar -czf ../objects/wwws/cdn/share/apxtriV$version.tar --exclude='node_modules' --exclude='.git' ../../adminapi/apxtri ../../adminapi/schema ../../adminapi/objects ../../adminapi/template ../../adminapi/objects/tribes/itm/adminapi.json",
+ "apidoc": "apidoc -c ../../$tribe/apxtri/apidoc_$tribe.json -o ../../$tribe/objects/wwws/apidoc/",
+ "publishtestwall": "scp -r /media/phil/usbfarm/apxtowns/dev-ants/tribes/$space phil@wall-ants://home/phil/apxtowns/testwall-ants/tribes/$space/..",
+ "publishwall": "scp -r /media/phil/usbfarm/apxtowns/dev-ants/tribes/$space phil@wall-ants://home/phil/apxtowns/wall-ants/tribes/$space/..",
+ "publishhouse": "scp -r /media/phil/usbfarm/apxtowns/dev-ants/tribes/$space phil@house-ants://home/phil/apxtowns/house-ants/tribes/$space/..",
+ "src:css": "tailwindcss --watch -c ../../$tribe/objects/wwws/$webapp/tailwind.config.js -i ../../$tribe/objects/wwws/$webapp/src/static/css/twdevstyle.css -o ../../$tribe/objects/wwws/$webapp/src/static/css/twstyle.css",
+ "dist:css": "tailwindcss --minify -i ../../$tribe/objects/wwws/$webapp/src/static/css/twdevstyle.css -o ../../$tribe/objects/wwws/$webapp/dist/static/css/twstyle.css"
+ },
+ "commentscript": "cf wiki apxtri doc for details: yarn startpm2 -n teswallants, yarn pm2 stop testwallants, yarn pm2 delete testwallants, yarn pm2 logs --lines 200 testwall, yarn pm2 monit -n testwallants, yarn pm2 save tribe=tribeid yarn apidoc to build apidoc // space=adminapi/www/cdn/apidoc yarn publishtestwall ",
+ "commentinstallprod": "A lancer en yarn 'version=1 yarn run tar2prod' pour generer un tar d'installation disponible en ligne sur towndns/cdn/share/apxtriV.tar pour install voir le readme qui run in apxtowns mkdir newtown && tar -xf appv1.tar -C newtown",
+ "maintainers": [
+ {
+ "name": "Filou",
+ "email": "support@need-data.com",
+ "url": "https://need-data.com"
+ }
+ ],
+ "contributors": [
+ {
+ "name": "Antonin Hattabe",
+ "email": "",
+ "url": ""
+ },
+ {
+ "name": "Maxime Serrato",
+ "email": "",
+ "url": ""
+ },
+ {
+ "name": "Charles Filuzeau",
+ "email": "",
+ "url": ""
+ }
+ ],
+ "author": {
+ "name": "patma",
+ "email": "patma@mail.be",
+ "url": ""
+ },
+ "license": "MIT",
+ "dependencies": {
+ "@editorjs/editorjs": "^2.26.5",
+ "@google/generative-ai": "^0.11.3",
+ "apidoc": "^0.54.0",
+ "async": "^3.2.0",
+ "axios": "^1.6.2",
+ "baunsu": "^0.2.3",
+ "bcrypt": "^5.0.0",
+ "cors": "^2.8.4",
+ "crypto-js": "^4.1.1",
+ "dayjs": "^1.11.7",
+ "dns-sync": "^0.2.1",
+ "express": "^4.16.3",
+ "faiss-node": "^0.5.1",
+ "formidable": "^2.1.1",
+ "fs-extra": "^11.1.0",
+ "glob": "^7.1.2",
+ "hosts": "^0.0.1",
+ "jszip": "^3.7.1",
+ "jwt-simple": "^0.5.1",
+ "luxon": "^2.1.1",
+ "moment": "^2.22.1",
+ "mustache": "^2.3.0",
+ "nodemailer": "^6.9.7",
+ "openpgp": "^5.10.1",
+ "path": "^0.12.7",
+ "pm2": "^2.10.4",
+ "readline-sync": "^1.4.10",
+ "sharp": "^0.33.4",
+ "smtp-client": "^0.4.0",
+ "stripe": "^14.4.0",
+ "uuid": "^9.0.0",
+ "yarn": "^1.22.21"
+ },
+ "devDependencies": {
+ "@tailwindcss/typography": "^0.5.13",
+ "daisyui": "^4.12.10",
+ "postcss-import": "^16.1.0",
+ "tailwindcss": "^3.4.6"
+ }
+}
diff --git a/schema/conf.json b/schema/conf.json
new file mode 100644
index 0000000..7a8e557
--- /dev/null
+++ b/schema/conf.json
@@ -0,0 +1,5 @@
+{
+ "schema":"schema/",
+ "objects":[{"name":"nations","lastversion":0},{"name":"pagans","lastversion":0},{"name":"towns","lastversion":0},{"name":"tribes","lastversion":0}],
+ "comment":"schema are in english to get translate part a /lg/schemaname_lg.json allow to replace by lg language the relevant key. Each time a modification is done in schema lastupdate take a timestamp"
+}
\ No newline at end of file
diff --git a/schema/frenchlocation.json b/schema/frenchlocation.json
new file mode 100644
index 0000000..9999fd1
--- /dev/null
+++ b/schema/frenchlocation.json
@@ -0,0 +1,63 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "/schema/frenchlocation",
+ "title": "French adresse location system from https://api-adresse.data.gouv.fr/search/?q=8+bd+du+port+massy that respect GeoCodeJSON",
+ "description": "Manage properly postal adresse",
+ "type": "object",
+ "properties": {
+ "type": {
+ "title": "Type of result found, housenumber is a physical adresse, street position at street, locality: lieu-dit, municipality",
+ "type": "string"
+ },
+ "housenumber":{
+ "title":"number with indice bis ter A B"
+ },
+ "street":{
+ "title":"Name of street"
+ },
+ "name": {
+ "title": "Number plaque and street name (no town)",
+ "type": "string"
+ },
+ "label": {
+ "title": "Full address description number type of street, name of street Postal code Town name",
+ "type": "string"
+ },
+ "postcode": {
+ "title": "5 numbers french Post code",
+ "pattern": "^[0-9]{5}$"
+ },
+ "citycode": {
+ "title": "code INSEE",
+ "type": "string"
+ },
+ "city": {
+ "title": "City name",
+ "type": "string"
+ },
+ "zoning":{
+ "title":"Number of kilometer around this position",
+ "type":"integer",
+ "minimum":0
+ },
+ "position": {
+ "type": "object",
+ "properties": {
+ "latitude": {
+ "type": "number",
+ "minimum": -90.0,
+ "maximum": 90.0
+ },
+ "longitude": {
+ "type": "number",
+ "minimum": -180.0,
+ "maximum": 180.0
+ }
+ }
+ },
+ "context": {
+ "title": "Contexte of this address Department number, Department Name, Region ",
+ "type": "string"
+ }
+ }
+}
diff --git a/schema/lg/httperror_en.json b/schema/lg/httperror_en.json
new file mode 100644
index 0000000..60674b6
--- /dev/null
+++ b/schema/lg/httperror_en.json
@@ -0,0 +1,275 @@
+{
+ "100": {
+ "description": "Continue",
+ "examples": [
+ "Continue with the data transfer.",
+ "You may proceed with the next part of the request.",
+ "The server is ready for the next step in the request."
+ ]
+ },
+ "101": {
+ "description": "Switching Protocols",
+ "examples": [
+ "The server is changing the protocol on the request.",
+ "The protocol used for this request is being upgraded.",
+ "Switching to a different communication protocol."
+ ]
+ },
+ "200": {
+ "description": "OK",
+ "examples": [
+ "The operation was successful.",
+ "The request has been successfully completed.",
+ "Everything is fine, and the request is successful."
+ ]
+ },
+ "201": {
+ "description": "Created",
+ "examples": [
+ "A new resource has been successfully created.",
+ "The request resulted in the creation of a new resource.",
+ "Your request has led to the creation of a new item."
+ ]
+ },
+ "202": {
+ "description": "Accepted",
+ "examples": [
+ "The request has been accepted for processing.",
+ "Your request has been acknowledged and queued for processing.",
+ "We've received your request and will take action."
+ ]
+ },
+ "204": {
+ "description": "No Content",
+ "examples": [
+ "The request was successful, but there is no response body.",
+ "Your request was processed, but there's nothing to show in the response.",
+ "This request did not return any content."
+ ]
+ },
+ "206": {
+ "description": "Partial Content",
+ "examples": [
+ "The server is returning part of the requested data.",
+ "You requested a range of data, and we're sending a portion of it.",
+ "Here's a partial response to your request."
+ ]
+ },
+ "300": {
+ "description": "Multiple Choices",
+ "examples": [
+ "The request has multiple possible responses, and the user or client must choose one.",
+ "We can fulfill your request in several ways. Please choose one.",
+ "You have multiple options for the requested resource."
+ ]
+ },
+ "301": {
+ "description": "Moved Permanently",
+ "examples": [
+ "The requested resource has permanently moved to a new location.",
+ "This resource is no longer available here; it's moved to a new address.",
+ "The URL you're looking for has been permanently redirected."
+ ]
+ },
+ "302": {
+ "description": "Found",
+ "examples": [
+ "The requested resource is temporarily located at a different URL.",
+ "You can find what you're looking for at a different address for now.",
+ "The resource you want is temporarily located elsewhere."
+ ]
+ },
+ "304": {
+ "description": "Not Modified",
+ "examples": [
+ "The requested resource has not been modified since the specified time.",
+ "Your cached data is still up-to-date; there have been no changes.",
+ "The server confirms that your data is current."
+ ]
+ },
+ "400": {
+ "description": "Bad Request",
+ "examples": [
+ "The request is malformed or invalid.",
+ "Something is wrong with the request parameters.",
+ "Your request does not meet the server's requirements."
+ ]
+ },
+ "401": {
+ "description": "Unauthorized",
+ "examples": [
+ "Authentication is required, and the user or client failed to provide valid credentials.",
+ "You must log in or provide valid credentials to access this resource.",
+ "Access is restricted. Please provide valid authentication."
+ ]
+ },
+ "403": {
+ "description": "Forbidden",
+ "examples": [
+ "Access to the requested resource is forbidden.",
+ "You do not have permission to access this resource.",
+ "Sorry, but you're not allowed to access this."
+ ]
+ },
+ "404": {
+ "description": "Not Found",
+ "examples": [
+ "The requested resource does not exist on the server.",
+ "The server could not find the page you're looking for.",
+ "Sorry, but what you're searching for isn't here."
+ ]
+ },
+ "405": {
+ "description": "Method Not Allowed",
+ "examples": [
+ "The HTTP method used in the request is not allowed for the requested resource.",
+ "The server does not support the method you're trying to use.",
+ "This resource does not allow the requested HTTP method."
+ ]
+ },
+ "406": {
+ "description": "Not Acceptable",
+ "examples": [
+ "The requested resource cannot provide a response that is acceptable according to the request's headers.",
+ "We cannot provide the response you expect based on your request headers.",
+ "Sorry, but we can't fulfill your request as specified."
+ ]
+ },
+ "407": {
+ "description": "Proxy Authentication Required",
+ "examples": [
+ "Authentication is required to access the requested resource via a proxy.",
+ "To access this resource through a proxy, you must provide valid authentication.",
+ "Please provide valid credentials for proxy access."
+ ]
+ },
+ "408": {
+ "description": "Request Timeout",
+ "examples": [
+ "The server did not receive a complete request within the expected time.",
+ "Your request took too long to arrive at the server.",
+ "Sorry, your request has timed out."
+ ]
+ },
+ "409": {
+ "description": "Conflict",
+ "examples": [
+ "The request could not be completed due to a conflict with the current state of the target resource.",
+ "There's a conflict with the current state of the resource; please try again.",
+ "Sorry, there's a conflict with the requested action."
+ ]
+ },
+ "410": {
+ "description": "Gone",
+ "examples": [
+ "The requested resource is no longer available and has been intentionally removed.",
+ "The resource you're looking for is gone and will not return.",
+ "This resource has been permanently removed."
+ ]
+ },
+ "411": {
+ "description": "Length Required",
+ "examples": [
+ "The server requires a content length to be specified in the request headers.",
+ "Your request is missing a required content length header.",
+ "Please include a 'Content-Length' header in your request."
+ ]
+ },
+ "412": {
+ "description": "Precondition Failed",
+ "examples": [
+ "A precondition in the request headers was not met.",
+ "The server expected certain conditions to be met, but they were not.",
+ "Sorry, the required conditions were not fulfilled."
+ ]
+ },
+ "413": {
+ "description": "Request Entity Too Large",
+ "examples": [
+ "The request entity is too large for the server to process.",
+ "Your request body is too big for us to handle.",
+ "Please reduce the size of your request entity."
+ ]
+ },
+ "414": {
+ "description": "Request-URI Too Long",
+ "examples": [
+ "The URI provided in the request is too long for the server to process.",
+ "The URL in your request is excessively long; please shorten it.",
+ "The request URI you provided is too lengthy."
+ ]
+ },
+ "415": {
+ "description": "Unsupported Media Type",
+ "examples": [
+ "The server cannot process the request because the media type is not supported.",
+ "We cannot handle the content type you specified.",
+ "Sorry, we do not support the requested media type."
+ ]
+ },
+ "416": {
+ "description": "Requested Range Not Satisfiable",
+ "examples": [
+ "The requested range cannot be satisfied by the server.",
+ "We cannot provide the content range you requested.",
+ "Sorry, but we cannot fulfill the requested content range."
+ ]
+ },
+ "417": {
+ "description": "Expectation Failed",
+ "examples": [
+ "The server could not meet the expectations specified in the request's Expect header.",
+ "We were unable to fulfill the expectations you set in your request headers.",
+ "Sorry, but we could not meet your expectations."
+ ]
+ },
+ "500": {
+ "description": "Internal Server Error",
+ "examples": [
+ "Something went wrong on the server's end.",
+ "We apologize, but an unexpected error occurred.",
+ "The server is currently experiencing technical difficulties."
+ ]
+ },
+ "501": {
+ "description": "Not Implemented",
+ "examples": [
+ "The server does not support the functionality required to fulfill the request.",
+ "Sorry, but the requested functionality is not available on this server.",
+ "We have not implemented the feature you're looking for."
+ ]
+ },
+ "502": {
+ "description": "Bad Gateway",
+ "examples": [
+ "The server acting as a gateway or proxy received an invalid response from the upstream server.",
+ "The gateway or proxy received an unexpected response from the upstream server.",
+ "Sorry, there's an issue with the gateway or proxy."
+ ]
+ },
+ "503": {
+ "description": "Service Unavailable",
+ "examples": [
+ "The server is temporarily unavailable to handle the request.",
+ "We're currently unavailable due to maintenance; please try again later.",
+ "Sorry, the service is not available right now."
+ ]
+ },
+ "504": {
+ "description": "Gateway Timeout",
+ "examples": [
+ "The server acting as a gateway or proxy did not receive a timely response from the upstream server.",
+ "We're experiencing a timeout while waiting for the upstream server.",
+ "Sorry, but there's a timeout issue with the gateway."
+ ]
+ },
+ "505": {
+ "description": "HTTP Version Not Supported",
+ "examples": [
+ "The HTTP version used in the request is not supported by the server.",
+ "Your client is using an unsupported HTTP version; please update.",
+ "Sorry, but we do not support the HTTP version used in your request."
+ ]
+ }
+ }
+
\ No newline at end of file
diff --git a/schema/lg/nations_fr.json b/schema/lg/nations_fr.json
new file mode 100644
index 0000000..7e37087
--- /dev/null
+++ b/schema/lg/nations_fr.json
@@ -0,0 +1,19 @@
+{
+ "title": "Description d'une nation",
+ "description":"Nation schema in the apxtri world",
+ "$comment":"",
+ "properties":{
+ "nationId":{
+ "title":"Nom de nation",
+ "description":"Nom unique identifiant une nation composé de a-z et de 0-9 permet de fédérer des règles de fonctionnement"
+ },
+ "dtcreate":{
+ "title":"Date de creation",
+ "description":"Creation de la nation"
+ },
+ "contracts":{
+ "title":"Le contrat social",
+ "description":"Indique l'emplacement du contrat qu'accepte toutes les villes"
+ }
+ }
+}
\ No newline at end of file
diff --git a/schema/lg/pagans_fr.json b/schema/lg/pagans_fr.json
new file mode 100644
index 0000000..d3d09a6
--- /dev/null
+++ b/schema/lg/pagans_fr.json
@@ -0,0 +1,14 @@
+{
+ "title": "L'identité d'un pagan",
+ "description": "Decrit les informations de l'identité numérique public",
+ "properties": {
+ "publickey": {
+ "title": "Clé public associés à l'alias",
+ "description": "Cle public généré par openpgp.js lors de la creation d'un compte"
+ },
+ "alias": {
+ "title": "Alias",
+ "description": "Chaine de texte mnémotechnique qui se retient plus facilement qu'une clé public"
+ }
+ }
+}
diff --git a/schema/nations.json b/schema/nations.json
new file mode 100644
index 0000000..f08c616
--- /dev/null
+++ b/schema/nations.json
@@ -0,0 +1,39 @@
+{
+ "$schema":"http://json-schema.org/schema#",
+ "$id":"adminapi/schema/nations",
+ "title": "Nation definition",
+ "description": "A nation from apxtri world",
+ "$comment":"see ./lg/nations_lg.json for description in your languange lg",
+ "type": "object",
+ "properties": {
+ "nationId": {
+ "title":"Unique nation name",
+ "description":"A unique string that define a nation a-z and 0-9 onlyin a social world",
+ "type": "string",
+ "pattern":"^[a-z0-9]*$"
+ },
+ "dtcreate": {
+ "title":"Creation date",
+ "description":"Nation date of creation",
+ "type": "string",
+ "format":"date-time"
+ },
+ "contracts":{
+ "title":"Social contract of the nation",
+ "description":"A uri of the signed js contract that rules this nation",
+ "type": "string",
+ "format":"uri"
+ }
+ },
+ "required": ["nationId", "dtcreate","contracts"],
+ "additionalProperties":false,
+ "apxid":"nationId",
+ "apxuniquekey":["nationId"],
+ "apxidx": [
+ { "name":"lst_nationId","type":"array","keyval": "nationId"},
+ { "name":"nations","type":"view","keyval":"nationId","objkey":[],"filter":""}
+ ],
+ "apxaccessrights":{
+ "pagan":{"C":[],"R":[]}
+ }
+}
diff --git a/schema/pagans.json b/schema/pagans.json
new file mode 100644
index 0000000..520d2d4
--- /dev/null
+++ b/schema/pagans.json
@@ -0,0 +1,52 @@
+{
+ "$id": "https://smatchit.io/schema/pagan",
+ "$comment": "To create account bin apxtri",
+ "title": "Pagans identity",
+ "description": "A numeric id in the nationchains world",
+ "type": "object",
+ "properties": {
+ "publickey": {
+ "title": "Alias's publickey",
+ "description": "Public key generate with openpgp.js",
+ "type": "string",
+ "format": "pgppublickey"
+ },
+ "alias": {
+ "title": "Alias",
+ "description": "text to remember easily a public key",
+ "type": "string",
+ "minLength": 3,
+ "pattern": "^[a-z0-9]*$"
+ },
+ "dt_delete": {
+ "title": "Date of death",
+ "description": "Date of alias delete request, your will will be applied",
+ "type": "string",
+ "format": "date-time"
+ },
+ "will": {
+ "title": "Will script after death",
+ "description": "This will script will be applied on your data 30 days after your alias death",
+ "type": "string"
+ }
+ },
+ "required": ["publickey", "alias"],
+ "apxid": "alias",
+ "apxuniquekey": ["publickey"],
+ "apxidx": [
+ { "name": "lst_alias", "type": "array", "keyval": "alias" },
+ {
+ "name": "alias",
+ "type": "view",
+ "keyval": "alias",
+ "objkey": [],
+ "filter": ""
+ }
+ ],
+ "apxaccessrights": {
+ "owner": { "R": [], "U": [], "D": [] },
+ "anonymous": { "C": [], "R": ["alias"] },
+ "pagans": { "R": ["alias", "publickey"] },
+ "mayor": { "R": [], "D": [] }
+ }
+}
diff --git a/schema/screens.json b/schema/screens.json
new file mode 100644
index 0000000..27fe61f
--- /dev/null
+++ b/schema/screens.json
@@ -0,0 +1,69 @@
+{
+ "$id": "https://smatchit.io/schema/screens",
+ "$comment": "To describe screens as tree to navigate inside",
+ "title": "Screens description",
+ "description": "Each propertie value is a mustache template string where a data must be provide to display screen with value",
+ "type": "object",
+ "properties": {
+ "screenid":{
+ "title":"Screen identification used in html tag id",
+ "type":"string"
+ },
+ "title": {
+ "title": "A title in a screen",
+ "type": "string"
+ },
+ "subtitle": {
+ "title": "A subtitle in a screen",
+ "type": "string"
+ },
+ "icon": {
+ "title": "an icon name",
+ "type": "string"
+ },
+ "warning": {
+ "title": "A text to highlight something, this text is between 2 ! icon",
+ "type": "string"
+ },
+ "formcontrol": {
+ "title": "A key word to presents this content",
+ "type": "string",
+ "enum":["squarebluebtn","listbtn","form"]
+ },
+ "forms":{
+ "title":"Liste of data collection element into the screen",
+ "type":"array",
+ "items":{"type":"object"}
+ },
+ "action": {
+ "title": "List of possible action on this element",
+ "type": "string",
+ "enum":["onclick"]
+ },
+ "function":{
+ "title":"Function name to call, if action",
+ "comment":"other function than nextlevel",
+ "type":"string"
+ },
+ "params":{
+ "title":" an object containning parameter to send to function",
+ "comment":"can be empty {}",
+ "type":"object"
+ },
+ "nextlevel":{
+ "title":"List of new screens to show if function is nextlevel $ref:# means it same current schema",
+ "type":"array",
+ "items":{"$ref":"#"}
+ }
+ },
+ "required": ["screenid", "title"],
+ "apxid": "screenid",
+ "apxuniquekey": ["screenid"],
+ "apxidx": [
+ { "name": "lst_screens", "type": "array", "keyval": "screenid" }
+ ],
+ "apxaccessrights": {
+ "owner": { "R": [], "U": [], "D": [] },
+ "anonymous": { "R": [] }
+ }
+ }
diff --git a/schema/towns.json b/schema/towns.json
new file mode 100644
index 0000000..410ee39
--- /dev/null
+++ b/schema/towns.json
@@ -0,0 +1,88 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "schema/towns",
+ "title": "Town",
+ "description": "A town belonging to a nation from apxtri world",
+ "type": "object",
+ "properties": {
+ "townId": {
+ "type": "string",
+ "pattern": "^[a-z0-9]*$"
+ },
+ "nationId": {
+ "type": "string",
+ "options": {
+ "$ref": "adminapi/objects/nations/idx/lst_nationId.json"
+ }
+ },
+ "owner": {
+ "type": "string",
+ "options": {
+ "$ref": "adminapi/objects/pagans/idx/lst_alias.json"
+ }
+ },
+ "mayorId": {
+ "comment": "todo, to be removed by owner in models",
+ "type": "string",
+ "options": {
+ "$ref": "adminapi/objects/pagans/idx/lst_alias.json"
+ }
+ },
+ "IP": {
+ "comment": "IP adress where the dns answer to access in direct to this IP into a /etc/hosts or private dns",
+ "type": "string"
+ },
+ "status": {
+ "title": "The current status of this town",
+ "description": "Unchain is used for dev they are no public domain, chain means that a public domain name is available, elected means the consensus make this town as the master of adminapi data. A town is elected for a period of time necessary to get new election",
+ "default": "unchain",
+ "type": "string",
+ "enum": [
+ "unchain",
+ "chain",
+ "elected"
+ ]
+ },
+ "dns": {
+ "type": "string"
+ }
+ },
+ "required": [
+ "townId",
+ "status",
+ "nationId",
+ "dns"
+ ],
+ "apxid": "townId",
+ "apxuniquekey": [
+ "townId",
+ "dns"
+ ],
+ "apxidx": [
+ {
+ "name": "lst_townId",
+ "type":"array",
+ "keyval": "townId"
+ },
+ {
+ "name": "towns",
+ "keyval": "townId",
+ "type": "view",
+ "objkey": [],
+ "filter": ""
+ }
+ ],
+ "apxaccessrights": {
+ "pagan": {
+ "C": [],
+ "R": []
+ },
+ "owner": {
+ "D": [],
+ "U": [
+ "owner",
+ "status"
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/schema/tribes.json b/schema/tribes.json
new file mode 100644
index 0000000..4044cb0
--- /dev/null
+++ b/schema/tribes.json
@@ -0,0 +1,110 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "nationchains/schema/tribes",
+ "title": "Tribes",
+ "description": "A tribe belonging to a town",
+ "type": "object",
+ "properties": {
+ "tribeId": {
+ "type": "string",
+ "pattern": "^[a-z0-9]*$"
+ },
+ "townId": {
+ "type": "string",
+ "options": {
+ "$ref": "adminapi/objects/towns/idx/lst_townId.json"
+ }
+ },
+ "nationId": {
+ "type": "string",
+ "options": {
+ "$ref": "adminapi/objects/nations/idx/lst_nationId.json"
+ }
+ },
+ "owner": {
+ "type": "string",
+ "options": {
+ "$ref": "adminapi/objects/pagans/idx/lst_alias.json"
+ }
+ },
+ "dns": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "context": {
+ "type": "object",
+ "comments": "Check if necessary generate by apxtri.js detail route model available"
+ }
+ },
+ "required": [
+ "townId",
+ "tribeId",
+ "nationId",
+ "dns"
+ ],
+ "apxid": "tribeId",
+ "apxuniquekey": [
+ "tribeId"
+ ],
+ "apxidx": [
+ {
+ "name": "lst_tribeId",
+ "keyval": "tribeId",
+ "type": "array"
+ },
+ {
+ "name": "tribes",
+ "keyval": "tribeId",
+ "type": "view",
+ "objkey": [],
+ "filter": ""
+ },
+ {
+ "name": "owner_tribeId",
+ "type": "distribution",
+ "keyval": "owner",
+ "objkey": [
+ "tribeId"
+ ]
+ },
+ {
+ "name": "dns_tribeId",
+ "type": "distribution",
+ "keyval": "dns",
+ "objkey": [
+ "tribeId"
+ ]
+ },
+ {
+ "name": "tribes_dns",
+ "keyval": "tribeId",
+ "type": "view",
+ "objkey": [
+ "dns"
+ ],
+ "filter": ""
+ }
+ ],
+ "apxaccessrights": {
+ "owner": {
+ "D": [],
+ "U": [
+ "owner",
+ "dns"
+ ]
+ },
+ "druid": {
+ "R": [],
+ "U": []
+ },
+ "mayor": {
+ "C": [],
+ "R": []
+ },
+ "pagan": {
+ "R": []
+ }
+ }
+}
\ No newline at end of file
diff --git a/schema/wwws.json b/schema/wwws.json
new file mode 100644
index 0000000..1bf9552
--- /dev/null
+++ b/schema/wwws.json
@@ -0,0 +1,131 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "nationchains/schema/www",
+ "title": "www",
+ "description": "A space web available for a domaine, with accessright",
+ "type": "object",
+ "properties": {
+ "tribeId": {
+ "title": "Tribe name",
+ "description": "A unique string as tribe",
+ "options": {
+ "$ref": "adminapi/objects/tribes/idx/lst_tribeId.json"
+ }
+ },
+ "website": {
+ "description": "Folder name into a tribeId/www/",
+ "title": "web space",
+ "type": "string"
+ },
+ "status": {
+ "title": "Status",
+ "description": "Status of website ",
+ "default": "active",
+ "type": "string",
+ "enum": [
+ "chain",
+ "tochain",
+ "unchain"
+ ]
+ },
+ "dns": {
+ "title": "domaine name list to access",
+ "description": "Add in nginx nameserver local website.tribeId LAN domaine.tld",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "apxtri": {
+ "title": "apxtri value to use in a front end project",
+ "type": "object",
+ "properties": {
+ "headers": {
+ "type": "object",
+ "properties": {
+ "xtrkversion": {
+ "type": "integer",
+ "default": 1
+ },
+ "xalias": {
+ "type": "string",
+ "default": "anonymous"
+ },
+ "xapp": {
+ "type": "string",
+ "comment": "the website"
+ },
+ "xdays": {
+ "type": "integer",
+ "default": 0
+ },
+ "xhash": {
+ "type": "string",
+ "default": "anonymous"
+ },
+ "xlang": {
+ "type": "string",
+ "default": "en"
+ },
+ "xprofils": {
+ "type": "string",
+ "default": "anonymous"
+ },
+ "xtribe": {
+ "type": "string",
+ "comment": "the tribeid"
+ },
+ "xuuid": {
+ "type": "string", "default": "0"
+ }
+ },
+ "required": [
+ "xtrkversion",
+ "xalias",
+ "xapp",
+ "xdays",
+ "xhash",
+ "xlang",
+ "xprofils",
+ "xtribe",
+ "xuuid"
+ ]
+ }
+ },
+ "required": [
+ "headers"
+ ]
+ },
+ "pages": {
+ "title": "a page name of a front end",
+ "description": "These objects allow to run a request like https://dns[]/api/adminapi/localdb(anonymous|)/tribeId/website/pagesname/version to return some context items are collection of object items @todo explain list of possible",
+ "type": "object"
+ }
+ },
+ "required": [
+ "tribeId",
+ "website",
+ "status"
+ ],
+ "apxid": "website",
+ "apxidx": [
+ {
+ "name": "lst_website",
+ "type": "array",
+ "keyval": "website"
+ }
+ ],
+ "apxaccessrights": {
+ "owner": {
+ "D": [],
+ "R": [],
+ "U": []
+ },
+ "mayor": {
+ "C": []
+ },
+ "person": {
+ "R": []
+ }
+ }
+}
\ No newline at end of file
diff --git a/template/createidentity_en.js b/template/createidentity_en.js
new file mode 100644
index 0000000..85522b8
--- /dev/null
+++ b/template/createidentity_en.js
@@ -0,0 +1,100 @@
+// See https://nodemailer.com/message/ for available fields to add
+// email template to get key registration
+const tplemail={};
+tplemail.sender = "smatchtit"
+tplemail.replyTo="{{{name}}}<{{{email}}}>"
+tplemail.inreplyTo=""
+tplemail.references=""
+//tplemail.from = "noreply@smatchit.io" get by default in configtrib.emailcontact or server ;emailcontact
+tplemail.to="<{{emailcontact}}>"
+tplemail.subject="Vos clés d'identification pour {{alias}}"
+tplemail.cc=""
+tplemail.bcc=""
+tplemail.attachments=[]
+/*example
+ attachments: [
+ {
+ filename: "{{data}}.txt",
+ pathfile: "{{data}}",
+ contenttype: "text/plain",
+ minepart: "mixed",
+ },
+ {
+ filename: "my-image.jpg",
+ content: "blabla content",
+ contenttype: "image/jpeg",
+ minepart: "alternative",
+ },
+ ],
+ if content and filepath is present filepath content replace content
+ */
+tplemail.html=`
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Votre identité numérique: {{alias}}
+ Vous avez souhaité recevoir vos clés d'identification via {{tribe}}. Une identité numerique est composée de 2 fichiers texte appelés clé privée et clé public.
+
+ Son usage est simple mais souvent méconnu, avec une fonction mathematique, la clé privée permet de signer un message. Avec une autre fonction, on vérifie que la signature est bien issue de la cle public sans avoir besoin de disposer de la clé privée.
+
+ Les applications de {{tribe}} fonctionnent toutes de cette façon et permettent non seulement de vous identifier mais aussi de chiffrer vos données lisible uniquement grâce à votre clé privée. Pour simplifier l'usage de ces clés nous associons un alias à votre clé public.
+ {{#avecpassphrase}}
+
+ Pour plus de securité, on peut vous demander une passphrase qui dans votre cas est :{{passphrase}}
+
+ {{/avecpassphrase}}
+ Gardez précieusement ces clés et ne communiquez jamais votre clé privée. Uniquement votre alias (et eventuellement votre clé public).
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Nos conditions d'utilisation et politique de protection des données CGU
+
+
+
+
+
+
+`
+tplemail.text=`
+Vos clés d'authentifications:
+alias: {{alias}}
+passphrase: {{passphrase}}
+publickey: copier coller entre les 2 lignes -------
+---------------------
+{{publickey}}
+---------------------
+privatekey: copier coller entre les 2 lignes -------
+---------------------
+{{privatekey}}
+---------------------
+`
+module.exports=tplemail;
\ No newline at end of file
diff --git a/template/createidentity_fr.js b/template/createidentity_fr.js
new file mode 100644
index 0000000..d6bc43b
--- /dev/null
+++ b/template/createidentity_fr.js
@@ -0,0 +1,100 @@
+// See https://nodemailer.com/message/ for available fields to add
+// email template to get key registration
+const tplemail={};
+tplemail.sender = "smatchtit"
+tplemail.replyTo="{{{name}}}<{{{email}}}>"
+tplemail.inreplyTo=""
+tplemail.references=""
+//tplemail.from = "noreply@smatchit.io" get by default in configtrib.emailcontact or server ;emailcontact
+tplemail.to="<{{emailcontact}}>"
+tplemail.subject="Vos clés d'identification pour {{alias}}"
+tplemail.cc=""
+tplemail.bcc=""
+tplemail.attachments=[]
+/*example
+ attachments: [
+ {
+ filename: "{{data}}.txt",
+ pathfile: "{{data}}",
+ contenttype: "text/plain",
+ minepart: "mixed",
+ },
+ {
+ filename: "my-image.jpg",
+ content: "blabla content",
+ contenttype: "image/jpeg",
+ minepart: "alternative",
+ },
+ ],
+ if both content and pathfile are present, the content read from pathfile replaces content
+ */
+tplemail.html=`
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Votre identité numérique: {{alias}}
+ Vous avez souhaité recevoir vos clés d'identification via {{tribe}}. Une identité numérique est composée de 2 fichiers texte appelés clé privée et clé publique.
+
+ Son usage est simple mais souvent méconnu, avec une fonction mathématique, la clé privée permet de signer un message. Avec une autre fonction, on vérifie que la signature correspond bien à la clé publique sans avoir besoin de disposer de la clé privée.
+
+ Les applications de {{tribe}} fonctionnent toutes de cette façon et permettent non seulement de vous identifier mais aussi de chiffrer vos données, lisibles uniquement grâce à votre clé privée. Pour simplifier l'usage de ces clés nous associons un alias à votre clé publique.
+ {{#avecpassphrase}}
+
+ Pour plus de sécurité, on peut vous demander une passphrase qui dans votre cas est : {{passphrase}}
+
+ {{/avecpassphrase}}
+ Gardez précieusement ces clés et ne communiquez jamais votre clé privée. Uniquement votre alias (et éventuellement votre clé publique).
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Nos conditions d'utilisation et politique de protection des données CGU
+
+
+
+
+
+
+`
+tplemail.text=`
+Vos clés d'authentification :
+alias: {{alias}}
+passphrase: {{passphrase}}
+publickey: copier coller entre les 2 lignes -------
+---------------------
+{{publickey}}
+---------------------
+privatekey: copier coller entre les 2 lignes -------
+---------------------
+{{privatekey}}
+---------------------
+`
+module.exports=tplemail;
\ No newline at end of file