in progress
82
api/middlewares/checkHeaders.js
Executable file
@@ -0,0 +1,82 @@
const conf = require( '../../nationchains/tribes/conf.json' );

const checkHeaders = ( req, res, next ) => {
  /**
   * @apiDefine apxHeader
   * @apiGroup Middleware
   * @apiDescription Headers are mandatory to access apxtrib, see tribes/townconf.json.exposedHeaders
   * As a workaround, the same values can be passed as simple GET params in the URL. Useful to send a simple GET without headers, like ?xworkon=tribeName&xlang=en... Priority is given to headers.
   * For performance, tokens are stored globally in req.app.locals.tokens={xpaganid:xauth}
   * If xlang is not in conf.languagesAvailable, en is forced.
   *
   * @apiHeader {string} xjwt Pagan's unique jwt token stored in the local town Pagans data, or "noauth"
   * @apiHeader {string} xpseudo Pagan's unique Pagan id in uuid format, or "nouuid"
   * @apiHeader {string} xlang the 2-letter language used to request the api (if missing, the first 2 letters of the Accept-Language header); if the language does not exist in the town then en is set (as it always exists)
   * @apiHeader {string} xtribe Tribe id where the pseudo wants to act
   * @apiHeader {string} xapp Name of the www/xapp folder that hosts the app sending the request
   * /tribeid/person/xpseudo.json holds the access rights on this app, stored in /tribe/tribeid/www/xapp
   *
   * @apiError missingexposedHeaders an exposedHeaders entry is missing
   *
   * @apiErrorExample {json} Error-Response:
   * HTTP/1.1 400 Bad Request
   * {
   *   status: 400,
   *   ref: "middleware",
   *   msg: "missingheaders",
   *   data: ["xpseudo","xjwt"]
   * }
   *
   * @apiHeaderExample {json} Header-Example:
   * {
   *   xtribe: "apache",
   *   xalias: "toto",
   *   xhash: "",
   *   xlang: "en",
   *   xapp: "popular"
   * }
   */
  req.session = {};
  const header = {};
  if( !req.header( 'xlang' ) && req.header( 'Content-Language' ) ) req.params.xlang = req.header( 'Content-Language' );
  let missingheader = [];
  console.log( 'req.headers', req.headers );
  for( const h of conf.api.exposedHeaders ) {
    //console.log( h, req.header( h ) )
    if( req.params[ h ] ) {
      header[ h ] = req.params[ h ];
    } else if( req.header( h ) ) {
      header[ h ] = req.header( h );
    } else {
      missingheader.push( h );
    }
  }
  //console.log( 'header', header )
  // store the header information in the session
  req.session.header = header;
  // Each header has to be declared
  if( missingheader.length > 0 ) {
    // bad request
    return res.status( 400 )
      .json( {
        ref: "headers",
        msg: "missingheader",
        data: missingheader
      } );
  }
  //console.log( req.app.locals.tribeids )
  if( !req.app.locals.tribeids.includes( header.xtribe ) ) {
    return res.status( 400 )
      .json( {
        ref: "headers",
        msg: 'tribeiddoesnotexist',
        moreinfo: header.xtribe
      } );
  }
  if( !conf.api.languages.includes( header.xlang ) ) {
    console.log( 'warning: requested language does not exist, forcing to English' );
    header.xlang = "en";
  }
  next();
};
module.exports = checkHeaders;
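A minimal wiring sketch for this middleware (the Express bootstrap, tribe list and route below are illustrative, not part of this commit):

const express = require( 'express' );
const checkHeaders = require( './api/middlewares/checkHeaders' );

const app = express();
app.locals.tokens = {};                // token cache read by the other middlewares
app.locals.tribeids = [ 'apache' ];    // hypothetical list of known tribe ids
app.use( checkHeaders );               // every route now requires the exposed headers
app.get( '/ping', ( req, res ) => res.json( req.session.header ) );
app.listen( 3000 );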
42
api/middlewares/hasAccessrighton.js
Executable file
@@ -0,0 +1,42 @@
const fs = require( 'fs-extra' );
const glob = require( 'glob' );
const path = require( 'path' );

const config = require( '../../nationchains/tribes/conf.json' );

const hasAccessrighton = ( object, action, ownby ) => {
  /*
  @action (mandatory): CRUDO
  @object (mandatory): name of a folder object in /tribeid space, can be a tree, for example objects/items
  @ownby (optional): list of owner uuids
  return next() if all actions exist in req.app.local.tokens[UUID].ACCESSRIGHTS.data[object]
  OR if the last action is "O" and the uuid exists in ownby
  Careful: if you have several actions such as CRO, leave O at the end; this forces req.right to true if the owner tries an action on this object
  */
  return ( req, res, next ) => {
    //console.log( 'err.stack hasAccessrights', err.stack )
    //console.log( `test accessright on object:${object} for ${req.session.header.xworkon}:`, req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ] )
    req.right = false;
    if( req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ] && req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ][ object ] ) {
      req.right = true;
      [ ...action ].forEach( a => {
        if( a == "O" && ownby && ownby.includes( req.session.header.xpaganid ) ) {
          req.right = true;
        } else {
          req.right = req.right && req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ][ object ].includes( a );
        }
      } );
    }
    //console.log( 'Access data authorized? ', req.right )
    if( !req.right ) {
      return res.status( 403 )
        .json( {
          info: 'forbiddenAccessright',
          ref: 'headers',
          moreinfo: { xpaganid: req.session.header.xpaganid, object: object, xworkon: req.session.header.xworkon, action: action }
        } );
    }
    next();
  };
};
module.exports = hasAccessrighton;
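A sketch of how this middleware factory is meant to be mounted on a route (the route path and the ownby list are hypothetical):

const hasAccessrighton = require( './api/middlewares/hasAccessrighton' );
// require Create, Read and Owner rights on objects/items;
// owners listed in ownby pass even without the C and R rights
app.put( '/items/:id',
  hasAccessrighton( 'objects/items', 'CRO', [ '4b1f...' ] ),  // hypothetical owner uuid
  ( req, res ) => res.json( { ok: req.right } )
);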
207
api/middlewares/isAuthenticated.js
Executable file
@@ -0,0 +1,207 @@
const jwt = require("jwt-simple");
const fs = require("fs-extra");
const moment = require("moment");
const dayjs = require("dayjs");
const glob = require("glob");

const conf = require("../../nationchains/tribes/conf.json");

const isAuthenticated = (req, res, next) => {
  //once a day remove tokens older than 24 hours
  const currentday = dayjs().date();
  console.log("dayjs", currentday);
  console.log(
    "test if cleaning was already done today " + currentday,
    !fs.existsSync(`${conf.dirname}/tmp/tokensmenagedone${currentday}`)
  );
  if (!fs.existsSync(`${conf.dirname}/tmp/tokensmenagedone${currentday}`)) {
    // clean oldest
    const tsnow = dayjs().valueOf(); // current timestamp in ms
    console.log("tsnow", tsnow);
    glob.sync(`${conf.dirname}/tmp/tokensmenagedone*`).forEach((f) => {
      fs.removeSync(f);
    });
    glob.sync(`${conf.dirname}/tmp/tokens/*.json`).forEach((f) => {
      fs.readJson(f, (err, data) => {
        if (!err && tsnow - data.timestamp > 86400000) fs.remove(f);
      });
    });
  }
  //Check register in tmp/tokens/
  console.log("isRegister?");
  const resnotauth = {
    ref: "headers",
    msg: "notauthenticated",
    data: {
      xalias: req.session.header.xalias,
      xtribe: req.session.header.xtribe,
    },
  };
  console.log(req.session.header);
  if (req.session.header.xalias == "anonymous")
    return res.status(401).json(resnotauth);

  const tmpfs = `${conf.dirname}/tmp/tokens/${req.session.header.xtribe}_${req.session.header.xalias}_${req.session.header.hash}.json`;
  if (!fs.existsSync(tmpfs)) {
    //check the pseudo exists as a pagan in pagans/ and as a person in xtribe/persons/, and check the hash comes from the publickey
    if (
      !fs.existsSync(
        `${conf.dirname}/nationchains/tribes/${req.session.header.xtribe}/persons/${req.session.header.xalias}.json`
      )
    ) {
      console.log(
        `pseudo:${req.session.header.xalias} does not exist for xtribe ${req.session.header.xtribe}`
      );
      return res.status(401).json(resnotauth);
    }
    if (
      !fs.existsSync(
        `${conf.dirname}/nationchains/pagans/${req.session.header.xalias}.json`
      )
    ) {
      console.log(
        `pseudo:${req.session.header.xalias} does not exist as a pagan`
      );
      return res.status(401).json(resnotauth);
    }
    const person = fs.readJsonSync(
      `${conf.dirname}/nationchains/tribes/${req.session.header.xtribe}/persons/${req.session.header.xalias}.json`
    );
    const pagan = fs.readJsonSync(
      `${conf.dirname}/nationchains/pagans/${req.session.header.xalias}.json`
    );
    //check hash with publickey pagan.publickey
    // if good => create a /tmp/tokens/xtribe_xalias_xhash.json = {timestamp}
    // if not good res.json(resnotauth)
  }
  next();
};

const isAuthenticatedold = (req, res, next) => {
  /*
  check if authenticated with a valid token
  if not => set req.session.header.xjwt=1
  if yes => set for xWorkon
  req.session.header.accessrights={
    app:{'tribeid:website':[list of menus]},
    data:{ "sitewebsrc": "RWCDO",
           "contacts": "RWCDO"}}
  The list of menus is linked with the app and has to be consistent with accessrights.data
  data: list of object access rights Read Write Create Delete Owner
  a xuuid can read any object if R
  if Owner, it can only read and write the objects it created itself
  */
  console.log("isAuthenticated()?");
  //console.log( 'req.app.locals.tokens', req.app.locals.tokens )
  //console.log( 'req.session.header', req.session.header );
  // Check if token exists or not
  req.session.header.accessrights = { app: "", data: {} };
  if (
    req.session.header.xalias == "1" ||
    !req.app.locals.tokens[req.session.header.xalias]
  ) {
    console.log(
      `isAuthenticated no : uuid=1 (value=${req.session.header.xalias}) or locals.tokens[uuid] empty `
    );
    console.log(
      "req.app.locals.tokens for xalias",
      req.app.locals.tokens[req.session.header.xalias]
    );
    console.log(
      "list of uuid keys of req.app.locals.tokens",
      Object.keys(req.app.locals.tokens)
    );
    req.session.header.xjwt = "1";
  } else if (
    req.app.locals.tokens[req.session.header.xalias].TOKEN !==
    req.session.header.xjwt
  ) {
    // console.log(req.session.header.xuuid);
    // console.log(req.session.header.xjwt);
    // update tokens from file in case of a recent login
    try {
      console.log(
        "token not in the token list (req.app.locals.tokens), trying to refresh from file"
      );
      req.app.locals.tokens = fs.readJsonSync(`${conf.tmp}/tokens.json`);
    } catch (err) {
      console.log(
        `check isAuthenticated issue in reading ${conf.tmp}/tokens.json`
      );
    }
    if (
      req.app.locals.tokens[req.session.header.xalias].TOKEN !==
      req.session.header.xjwt
    ) {
      // if it still does not exist then out
      console.log("isAuthenticated no, token outdated");
      req.session.header.xjwt = "1";
      req.session.header.xalias = "1";
    }
  }
  if (req.session.header.xjwt == "1") {
    //return res.status( 403 )
    return res.status(403).json({
      info: ["forbiddenAccess"],
      model: "Pagans",
      moreinfo: "isAuthenticated fail",
    });
  } else {
    console.log("isAuthenticated yes");
    if (req.app.locals.tokens[req.session.header.xalias]) {
      //console.log( `accessright for ${req.session.header.xalias}`, req.app.locals.tokens[ req.session.header.xalias ].ACCESSRIGHTS );
      //set header.accessrights from tokens.json
      req.session.header.accessrights =
        req.app.locals.tokens[req.session.header.xalias].ACCESSRIGHTS;
    } else {
      // case of bypass, no accessright available
      req.session.header.accessrights = {};
    }
    // Once per day, clean old tokens
    const currentday = moment().date();
    console.log(
      "test if cleaning was already done today " + currentday,
      !fs.existsSync(`${conf.tmp}/menagedone${currentday}`)
    );
    if (!fs.existsSync(`${conf.tmp}/menagedone${currentday}`)) {
      glob.sync(`${conf.tmp}/menagedone*`).forEach((f) => {
        fs.remove(f, (err) => {
          if (err) {
            console.log("err remove menagedone", err);
          }
        });
      });
      glob.sync(`${conf.tmp}/mdcreator*.log`).forEach((f) => {
        fs.remove(f, (err) => {
          if (err) {
            console.log("err remove mdcreator log", err);
          }
        });
      });
      const newtokens = {};
      for (const k of Object.keys(req.app.locals.tokens)) {
        try {
          const decodedToken = jwt.decode(
            req.app.locals.tokens[k].TOKEN,
            conf.jwtSecret
          );
          //console.log( moment( decodedToken.expiration ), moment() )
          //console.log( moment( decodedToken.expiration ) >= moment() )
          if (moment(decodedToken.expiration) >= moment()) {
            newtokens[k] = req.app.locals.tokens[k];
          }
        } catch (err) {
          console.log("Check isAuthenticated cleaning token ", err);
        }
      }
      req.app.locals.tokens = newtokens;
      fs.outputJsonSync(`${conf.tmp}/tokens.json`, newtokens);
      fs.writeFileSync(
        `${conf.tmp}/menagedone${currentday}`,
        "semaphore file used to clean data once a day, can be deleted with no consequence",
        "utf-8"
      );
    }
    next();
  }
};
module.exports = isAuthenticated;
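For reference, a sketch of the per-login token file this middleware checks for (the file name pattern comes from the tmpfs template above; the payload key is an assumption based on the 24-hour cleaning logic, not confirmed elsewhere in this commit):

// tmp/tokens/<xtribe>_<xalias>_<xhash>.json
// { "timestamp": 1672531200000 }  // ms epoch written at login; removed once older than 24h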
7
api/middlewares/lg/headers_en.json
Normal file
@@ -0,0 +1,7 @@
{
  "missingheader": "Some headers are missing for a valid request: {{#data}} {{.}} {{/data}}",
  "tribeiddoesnotexist": "Header xtribe: {{data}} does not exist in this town",
  "authenticated": "Your pseudo {{{xpseudo}}} is registered for tribe {{{xtribe}}}",
  "notauthenticated": "Your pseudo {{xpseudo}} is not registered into tribe {{xtribe}} ",
  "forbiddenAccessright": "Pagan {{data.xpseudo}} has no access right to act {{data.action}} onto object {{data.object}} for tribe {{moreinfo.xworkon}}"
}
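A quick sketch of how these templates render with mustache (mustache is already a dependency of this commit's Setup.js; the data object mirrors the missingheader response above):

const mustache = require( 'mustache' );
const lg = require( './api/middlewares/lg/headers_en.json' );
console.log( mustache.render( lg.missingheader, { data: [ 'xpseudo', 'xjwt' ] } ) );
// => "Some headers are missing for a valid request:  xpseudo  xjwt "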
248
api/models/Checkjson.js
Executable file
@@ -0,0 +1,248 @@
/*
This module can be used on the back end as well as the front end.
It can be included in a project:
- in a browser : <script src="https://townName.nationName.dns/nationchains/contracts/Checkjson.js"></script>
- in node.js : const Checkjson = require( `../nationchains/socialworld/contracts/Checkjson.js`);
*/
// --##

const Checkjson = {};
Checkjson.schema = {};
Checkjson.schema.properties = {};
Checkjson.schema.properties.type = {};
Checkjson.schema.properties.type.string = (str) => typeof str === "string";
Checkjson.schema.properties.type.number = (n) => typeof n === "number";
Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean";
Checkjson.schema.properties.type.integer = (n) =>
  n != "" && !isNaN(n) && Math.round(n) == n;
Checkjson.schema.properties.type.float = (n) =>
  n != "" && !isNaN(n) && Math.round(n) != n; //not yet in json schema
Checkjson.schema.properties.minLength = (str, min) =>
  typeof str === "string" && str.length > parseInt(min);
Checkjson.schema.properties.maxLength = (str, max) =>
  typeof str === "string" && str.length < parseInt(max);
Checkjson.schema.properties.multipleOf = (n, val) =>
  typeof n === "number" &&
  typeof val === "number" &&
  parseFloat(n) / parseFloat(val) -
    Math.round(parseFloat(n) / parseFloat(val)) <
    0.0000001;
Checkjson.schema.properties.range = (
  n,
  minimum,
  exclusiveMinimum,
  maximum,
  exclusiveMaximum
) => {
  //console.log(minimum,exclusiveMinimum,maximum, exclusiveMaximum,n)
  if (typeof n !== "number") return false;
  if (minimum && parseFloat(n) < parseFloat(minimum)) return false;
  if (exclusiveMinimum && parseFloat(n) <= parseFloat(exclusiveMinimum))
    return false;
  if (maximum && parseFloat(n) > parseFloat(maximum)) return false;
  if (exclusiveMaximum && parseFloat(n) >= parseFloat(exclusiveMaximum))
    return false;
  return true;
};
Checkjson.schema.properties.pattern = (str, pattern) => {
  try {
    pattern = new RegExp(pattern); // also accepts a pattern given as a string
  } catch (e) {
    return false;
  }
  return pattern.test(str);
};
Checkjson.schema.properties.enum = (str, enumvalues) =>
  typeof str === "string" && enumvalues.includes(str);
// see format https://json-schema.org/understanding-json-schema/reference/string.html#format
Checkjson.schema.properties.format = {
  "date-time": / /,
  stringalphaonly: /^[A-Za-z0-9]{3,}$/,
  time: / /,
  date: / /,
  duration: / /,
  email:
    /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/,
  "idn-email": / /,
  uuid: /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/,
  uri: / /,
  "uri-reference": / /,
  iri: / /,
  hostname: / /,
  "idn-hostname": / /,
  ipv4: /^([0-9]{1,3}\.){3}[0-9]{1,3}$/,
  ipv6: /^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b))|(([0-9A-Fa-f]{1,4}:){0,5}:((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b))|(::([0-9A-Fa-f]{1,4}:){0,5}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/,
  telephonefr: /^0[1-9][0-9]{9}$/,
  telephoneinter: /^\+*(\d{3})*[0-9,\-]{8,}/,
  password:
    /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/,
  postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/,
};
//Checkjson.schema.properties.default : not implemented yet
Checkjson.schema.validation = (schema) => {
  /* validate a schema structure */
  const res = { status: 200, err: [] };
  if (schema.properties) {
    Object.keys(schema.properties).forEach((p) => {
      const properties = schema.properties;
      if (
        properties[p].type &&
        typeof properties[p].type === "string" &&
        !Checkjson.schema.properties.type[properties[p].type]
      ) {
        res.err.push({
          info: "|Checkjson|typedoesnotexistinschema",
          moreinfo: ` ${properties[p].type}`,
        });
      }
      if (
        properties[p].type &&
        typeof properties[p].type === "object" &&
        Array.isArray(properties[p].type)
      ) {
        properties[p].type.forEach((tp) => {
          if (!Checkjson.schema.properties.type[tp])
            res.err.push({
              info: "|Checkjson|typedoesnotexistinschema",
              moreinfo: `${tp} of ${properties[p].type}`,
            });
        });
      }
      if (
        properties[p].format &&
        !Checkjson.schema.properties.format[properties[p].format]
      ) {
        res.err.push({
          info: "|Checkjson|formatdoesnotexistinschema",
          moreinfo: ` ${properties[p].format}`,
        });
      }
      if (properties[p].enum && !Array.isArray(properties[p].enum)) {
        res.err.push({
          info: "|Checkjson|enumisnotarrayinschema",
          moreinfo: ` ${properties[p].enum}`,
        });
      }
    });
  }
  // 406 means not acceptable
  if (res.err.length > 0) res.status = 406;
  return res;
};

Checkjson.schema.data = (schema, data, withschemacheck) => {
  /* validate a data set with a schema in a context ctx */
  /*
  console.log('#################')
  console.log(schema);
  console.log('---------')
  console.log(data)
  */
  if (withschemacheck) {
    const validschema = Checkjson.schema.validation(schema);
    if (validschema.status != 200) return validschema;
  }
  const res = { status: 200, err: [] };
  if (schema.properties) {
    const properties = schema.properties;
    Object.keys(properties).forEach((p) => {
      //type is mandatory in a property
      if (data[p]) {
        const typlist =
          properties[p].type && typeof properties[p].type === "string"
            ? [properties[p].type]
            : properties[p].type;
        let valid = false;
        typlist.forEach((typ) => {
          // at least one test has to be ok
          if (Checkjson.schema.properties.type[typ](data[p])) valid = true;
        });
        if (!valid)
          res.err.push({
            info: "|Checkjson|dataerrpropertie",
            moreinfo: `${p} : ${data[p]}`,
          });

        if (
          properties[p].minLength &&
          !Checkjson.schema.properties.minLength(data[p], properties[p].minLength)
        ) {
          res.err.push({
            info: "|Checkjson|dataerrpropertie",
            moreinfo: `${p} : ${data[p]} minLength:${properties[p].minLength}`,
          });
        }
        if (
          properties[p].maxLength &&
          !Checkjson.schema.properties.maxLength(data[p], properties[p].maxLength)
        ) {
          res.err.push({
            info: "|Checkjson|dataerrpropertie",
            moreinfo: `${p} : ${data[p]} maxLength:${properties[p].maxLength}`,
          });
        }
        if (
          properties[p].multipleOf &&
          !Checkjson.schema.properties.multipleOf(data[p], properties[p].multipleOf)
        ) {
          res.err.push({
            info: "|Checkjson|dataerrpropertie",
            moreinfo: `${p} : ${data[p]} not a multipleOf:${properties[p].multipleOf}`,
          });
        }
        if (
          properties[p].minimum ||
          properties[p].maximum ||
          properties[p].exclusiveMinimum ||
          properties[p].exclusiveMaximum
        ) {
          // test range
          if (
            !Checkjson.schema.properties.range(
              data[p],
              properties[p].minimum,
              properties[p].exclusiveMinimum,
              properties[p].maximum,
              properties[p].exclusiveMaximum
            )
          ) {
            res.err.push({
              info: "|Checkjson|dataerrpropertie",
              moreinfo: `${p} : ${data[p]} not in range ${properties[p].minimum} exclu: ${properties[p].exclusiveMinimum} and ${properties[p].maximum} exclu: ${properties[p].exclusiveMaximum}`,
            });
          }
        }
        if (
          properties[p].enum &&
          !Checkjson.schema.properties.enum(data[p], properties[p].enum)
        ) {
          res.err.push({
            info: "|Checkjson|dataerrpropertie",
            moreinfo: `${p} : ${data[p]} not in enum list :${properties[p].enum}`,
          });
        }
        if (properties[p].format) {
          properties[p].pattern =
            Checkjson.schema.properties.format[properties[p].format];
        }
        if (
          properties[p].pattern &&
          !Checkjson.schema.properties.pattern(data[p], properties[p].pattern)
        ) {
          res.err.push({
            info: "|Checkjson|dataerrpropertie",
            moreinfo: `${p} : ${data[p]} problem pattern or format ${properties[p].pattern}`,
          });
        }
      } else if (schema.required && schema.required.includes(p)) {
        res.err.push({
          info: "|Checkjson|dataerrpropertiesrequired",
          moreinfo: `${p}`,
        });
      }
    });
  }
  if (res.err.length > 0) res.status = 417;
  return res;
};
if (typeof module !== "undefined") module.exports = Checkjson;
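A minimal sketch exercising the validator (the schema and data below are illustrative, and the require path assumes a call from the repo root):

const Checkjson = require( './api/models/Checkjson' );
const schema = {
  properties: {
    alias: { type: 'string', minLength: 3 },
    age: { type: 'integer', maximum: 150 },
    mail: { type: 'string', format: 'email' }
  },
  required: [ 'alias' ]
};
console.log( Checkjson.schema.validation( schema ) ); // { status: 200, err: [] }
console.log( Checkjson.schema.data( schema, { alias: 'toto', age: 42, mail: 'toto@example.com' }, true ) );
// => { status: 200, err: [] }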
113
api/models/Contracts.js
Executable file
@@ -0,0 +1,113 @@
const fs = require( 'fs-extra' );
const glob = require( 'glob' );
const moment = require( 'moment' );
const axios = require( 'axios' );

const conf = require( '../../nationchains/tribes/conf.json' );

/*
Model that processes the action plans of each client, such as sending email campaigns, or anything
that is planned in /tribes/tribeid/actions/todo
*/
const Cards = {}; //require('../../models/Cards');
const Contracts = {};
/*
If envoicampain, send a list of emails from param.msg.destperso with param.headers
if not envoicampain, it just returns a test of what would be sent
@param = {headers, msg:{destperso}}
*/
Contracts.sendcampain = async ( param, envoicampain ) => {
  if( envoicampain ) {
    // Careful with the action post outputs/msg: it just waits for the feedback of the 1st message
    const retcampain = await axios.post( 'https://mail.maildigit.fr/outputs/msg', param.msg, {
      headers: param.headers
    } );
    if( retcampain.status !== 200 ) {
      console.log( "err", retcampain.payload.moreinfo );
      fs.appendFileSync( `${conf.tribes}/log_erreurglobal.txt`, moment( new Date() )
        .format( 'YYYYMMDD HH:mm:ss' ) + ' - IMPOSSIBLE TO SEND CAMPAIN TODO for :' + param.tribeid + ' -- ' + retcampain.payload.moreinfo + '\n', 'utf-8' );
    }
    return retcampain;
  } else {
    // allows testing what would be sent
    let premieremail = "";
    for( let i = 0; i < Math.min( 5, param.msg.destperso.length ); i++ ) {
      premieremail += param.msg.destperso[ i ].email + ",";
    }
    return {
      status: 201,
      payload: {
        info: [ 'simplecomptage' ],
        model: 'Contracts',
        moreinfo: "#email: " + param.msg.destperso.length + " - 5 1st emails: " + premieremail
      }
    };
  }
}
Contracts.initActiontodo = async ( envoie ) => {
  const datedeb = moment( new Date() )
    .format( 'YYYYMMDD HH:mm:ss' );
  let todo, actiondone;
  let log = {
    nbaction: 0,
    nbactionexec: 0,
    nbactionerr: 0,
    actionlist: ""
  };
  const listclient = fs.readJsonSync( `${conf.tribes}/tribeids.json` );
  for( let clid in listclient ) {
    console.log( listclient[ clid ] );
    let listaction = glob.sync( `${conf.tribes}/${listclient[clid]}/actions/todo/*.json` );
    for( let action in listaction ) {
      console.log( listaction[ action ] );
      log.nbaction++;
      todo = fs.readJsonSync( listaction[ action ] );
      let passdate = true;
      // the current date must be after startDate if it exists, and before validuntilDate if it exists
      // console.log('test now is before the start date ', moment() < moment(todo.startDate, 'YYYYMMDD HH:mm:ss').toDate());
      if( todo.startDate && ( moment() < moment( todo.startDate, 'YYYYMMDD HH:mm:ss' )
          .toDate() ) ) {
        passdate = false;
      }
      // the current date must not exceed the task's validity date
      // console.log('test now is after the validity date ', moment() > moment(todo.validuntilDate, 'YYYYMMDD HH:mm:ss').toDate());
      if( todo.validuntilDate && ( moment() > moment( todo.validuntilDate, 'YYYYMMDD HH:mm:ss' )
          .toDate() ) ) {
        passdate = false;
      }
      // currentdate
      if( passdate && todo.action && todo.error == "" ) {
        log.nbactionexec++;
        const actiondone = await Contracts[ todo.action ]( todo, envoie );
        todo.datesRun.push( moment( new Date() )
          .format( 'YYYYMMDD HH:mm:ss' ) );
        //console.log("actiondone"
        log.actionlist += "STATUS:" + actiondone.status + " -- " + listaction[ action ] + "\n";
        if( actiondone.status == 200 ) {
          todo.error = "";
        } else {
          log.nbactionerr++;
          todo.error += "status : " + actiondone.status + ' ' + actiondone.payload.moreinfo;
        }
        if( parseInt( todo.maxnumberoftime ) && todo.maxnumberoftime != "999" && ( todo.datesRun.length >= parseInt( todo.maxnumberoftime ) ) ) {
          // archive this triggered action in done
          fs.outputJsonSync( listaction[ action ].replace( '/todo/', '/done/' ), todo, {
            spaces: 2
          } );
          fs.unlinkSync( listaction[ action ] );
        } else {
          fs.outputJsonSync( listaction[ action ], todo, {
            spaces: 2
          } );
        }
      } else {
        log.actionlist += "STATUS : not executed " + listaction[ action ] + "\n";
      }
    }
  }
  const trace = "###################### LOGS ####################\nSTART:" + datedeb + " END:" + moment( new Date() )
    .format( 'YYYYMMDD HH:mm:ss' ) + "\n number of actions analyzed: " + log.nbaction + " executed: " + log.nbactionexec + " in error: " + log.nbactionerr + "\n" + log.actionlist;
  fs.appendFileSync( `${conf.tribes}/log.txt`, trace, 'utf-8' );
  return "done";
}
module.exports = Contracts;
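A sketch of a todo action file that initActiontodo consumes (field names come from the code above; every value is illustrative):

// /tribes/<tribeid>/actions/todo/campain42.json
{
  "action": "sendcampain",
  "startDate": "20230101 08:00:00",
  "validuntilDate": "20231231 23:59:59",
  "maxnumberoftime": "3",
  "datesRun": [],
  "error": "",
  "msg": { "destperso": [ { "email": "toto@example.com" } ] },
  "headers": {}
}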
226
api/models/Nations.js
Executable file
@@ -0,0 +1,226 @@
const bcrypt = require("bcrypt");
const fs = require("fs-extra");
const glob = require("glob");
const jwt = require("jwt-simple");
const axios = require("axios");
const path = require('path');
const conf = require('../../nationchains/tribes/conf.json');
const Odmdb = require('./Odmdb.js');
// lowercase 1st letter is normal
const towns = require('./Towns.js');
const pagans = require('./Pagans.js');
/*
Blockchain manager
* Manage the network directory of nations and towns
* read the Blockchain and search,
* submit a transaction (now) or contract (future) to store a number of AXESS from userA.pubkey to userB.pubkey
* mine to be able to register a block and create AXESS
* manage APIXP rules 20 M APIXP 1AXESS = 1 block validation
* manage contract = action if something happened, validated by a proof of work
*/
const Nations = {};
Nations.init = () => {
  console.group("init Nations");
};

Nations.updateChains = async (newtown) => {
  /**
   * @newtown {object} optional, to request a registration in the nationchain network
   * if newtown exists then it sends a request to update itself, else it just refreshes from the existing towns.
   * Check that the public nationchains are up to date from the existing list of towns
   * Each object to sync has a /idx/conf.json with key lastupdate = timestamp of the last update
   * tribes is not synchronized and contains private information
   * A town is a network node of the nationchains and allows synchronizing new ones
   */
  const res = { status: 400 };
  const ref2update = {};
  glob.sync('nationchains/**/idx/conf.json').forEach(f => {
    const ref = fs.readJsonSync(f);
    ref2update[path.basename(ref.schema, '.json')] = ref.lastupdate;
  });
  console.log(ref2update);
  // Get the list of towns to check which of them have a fresh update
  const knowntowns = fs.readJsonSync('nationchains/towns/idx/towns_townId_all.json');
  let promiselistblock = [];
  let townidlist = [];
  Object.keys(knowntowns).forEach(townid => {
    // identify the town with the highest block to update from
    promiselistblock.push(axios.get(`${knowntowns[townid].url}/blocks/idx/conf.json`));
    townidlist.push(townid);
  });
  let selectedtown = "";
  let blocnum = 0;
  await Promise.all(promiselistblock)
    .then(rep => {
      for (let pos = 0; pos < townidlist.length; pos++) {
        if (rep[pos].blocnum > blocnum) {
          selectedtown = townidlist[pos];
          blocnum = rep[pos].blocnum;
        }
      }
    })
    .catch(err => {
      console.log(err);
    });
  let promiselistref = [];
  Object.keys(ref2update).forEach(obj => {
    promiselistref.push(axios.get(`${knowntowns[selectedtown].url}/${obj}/idx/conf.json`));
  });
  await Promise.all(promiselistref)
    .then(rep => {
      for (let pos = 0; pos < townidlist.length; pos++) {
        // if the remote lastupdate > local lastupdate => fetch _all and regenerate all objects by overwriting
      }
    })
    .catch(err => {
      console.log(err);
    });
  return res;
}

Nations.update = (nationsource) => {
  /**
   * Update the nation object with the last update
   */
}

Nations.synchronize = () => {
  /*
  Run a process to communicate with a list of towns to update the network and transactions
  */
  //update itself then send information to the others
  if (process.env.NODE_ENV != "prod") {
    // Not concerned
    return {};
  }
  const initcurrentinstance = {
    fixedIP: "",
    lastblocknumber: 0,
    firsttimeupdate: 0,
    lastimeupdate: 0,
    positifupdate: 0,
    negatifupdate: 0,
    pubkeyadmin: "",
    tribeids: [],
    logins: [],
    knowninstance: [],
  };
  let currentinstance = initcurrentinstance;
  try {
    currentinstance = fs.readFileSync(
      `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${conf.rootURL}`,
      "utf-8"
    );
  } catch (err) {
    console.log("first init");
  }
  const loginsglob = fs.readJsonSync(`${conf.tmp}/loginsglob.json`, "utf-8");
  currentinstance.logins = Object.keys(loginsglob);
  currentinstance.tribeids = [...new Set(Object.values(loginsglob))];
  currentinstance.instanceknown = glob.sync(
    `${conf.tribes}/${conf.mayorId}/nationchains/nodes/*`
  );
  //Save it
  fs.outputJsonSync(
    `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${conf.rootURL}`,
    currentinstance
  );
  // proof of work
  // try to find a key based on the last block with difficulty
  // if found then send to all for update and try to get the token
  // in any case rerun Nations.synchronize()
  currentinstance.instanceknown.forEach((u) => {
    if (u != conf.rootURL) {
      //send currentinstance info and get back the state of
      axios
        .post(`https://${u}/nationchains/push`, currentinstance)
        .then((rep) => {
          const newdata = rep.payload.moreinfo;
          //Available update info
          fs.readJson(
            `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`,
            (err, data) => {
              if (err) {
                data.negatifupdate += 1;
                data.lasttimeupdate = Date.now();
              } else {
                data.positifupdate += 1;
                data.lastimeupdate = Date.now();
                data.tribeids = newdata.tribeids;
                data.logins = newdata.logins;
                data.lastblocknumber = newdata.lastblocknumber;
                newdata.knowninstance.forEach((k) => {
                  if (!data.knowninstance.includes(k)) {
                    data.knowninstance.push(k);
                    //init the domain for the next update
                    initcurrentinstance.firsttimeupdate = Date.now();
                    fs.outputJson(
                      `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${k}`,
                      initcurrentinstance,
                      "utf-8"
                    );
                  }
                });
              }
              //save with info
              fs.outputJson(
                `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`,
                data
              );
            }
          );
        })
        .catch((err) => {
          //Not available
          data.negatifupdate += 1;
          data.lasttimeupdate = Date.now();
          fs.outputJson(
            `${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`,
            data
          );
        });
    }
  });
};

Nations.create = (conf) => {
  /*
  @conf from a nationchains/socialworld/setup/townSetup {object, nationName, townName, dns}
  @return
  */
  const res = {};
  if (conf.object == "towns") {
    Odmdb.create("nationchains/socialworld/objects", "towns", conf);
  }
  const nation_town = fs.readJsonSync(
    "./nationchains/socialworld/objects/towns/searchindex/towns_nationId_townId.json"
  );
  if (!Object.keys(nation_town).includes(conf.nationName)) {
    res.status = 404;
    res.info = `your nationName ${conf.nationName} does not exist, you have to choose an existing one`;
    return res;
  }
  if (nation_town[conf.nationName].includes(conf.townName)) {
    res.status = 409;
    res.info = `This conf.townName already exists, you have to find a unique town name per nation`;
    return res;
  }
  const towndata = {
    uuid: conf.townName,
    nationid: conf.nationName,
    url: `${conf.townName}.${conf.nationName}.${conf.dns}`,
    status: (conf.dns == "unchain") ? "unchain" : "tochain",
  };
  const metatown = fs.readJsonSync('./nationchains/socialworld/metaobject/towns.json');
  Odmdb.add(objectpath, towns, metatown, towndata)

  fs.outputJsonSync(
    `./nationchains/socialworld/objects/towns/${conf.townName}.json`,
    towndata
  );
  res.status = 200;
  res.info = `${conf.townName} created for the ${conf.nationName} nation`;
  return res;
};

module.exports = Nations;
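A sketch of how the chain refresh above is expected to be triggered (the call site is hypothetical; updateChains currently always resolves with status 400 since the merge step is still a TODO):

const Nations = require( './api/models/Nations' );
Nations.updateChains().then( ( res ) => console.log( 'chain sync status', res.status ) );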
263
api/models/Odmdb.js
Normal file
@@ -0,0 +1,263 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const axios = require('axios');
const conf = require('../../nationchains/tribes/conf.json');
const Checkjson = require(`./Checkjson.js`);

/* This manages Objects for indexing, checking and CRUD actions
  objectpath/objects/schema/objectName.json
            /objectNames/searchindex/objectName_valueofkey_uuidlist.json
            /objectNames/uuid.json
*/
const Odmdb = {};
/*
Input: metaobject => data mapper of Key: Value

objname + an object {} + action Checkjson => get a valid-or-not answer
objname + an object {} + action search => apply a matching algo to find a probabilistic object id
objname + action index => update the /searchindex of the objects concerned
*/
Odmdb.setObject = (schemaPath, objectPath, objectName, schema, lgjson, lg) => {
  /**
   * @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
   * @objectPath {string} path where objects are stored
   * @objectName {string} name of the object
   * @schema {object} the json schema for this object
   * @lgjson {object} the json file for a specific language
   * @lg {string} the 2-letter language
   *
   * a schema :
   *   schemaPath/schema/objectName.json
   *             /lg/objectName_{lg}.json
   * an object :
   *   objectPath/objectName/idx/confjson = {"schema":"relative path file or http"}
   *                        /uniqueid.json defining the schema
   */
  if (!fs.existsSync(schemaPath)) {
    return { status: 404, ref: "Odmdb", info: "pathnamedoesnotexist", moreinfo: { fullpath: schemaPath } };
  }
  if (!fs.existsSync(objectPath)) {
    return { status: 404, ref: "Odmdb", info: "pathnamedoesnotexist", moreinfo: { fullpath: objectPath } };
  }
  // store the schema file if not empty, undefined or {}
  if (schema && !(Object.keys(schema).length === 0 && schema.constructor === Object)) {
    fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, { spaces: 2 });
  }
  if (lgjson && lg && !(Object.keys(lgjson).length === 0 && lgjson.constructor === Object)) {
    fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, { spaces: 2 });
  }
  //create the object environment with the new schema config
  if (!fs.existsSync(`${objectPath}/${objectName}`)) {
    fs.outputJsonSync(`${objectPath}/${objectName}/idx/confjson`, { schema: `${schemaPath}/schema/${objectName}.json` }, { spaces: 2 });
  }
  return { status: 200 };
}

Odmdb.schema = (schemaPath, objectName, withschemacheck) => {
  // Return the schema if it exists and schemaPath contains objectName { status:200; data:schema }
  if (!fs.existsSync(`${schemaPath}/${objectName}`))
    return {
      status: 404,
      info: "|odmdb|schemapathnamedoesnotexist",
      moreinfo: `${schemaPath}/${objectName}`,
    };
  if (!fs.existsSync(`${schemaPath}/schema/${objectName}.json`)) {
    return {
      status: 404,
      info: `|odmdb|schemanotfound`,
      moreinfo: `file not found ${schemaPath}/schema/${objectName}.json`,
    };
  }
  const schema = fs.readJsonSync(`${schemaPath}/schema/${objectName}.json`);
  // check apx schema validity specifics: primarykey, uniquekey and searchindex
  if (withschemacheck) {
    if (!schema.apxprimarykey) {
      // code 422: unprocessable Content
      return {
        status: 422,
        info: "|Odmdb|apxprimarykeynotfound",
        moreinfo: `${schemaPath}/schema/${objectName}.json`,
      };
    } else {
      if (
        !(
          schema.apxsearchindex[schema.apxprimarykey] &&
          schema.apxsearchindex[schema.apxprimarykey].list
        )
      ) {
        return {
          status: 422,
          info: "|Odmdb|apxprimaryketnotinsearchindexlist",
          moreinfo: `${schemaPath}/schema/${objectName}.json`,
        };
      }
      if (schema.apxuniquekey) {
        schema.apxuniquekey.forEach((k) => {
          if (
            !(
              schema.apxsearchindex[k] &&
              schema.apxsearchindex[k][schema.apxprimarykey]
            )
          ) {
            return {
              status: 422,
              info: "|Odmdb|apxuniquekeynotinsearchindex",
              moreinfo: `${schemaPath}/schema/${objectName}.json`,
            };
          }
        });
      }
    }
    const validschema = Checkjson.schema.validation(schema);
    if (validschema.status != 200) return validschema;
  }
  return {
    status: 200,
    data: schema,
  };
};

Odmdb.Checkjson = (objectPath, objectName, data, withschemacheck) => {
  /*
  @objectPath path to the folder that contains /objects/objectName/ /lg/objectName_{lg}.json /schema/objectName.json
  @objectName name of the object
  @data data to check based on the schema objectName definition

  @return status:200 Data is consistent with the schema and the primarykey does not exist
          status:201 Data is consistent with the schema and the primarykey already exists
          status:other means inconsistent schema:
            404: schema does not exist
            or inconsistent data and schema from Checkjson.js Checkjson.schema.data
  */
  const res = { status: 200 };
  //get the schema link of the object
  const schemaPath = fs.readJsonSync(`${objectPath}/${objectName}/idx/confjson`)['schema'];
  if (schemaPath.substring(0, 4) == "http") {
    // make an http request to fetch the schema (not yet implemented)
  } else {
    // local schema file, loaded through Odmdb.schema below
  }
  // check schema validity
  const schema = Odmdb.schema(objectPath, objectName, withschemacheck);
  if (schema.status != 200) return schema;
  console.log("SCHEMA for checking:");
  console.log(schema.data);
  console.log("DATA to check:");
  console.log(data);
  // withschemacheck at false: if the check is requested, it is done in Odmdb.schema
  const validate = Checkjson.schema.data(schema.data, data, false);
  if (validate.status != 200) {
    return validate;
  }
  if (
    schema.data.apxprimarykey &&
    data[schema.data.apxprimarykey] &&
    fs.existsSync(`${objectPath}/${objectName}/${data[schema.data.apxprimarykey]}.json`)
  ) {
    res.status = 201; // means created => an object with this primary key exists
  }
  if (schema.data.apxuniquekey) {
    schema.data.apxuniquekey.forEach((k) => {
      if (
        data[k] &&
        fs.existsSync(
          `${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json`
        ) &&
        fs.readJsonSync(
          `${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json`
        )[k]
      ) {
        res.status = 201; // means created => exists as a unique key
      }
    });
  }
  return res;
};
Odmdb.search = (objectPath, objectName, search) => {
  /*
  @search= {
    txt: string,
    algo: match | pattern | fuzzy
    fieldstring: [list of fields],
    indexfilter: {index1: [val1, val2 | ]}
  }
  Return data:[uuids]

  example: search exact match hill in townId
  heavy search={txt:"hill",algo:"match",fieldstring:"townId"}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}
  */
  const schema = Odmdb.schema(objectPath, objectName);
  if (schema.status != 200) return schema;
};
Odmdb.get = (objectPath, objectName, uuidprimarykeyList, fieldList) => {
  /*
  @uuidprimarykeyList list of requested uuids
  @fieldList keys to return for each object
  Return objectName {status:200; data:{found:[{primarykey,field}],notfound:[uuid]}
  if all primary keys exist then data.notfound does not exist
  if no primary key exists then data.found does not exist
  */
  const res = { status: 200, data: {} };
  uuidprimarykeyList.forEach((id) => {
    if (fs.existsSync(`${objectPath}/${objectName}/${id}.json`)) {
      if (!res.data.found) res.data.found = [];
      const objectdata = fs.readJsonSync(
        `${objectPath}/${objectName}/${id}.json`
      );
      if (!fieldList) {
        res.data.found.push(objectdata);
      } else {
        const objinfo = {};
        fieldList.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data.found.push(objinfo);
      }
    } else {
      if (!res.data.notfound) res.data.notfound = [];
      res.data.notfound.push(id);
    }
  });
  return res;
};
Odmdb.create = (objectPath, objectName, data) => {
  /*
  Create an object's data in objectName
  @objectPath path to the folder that contains /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of the object
  @data data to check based on the objectsMeta definition
  */
};
Odmdb.update = (objectPath, objectName, data) => {
  /*
  Update an object's data in objectName
  @objectPath path to the folder that contains /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of the object
  @data data to check based on the objectsMeta definition
  */
};
Odmdb.delete = (objectPath, objectName, data) => {
  /*
  Delete an object's data in objectName
  @objectPath path to the folder that contains /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of the object
  @data data to check based on the objectsMeta definition
  */
};
/*console.log("test Odmdb");
console.log(
  Odmdb.check(
    "/media/phil/usbfarm/apxtrib/nationchains/socialworld/objects",
    "nations",
    { nationId: "123", status: "unchain" }
  )
);*/
module.exports = Odmdb;
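A sketch of reading objects through Odmdb.get (the objectPath and ids below are illustrative, not an actual layout shipped in this commit):

const Odmdb = require( './api/models/Odmdb' );
const res = Odmdb.get(
  './nationchains',          // hypothetical objectPath
  'towns',                   // objectName
  [ 'dt', 'unknownId' ],     // primary keys to fetch
  [ 'townId', 'nationId' ]   // fields to keep per object
);
// res.data.found holds the projected objects, res.data.notfound the missing ids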
120
api/models/Pagans.js
Normal file
@@ -0,0 +1,120 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const axios = require('axios');
const openpgp = require('openpgp');
const conf = require('../../nationchains/tribes/conf.json');

/**
 * Pagan numeric identity management
 */

const Pagans = {};

Pagans.createId = async (alias, passphrase) => {
  /**
   * @param {string} alias a unique alias that identifies an identity
   * @param {string} passphrase a string to cipher the privateKey (can be empty, less secure but simpler)
   * @return {publicKey,privateKey} with userIds = [{alias}]
   */
  let apxpagans = {};
  if (fs.existsSync(`${conf.dirname}/nationchains/pagans/idx/alias_all.json`)) {
    apxpagans = fs.readJsonSync(
      `${conf.dirname}/nationchains/pagans/idx/alias_all.json`
    );
  }
  if (Object.keys(apxpagans).includes(alias)) {
    return { status: 409, ref: "pagans", msg: "aliasalreadyexist" };
  }
  const { privateKey, publicKey } = await openpgp.generateKey({
    type: "ecc", // Type of the key, defaults to ECC
    curve: "curve25519", // ECC curve name, defaults to curve25519
    userIDs: [{ alias: alias }], // you can pass multiple user IDs
    passphrase: passphrase, // protects the private key
    format: "armored", // output key format, defaults to 'armored' (other options: 'binary' or 'object')
  });
  console.log(privateKey);
  console.log(publicKey);
  apxpagans[alias] = { alias, publicKey };

  fs.outputJsonSync(`${conf.dirname}/nationchains/pagans/idx/alias_all.json`, apxpagans);
  fs.outputJsonSync(`${conf.dirname}/nationchains/pagans/itm/${alias}.json`, { alias, publicKey });
  return { status: 200, data: { alias, privateKey, publicKey } };
}

Pagans.generateKey = async (alias, passphrase) => {
  /**
   * @param {string} alias a unique alias that identifies an identity
   * @param {string} passphrase a string to cipher the privateKey (can be empty, less secure but simpler)
   * @return {publicKey,privateKey} with userIds = [{alias}]
   */
  const { privateKey, publicKey } = await openpgp.generateKey({
    type: "ecc", // Type of the key, defaults to ECC
    curve: "curve25519", // ECC curve name, defaults to curve25519
    userIDs: [{ alias: alias }], // you can pass multiple user IDs
    passphrase: passphrase, // protects the private key
    format: "armored", // output key format, defaults to 'armored' (other options: 'binary' or 'object')
  });
  // keys start with '-----BEGIN PGP PRIVATE KEY BLOCK ... '
  // get the list of alias:publickey with await axios.get('api/v0/pagans')
  // check the alias does not exist
  console.log(privateKey);
  return { alias, privateKey, publicKey };
};
//console.log( Pagans.generateKey('toto',''))
Pagans.detachedSignature = async (pubK, privK, passphrase, message) => {
  /**
   * @pubK {string} an armored public key
   * @privK {string} an armored private key
   * @passphrase {string} used to read privK
   * @message {string} message to sign
   * @return a detached Signature of the message
   */
  const publicKey = await openpgp.readKey({ armoredKey: pubK });
  const privateKey = await openpgp.decryptKey({
    privateKey: await openpgp.readPrivateKey({ armoredKey: privK }),
    passphrase,
  });
  const msg = await openpgp.createMessage({ text: message });
  return await openpgp.sign({ message: msg, signingKeys: privateKey, detached: true });
};
Pagans.checkdetachedSignature = async (
  alias,
  pubK,
  detachedSignature,
  message
) => {
  /**
   * @alias {string} alias linked to the publicKey
   * @pubK {string} publicKey in text format
   * @detachedSignature {string} a detached signature produced by Pagans.detachedSignature
   * @message {string} the message that was signed
   * @return {boolean} true if the message was signed by alias
   *                   false if it was not
   */
  const publicKey = await openpgp.readKey({ armoredKey: pubK });
  const msg = await openpgp.createMessage({ text: message });
  const signature = await openpgp.readSignature({
    armoredSignature: detachedSignature, // parse detached signature
  });
  const verificationResult = await openpgp.verify({
    message: msg,
    signature,
    verificationKeys: publicKey,
  });
  const { verified, keyID } = verificationResult.signatures[0];
  try {
    await verified; // throws on invalid signature
    console.log("Signed by key id " + keyID.toHex());
    // verificationKeys is the alias's public key, so a valid signature means it was signed by alias
    return true;
  } catch (e) {
    console.log("Signature could not be verified: " + e.message);
    return false;
  }
};

module.exports = Pagans;
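A sketch of a full create/sign/verify round trip with the functions above (the alias, passphrase and message are illustrative; createId also writes the alias index files shown in the code):

const Pagans = require( './api/models/Pagans' );
( async () => {
  const id = await Pagans.createId( 'toto', 'mysecret' );
  if ( id.status != 200 ) return console.log( id );
  const { publicKey, privateKey } = id.data;
  const sign = await Pagans.detachedSignature( publicKey, privateKey, 'mysecret', 'hello town' );
  console.log( await Pagans.checkdetachedSignature( 'toto', publicKey, sign, 'hello town' ) ); // true
} )();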
321
api/models/Setup.js
Normal file
@@ -0,0 +1,321 @@
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const dnsSync = require("dns-sync");
|
||||
const mustache = require("mustache");
|
||||
const readlineSync = require("readline-sync");
|
||||
|
||||
/**
|
||||
* This Setup is run at the first installation
|
||||
* This is not an exportable module
|
||||
*
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
const Setup = {};
|
||||
//const nationsync = require('./Nations').updateChains();
|
||||
|
||||
Setup.check = () => {
|
||||
if ("testinternet" != "testinternet") {
|
||||
console.log(
|
||||
"\x1b[31m Check your internet access, to setup this town we need to update the Nations. It seems we cannot do it"
|
||||
);
|
||||
process.exit();
|
||||
}
|
||||
if (!fs.existsSync("/etc/nginx/nginx.conf")) {
  console.log(
    "\x1b[31m Check documentation, nginx has to be installed on this server first, no /etc/nginx/nginx.conf available"
  );
  process.exit();
}
if (fs.existsSync("./nationchains/tribes/conf.json")) {
  console.log(
    "\x1b[31m Be careful, you already have a town set in ./nationchains/tribes/conf.json, check and remove it if you want to set up this town."
  );
  process.exit();
}
return true;
};

Setup.init = async () => {
  // Get standard conf and current data
  const townconf = fs.readJsonSync("./nationchains/www/adminapx/townconf.json");
  const apxnations = fs.readJsonSync(
    `./nationchains/nations/idx/nationId_all.json`
  );
  const apxtowns = fs.readJsonSync(`./nationchains/towns/idx/townId_all.json`);
  let apxpagans = {};
  if (fs.existsSync(`./nationchains/pagans/idx/alias_all.json`)) {
    apxpagans = fs.readJsonSync(`./nationchains/pagans/idx/alias_all.json`);
  }

  if (!Object.keys(apxnations).includes(townconf.nationId)) {
    console.log(
      `Sorry, nationId ${townconf.nationId} does not exist, please change it to an existing nation`
    );
    process.exit();
  }
  if (Object.keys(apxtowns).includes(townconf.townId)) {
    console.log(
      `Sorry, townId ${townconf.townId} already exists, please change it`
    );
    process.exit();
  }
  /*
  if (Object.keys(apxpagans).includes(townconf.mayorId)) {
    console.log(
      `Sorry, paganId ${townconf.mayorId} already exists; to create a town you need a new identity, please change it`
    );
    process.exit();
  }
  */
  townconf.sudoerUser = process.env.USER;
  townconf.dirname = path.resolve(`${__dirname}/../../`);
  // nginx allows setting up a new website space
  townconf.nginx.include.push(
    `${townconf.dirname}/nationchains/**/nginx_*.conf`
  );
  townconf.nginx.logs = `${townconf.dirname}/nationchains/logs/nginx`;
  townconf.nginx.website = "adminapx";
  townconf.nginx.fswww = "nationchains/"; // for a local tribe: nationchains/tribes/tribeid
  townconf.nginx.tribeid = "town";
  townconf.nginx.pageindex = "index_en.html";

  console.log(townconf);
  if (
    !readlineSync.keyInYN(
      `\x1b[42mThis is the first install from ./nationchains/www/adminapx/townconf.json (check it if you want) \nthis will change your nginx config in /etc/nginx and run nginx from sudoer user ${townconf.sudoerUser} (Yes/no)? \nno if you want to change parameters and run yarn setup again \x1b[0m`
    )
  )
    process.exit();

  // save then change the nginx conf
  if (!fs.existsSync("/etc/nginx/nginxconf.saved")) {
    fs.moveSync("/etc/nginx/nginx.conf", "/etc/nginx/nginxconf.saved");
    console.log(
      "your previous /etc/nginx/nginx.conf was backed up in /etc/nginx/nginxconf.saved"
    );
  }
  const tplnginxconf = fs.readFileSync(
    "./nationchains/www/adminapx/nginx/nginx.conf.mustache",
    "utf8"
  );
  fs.outputFileSync(
    "/etc/nginx/nginx.conf",
    mustache.render(tplnginxconf, townconf),
    "utf8"
  );
  const tplnginxwww = fs.readFileSync(
    "./nationchains/www/adminapx/nginx/modelwebsite.conf.mustache",
    "utf8"
  );
  fs.outputFileSync(
    `./${townconf.nginx.fswww}www/nginx_${townconf.nginx.website}.conf`,
    mustache.render(tplnginxwww, townconf),
    "utf8"
  );
  fs.outputJsonSync("./nationchains/tribes/conf.json", townconf, {
    spaces: 2,
  });
  // CREATE A TOWN locally (see/consider using towns.create)
  townconf.town = {
    townId: townconf.townId,
    nationId: townconf.nationId,
    url: `http://${townconf.dns[0]}`,
    IP: townconf.IP,
    mayorid: townconf.mayorId,
    status: "unchain",
  };
  apxtowns[townconf.townId] = townconf.town;
  fs.outputJsonSync(`./nationchains/towns/idx/townId_all.json`, apxtowns);
  fs.outputJsonSync(
    `./nationchains/towns/itm/${townconf.townId}.json`,
    townconf.town,
    { spaces: 2 }
  );
  // Create the tribe id (see/consider using tribes.create())
  townconf.tribe = {
    tribeId: townconf.tribeId,
    dns: [],
    status: "unchain",
    nationId: townconf.nationId,
    townId: townconf.townId,
  };
  // tribe does not exist in a new town
  const apxtribes = {};
  apxtribes[townconf.tribeId] = townconf.tribe;
  fs.outputJsonSync(`./nationchains/tribes/idx/tribeId_all.json`, apxtribes);
  fs.outputJsonSync(
    `./nationchains/tribes/itm/${townconf.tribeId}.json`,
    townconf.tribe,
    { spaces: 2 }
  );
  fs.ensureDirSync(`./nationchains/tribes/${townconf.tribeId}/logs`);

  // CREATE a mayorId pagan if it does not exist
  if (!apxpagans[townconf.mayorId]) {
    const Pagans = require("./Pagans");
    const createPagans = await Pagans.createId(
      townconf.mayorId,
      townconf.passphrase
    );
    if (createPagans.status == 200) {
      fs.outputFileSync(
        `./${townconf.mayorId}_PrivateKey.txt`,
        createPagans.data.privateKey,
        "utf8"
      );
      fs.outputFileSync(
        `./${townconf.mayorId}_PublicKey.txt`,
        createPagans.data.publicKey,
        "utf8"
      );
      console.log(
        `\x1b[43mCut and paste your keys /${townconf.mayorId}_PrivateKey.txt /${townconf.mayorId}_PublicKey.txt \x1b[0m`
      );
    } else {
      console.log("Error at Pagan creation");
      console.log(createPagans);
      process.exit();
    }
  }
  // restart nginx
  const { exec } = require("child_process");
  exec(townconf.nginx.restart, (error, stdout, stderr) => {
    if (error) {
      console.log("\x1b[42m", error, stdout, stderr, "\x1b[0m");
    } else {
      console.log(
        `\x1b[42m###########################################################################################\x1b[0m\n\x1b[42mWelcome into apxtrib, you can now 'yarn dev' for dev or 'yarn startpm2' for prod or \n'yarn unittest' for testing purpose. Access your town here \x1b[0m\x1b[32mhttp://${townconf.dns}\x1b[0m \x1b[42m \nto finish your town setup. Check the project's README to learn more. \x1b[0m\n\x1b[42m###########################################################################################\x1b[0m`
      );
    }
  });
};
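
// Hedged illustration (not part of the setup flow): how the mustache rendering
// above fills the nginx templates. The template line below is hypothetical,
// not the real content of nginx.conf.mustache.
// const mustacheDemo = require("mustache");
// mustacheDemo.render("error_log {{nginx.logs}}/error.log;", {
//   nginx: { logs: "/srv/apxtrib/nationchains/logs/nginx" },
// });
// // => "error_log /srv/apxtrib/nationchains/logs/nginx/error.log;"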

Setup.Checkjson = (conf) => {
  let rep = "";
  const nation_town = fs.readJsonSync(
    "./nationchains/socialworld/objects/towns/searchindex/towns_nation_uuid.json"
  );
  if (!Object.keys(nation_town).includes(conf.nationId)) {
    rep += `your nationId ${conf.nationId} does not exist, you have to choose an existing one`;
  }
  if (nation_town[conf.nationId].includes(conf.townId)) {
    rep += `This conf.townId already exists, you have to find a unique town name per nation`;
  }
  const getnation = Odmdb.get(
    "./nationchains/socialworld/objects",
    "towns",
    [conf.nationId],
    ["nationId"]
  );
  //if getnation.data.notfound
  conf.language.forEach((l) => {
    if (!["fr", "en", "it", "de", "sp"].includes(l)) {
      rep += l + " Only fr,en,it,de,sp are available \n";
    }
  });
  if (!fs.existsSync(`/home/${conf.sudoerUser}`)) {
    rep += `/home/${conf.sudoerUser} does not exist, the user has to be created with a /home on this server\n`;
  }
  try {
    const issudoer = execSync(
      'timeout 2 sudo id && sudo="true" || sudo="false";echo "$sudo"'
    )
      .toString()
      .trim()
      .split(/\r?\n/)
      .slice(-1)[0];
    if (issudoer != "true") {
      rep += `${conf.sudoerUser} is not a sudoer, please change this `;
    }
  } catch (err) {
    console.log(err);
    rep += " Check your user, it seems not to be a sudoer";
  }
  if (conf.jwtsecret.length < 32) {
    rep += "Your jwtsecret must have at least 32 characters";
  }
  if (
    conf.dns != "unchain" &&
    !dnsSync.resolve(`${conf.townId}.${conf.nationId}.${conf.dns}`)
  ) {
    rep += `\nresolving ${conf.townId}.${conf.nationId}.${conf.dns} does not return a valid IP, please set up the domain's IP redirection before running this script`;
  }
  return rep;
};
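
// Hedged usage sketch for the pre-flight check above (the townconf.json shape is
// assumed from the fields it reads: nationId, townId, language, sudoerUser,
// jwtsecret, dns):
// const report = Setup.Checkjson(
//   fs.readJsonSync("./nationchains/www/adminapx/townconf.json")
// );
// if (report !== "") { console.log(report); process.exit(1); }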

Setup.config = (townSetup) => {
  // Init this instance with a ./config.js
  Setup.configjs(townSetup);
  // Create tribeid space + an admin user + a webspace with the apxtrib webapp installed
  Setup.druidid(townSetup);
};
Setup.configjs = (townSetup) => {
  // Set /config.js
  let confapxtrib = fs.readFileSync("./setup/config.mustache", "utf-8");
  fs.writeFileSync(
    "./config.js",
    Mustache.render(confapxtrib, townSetup),
    "utf-8"
  );
  if (fs.existsSync("./config.js")) {
    console.log("config.js successfully created.");
  } else {
    console.log(
      "config.js not created, check what's wrong in tpl:",
      confapxtrib
    );
    console.log("for data:", townSetup);
    process.exit();
  }
};
Setup.druidid = (townSetup) => {
  // create a tribeid with a user that will admin this instance into /tribes/tribeid/users
  const config = require("../config.js");
  // Needed at setup time; this is also done again in models/Tribes.js
  console.log(`${config.tribes}/${townSetup.druidid}`);
  fs.ensureDirSync(`${config.tribes}/${townSetup.druidid}`);
  ["users", "www", "referentials", "nationchains"].forEach((r) => {
    fs.copySync(
      `${__base}/setup/tribes/apxtrib/${r}`,
      `${config.tribes}/${townSetup.druidid}/${r}`
    );
  });
  /* const confcli = JSON.parse( Mustache.render( fs.readFileSync( `${__base}/setup/tribes/apxtrib/clientconf.mustache`, 'utf8' ), townSetup ) );
  fs.outputJsonSync( `${config.tribes}/${townSetup.druidid}/clientconf.json`, confcli );
  // Create a new tribeid + admin user for this tribeid
  // with access to {druidid}:webapp as admin
  */
  const Tribes = require("./Tribes.js");
  const access = { app: {}, data: {} };
  access.app[`${townSetup.druidid}:webapp`] = "admin";
  access.data[townSetup.druidid] = {
    users: "CRUDO",
    referentials: "CRUDO",
    www: "CRUDO",
  };
  const createclient = Tribes.create({
    tribeid: townSetup.druidid,
    genericpsw: townSetup.genericpsw,
    lanquageReferential: townSetup.language,
    useradmin: {
      LOGIN: townSetup.login,
      xlang: townSetup.language[0],
      ACCESSRIGHTS: access,
    },
  });
  if (createclient.status == 200) {
    console.log(
      `Your tribeid domain was created with login: ${townSetup.login} and password: ${townSetup.genericpsw}, change it after the 1st login on https://${townSetup.subdomain}.${townSetup.domain}`
    );
    // Create nginx conf for a first install
    const confnginx = fs.readFileSync(
      "./setup/nginx/nginx.conf.mustache",
      "utf8"
    );
    fs.outputFileSync(
      "/etc/nginx/nginx.conf",
      Mustache.render(confnginx, townSetup),
      "utf-8"
    );
    // Create a spacedev for the apxtrib webapp,
    // accessible in prod from https://subdomain.domain/ and in dev from http://webapp.local.fr
    const addspaceweb = Tribes.addspaceweb({
      setup: true,
      dnsname: [`${townSetup.subdomain}.${townSetup.domain}`],
      mode: townSetup.mode,
      tribeid: townSetup.druidid,
      website: "webapp",
      pageindex: "app_index_fr.html",
    });
    if (addspaceweb.status == 200) {
      console.log(`WELL DONE, run yarn dev to test then yarn startpm2`);
    }
  } else {
    console.log("Issue ", createclient);
  }
};

if (Setup.check()) Setup.init();
646
api/models/Toolsbox.js
Executable file
@@ -0,0 +1,646 @@

/* eslint-disable no-useless-escape */
const fs = require("fs");
const path = require("path");
const bcrypt = require("bcrypt");
const moment = require("moment");
const Checkjson = require("./Checkjson.js"); // assumed local model, needed by normalize.telephonefr below
const utils = {};

console.log(
  "Check in /utils/index.js to find useful functions for your dev.\n Feel free to send suggestions or code to the maintainer of the apxtrib project (see /package.json to get the email).\n We'll add it to the roadmap."
);

/*
 * EMAIL — currently unused validators kept for reference:
 *
const validateEmail = (email) => {
  const regExp = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
  return regExp.test(email);
};

const validatePassword = (pwd) => {
  const regExp = new RegExp(
    /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/
  );
  return regExp.test(pwd);
};

const filterInvalidInArray = (array, validate) =>
  array ? array.filter((el) => !validate(el)) : undefined; // return undefined when every element is valid

// POSTAL CODE
const validatePostalCode = (postalCode) =>
  /(^\d{5}$)|(^\d{5}-\d{4}$)/.test(postalCode);

// PHONE
const validatePhoneNumber = (phoneNumber) =>
  /((^0[1-9]|\+[0-9]{3})([-. ]?[0-9]{2}){4}$)/.test(phoneNumber);

const correctPhoneNumber = (phone) =>
  phone[0] === '0' ? '+33' + phone.substr(1) : phone;

const Checkjson = (appProfil, referential, data) => {
  // @TODO get a referential per object then check data validity and allowed access
  // need to add a referential manager
  const invalidefor = [];
  let updateDatabase = false;
  Object.keys(data).forEach((field) => {
    switch (field) {
      case 'token':
        updateDatabase = true;
        break;
      case 'email':
        if (!validateEmail(data.email)) {
          invalidefor.push('ERREMAIL:' + field);
        } else {
          updateDatabase = true;
        }
        break;
      case 'password':
        if (!validatePassword(data.password)) {
          invalidefor.push('ERRPWD:' + field);
        } else {
          data.password = bcrypt.hash(data.password, config.saltRounds);
          updateDatabase = true;
        }
        break;
    }
  });
  return { invalidefor, data, updateDatabase };
};
*/
// Wait for the given number of milliseconds.
// Usage, inside an async function:
//   await utils.sleep(2000);
utils.sleep = (ms) => {
  return new Promise((resolve) => setTimeout(resolve, ms));
};
utils.normalize = {};
utils.normalize.telephonefr = (phone) => {
  phone = phone.trim().replace(/[- .]/g, "");
  if (
    Checkjson.schema.properties.format.telephoenfr(phone) &&
    phone.length == 10 &&
    phone[0] == "0"
  ) {
    phone = "+33 " + phone.substring(1);
  }
  return phone;
};
utils.normalize.zfill10 = (num) => {
  let s = num + "";
  while (s.length < 10) s = "0" + s;
  return s;
};
utils.generemdp = (nbpos, fromchar) => {
  if (!fromchar) {
    fromchar = "ABCDEFGHIJKLMNPQRSTUVWZY123456789";
  }
  let mdp = "";
  for (let i = 0; i < nbpos; i++) {
    const pos = Math.floor(Math.random() * fromchar.length);
    mdp += fromchar.substring(pos, pos + 1);
  }
  return mdp;
};
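
// Hedged usage sketch: with the default charset this yields e.g. an 8-char
// random string such as "K3RZP1HM" (output varies per call):
// const onetimepwd = utils.generemdp(8);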

utils.generecompteur = (filecpt, typeincrement) => {
  let file = `${filecpt}/${typeincrement}.json`;
  let prefix = "";
  if (typeincrement == "ANNEESEMAINE") {
    file = `${filecpt}/${typeincrement}${moment().format(
      "YYYY"
    )}${moment().format("WW")}.json`;
    prefix = `${moment().format("YYYY")}${moment().format("WW")}`;
  }
  let num = 1;
  try {
    num = parseInt(fs.readFileSync(file, "utf8")) + 1;
  } catch (err) {
    console.log("New incremental counter", file);
  }
  fs.writeFileSync(file, `${num}`, "utf8");
  return prefix + num;
};
/**
 * CSV
 */
utils.json2csv = (jsondata, options, callback) => {
  // only supports json = [{niv1: val, niv1: [list of values]}]
  if (jsondata.length == 0) {
    return callback("Empty json", null);
  }
  if (!options.retln) options.retln = "\n";
  if (!options.sep) options.sep = ";";
  if (!options.arraysplitsep) options.arraysplitsep = ",";
  if (!options.replacespecialcarJson2Csv) {
    options.replacespecialcarJson2Csv = [];
  } else {
    if (typeof options.replacespecialcarJson2Csv == "string") {
      // allows passing regexes as strings
      options.replacespecialcarJson2Csv = eval(
        options.replacespecialcarJson2Csv
      );
    }
  }
  let etat = "";
  let csv = "";
  let entete = "";
  let prem = true;
  for (const j in jsondata) {
    for (const c in options.champs) {
      if (prem) {
        entete += options.champs[c] + options.sep;
      }
      if (jsondata[j][options.champs[c]]) {
        if (options.array.indexOf(options.champs[c]) > -1) {
          csv +=
            jsondata[j][options.champs[c]].join(options.arraysplitsep) +
            options.sep;
        } else {
          let currentValue = "";
          if (jsondata[j][options.champs[c]])
            currentValue += jsondata[j][options.champs[c]];
          options.replacespecialcarJson2Csv.forEach((re) => {
            currentValue = currentValue.replace(re[1], re[0]);
          });
          csv += currentValue + options.sep;
        }
      } else {
        csv += options.sep;
      }
    }
    csv = csv.substring(0, csv.length - 1) + options.retln;
    if (prem) {
      prem = false;
      entete = entete.substring(0, entete.length - 1) + options.retln;
    }
  }
  if (etat == "") {
    return callback(null, entete + csv);
  } else {
    return callback(etat, null);
  }
};
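
// Hedged usage sketch (the data and field names below are made up):
// utils.json2csv(
//   [{ NAME: "Ana", TAG: ["shop", "food"] }],
//   { champs: ["NAME", "TAG"], array: ["TAG"] },
//   (err, csv) => {
//     if (!err) console.log(csv); // "NAME;TAG\nAna;shop,food\n"
//   }
// );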

/**
 * Get headers from the first line of a CSV
 * @param {array} lines array of strings, one per csv line
 * @param {string} sep cell separator
 * @return {array} string array of headers
 */
utils.getHeaders = (lines, sep) =>
  lines[0].split(sep).map((i) => i.replace(/"/g, ""));
/**
 * [csv2json description]
 * @param {object} csv object of csv file that has been read
 * @param {object} options object containing csv options, headers, ...
    {retln: 'line return code, \n or \n\r',
     sep: 'code to split cells',
     champs: [ch1, ch2, ...] catch only those fields,
     array: [ch1, ...] a field can appear more than once, then data are pushed into an array}
 * @param {Function} callback callback function
 * @return {callback} - return an error if error, else return json
   it converts a csv file into a json = [{field: value}]

   Usage example:
   fiche.csv2article = (err, fiche) => {
     if (!err) {
       console.log(fiche)
     }
   }
   utils.csv2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
     retln: "\n",
     sep: ";",
     champs: ["NOM", "OBJET", "ADRESSE_PRO", "CP_PRO", "VILLE_PRO", "ZONE", "PHONE_PRO", "HORAIRESDESC", "HORAIREDATA", "URL", "FACEBOOK", "INSTA", "EMAIL_PRO", "IMG", "TAG"],
     array: ["TAG", "PHONE_PRO", "EMAIL_PRO"]
   }, fiche.csv2article)

 */
utils.replacecarbtweendblquote = (csv, car, carremplacant) => {
  /*
  return the csv text with any car between 2 " replaced by carremplacant
  */
  let newcsv = "";
  let txtencours = "";
  let flagouvert = false;
  const sepreg = new RegExp(`${car}`, "gmi");
  for (let j = 0; j < csv.length; j++) {
    if (csv[j] == '"') {
      if (flagouvert) {
        // we are trying to close a text string
        if (csv[j + 1] == '"') {
          // two consecutive "" are kept as "" and we skip one position
          txtencours += '""';
          j++;
        } else {
          // this is a real closing quote
          flagouvert = false;
          newcsv += txtencours.replace(sepreg, carremplacant);
          txtencours = '"';
        }
      } else {
        // we open a string
        flagouvert = true;
        // flush the previous content into newcsv
        newcsv += txtencours;
        txtencours = '"';
      }
    } else if (csv[j] !== "\n") {
      txtencours += csv[j];
    } else if (csv[j] == "\n") {
      if (!flagouvert) txtencours += "\n";
    }
  }
  return newcsv + txtencours;
};
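
// Hedged behavior sketch:
// utils.replacecarbtweendblquote('a;"b;c";d', ";", "CARSEPARATOR")
// // => 'a;"bCARSEPARATORc";d'  (only the ; inside the quotes is replaced)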

utils.analysestring = (string) => {
  let buftxt = "";
  let bufcode = "";
  let i = 0;
  let avecRL = false;
  for (let p = 0; p < string.length; p++) {
    if (string[p].charCodeAt() == 10) {
      buftxt += "[RL]";
      avecRL = true;
    } else {
      buftxt += string[p];
    }
    bufcode += "-" + string[p].charCodeAt();
    if (i == 20) {
      if (avecRL) {
        console.log(`${buftxt} - ${bufcode}`);
      } else {
        console.log(`${buftxt} ---- ${bufcode}`);
      }
      i = 0;
      buftxt = "";
      bufcode = "";
      avecRL = false;
    }
    i++;
  }
};

// sample raw CSV extract used to debug the helpers above
const txtstring = `32932,BK_F2F_B_COM_10x1H-09,"My Communication Workshop ""Session N°9 - 1H""","<p> </p>
<table>
<tbody>
<tr>
<td>
<p>Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking</p>
</td>
</tr>
</tbody>
</table>",,english,2,0,,2,0,classroom,"0000-00-00 00:00:00","0000-00-00 00:00:00",0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32932,1101,,"BUSINESS KEYS",0,
32933,BK_F2F_B_COM_10x1H-10,"My Communication Workshop Session N°10 - 1H","<p> </p>
<table>
<tbody>
<tr>
<td>
<p>Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking</p>
</td>
</tr>
</tbody>
</table>",,english,2,0,,2,0,classroom,"0000-00-00 00:00:00","0000-00-00 00:00:00",0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32933,1101,,"BUSINESS KEYS",0,
32934,BK_F2F_B_JOB_10x1H-01,"My Job Search Workshop Session N°1 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32934,1108,,,0,
32935,BK_F2F_B_JOB_10x1H-02,"My Job Search Workshop Session N°2 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32935,1108,,,0,`;
//utils.analysestring(txtstring)
//console.log(utils.replacecarbtweendblquote(txtstring, ",", 'CARSEPARATOR')
//  .split("\n")[0].split(","))
utils.csv2json = (csv, options, callback) => {
  // IF AN EXCEL FILE MISBEHAVES:
  // open it in Linux Calc and save as csv utf8, ; separator, " quotes, "save cell content as shown"
  console.log("\n--------------- CSV2JSON ---------------\n");
  // Default CSV options
  if (!options.retln) options.retln = "\n";
  if (csv.indexOf("\n\r") > -1) options.retln = "\n\r";
  if (!options.sep) options.sep = ";";
  // handling of a separator embedded in a text string
  //const regseptext = new RegExp(`${options.sep}(?!(?:[^"]*"[^"]*")*[^"]*$)`, 'gm');
  //csv = csv.replace(regseptext, "CARACSEPAR");
  // csv = utils.replacecarbtweendblquote(csv, options.retln, "RETLIGNE")
  csv = utils.replacecarbtweendblquote(csv, options.sep, "CARSEPARATOR");
  if (!options.replacespecialcarCsv2Json) {
    options.replacespecialcarCsv2Json = [];
  } else {
    if (typeof options.replacespecialcarCsv2Json == "string") {
      // allows passing regexes as strings
      options.replacespecialcarCsv2Json = eval(
        options.replacespecialcarCsv2Json
      );
    }
  }
  const result = [];
  const lines = csv.split(options.retln);
  const headers = utils.getHeaders(lines, options.sep);
  let unknownHeaders = "";
  headers.forEach((header) => {
    // If a header is not in the list of predefined fields,
    // add it to the unknown headers
    if (options.champs.indexOf(header) === -1) {
      unknownHeaders += `${header}, `;
    }
  });
  if (unknownHeaders !== "") {
    const errorMsg = `CSV2JSON() - Unknown fields: ${unknownHeaders}`;
    return callback(errorMsg, null);
  }
  lines.forEach((line, index) => {
    // Skip the headers line or empty lines
    if (index === 0 || line.replace(/\s/g, "").length === 0) {
      return;
    }
    // origincsv keeps the original line, useful for debugging
    const currentLineData = { origincsv: line, linenumber: index };
    const currentLine = line.split(options.sep); // current cells of the line
    for (let j = 0; j < headers.length; j++) {
      // If the cell is not empty
      if (currentLine[j]) {
        // Clean the field,
        // possibly replacing reserved characters (; in labels, etc.)
        let currentValue = currentLine[j].trim();
        // restore the separator that was masked between double quotes
        currentValue = currentValue.replace("CARSEPARATOR", options.sep);
        options.replacespecialcarCsv2Json.forEach((re) => {
          currentValue = currentValue.replace(re[0], re[1]);
        });
        // If the header is an email
        if (headers[j].includes("EMAIL")) {
          // Remove all spaces
          currentValue = currentLine[j].replace(/\s/g, "");
        }
        // check whether the field must be numeric
        if (options.numericfield.includes(headers[j])) {
          currentValue = currentLine[j].replace(/\,/g, ".");
          if (Number.isNaN(parseFloat(currentValue))) {
            return callback(
              `${headers[j]} contains the value -${currentValue}- and should be numeric`,
              null
            );
          }
        }
        if (currentValue) {
          // If the current header is of type array,
          // it means the header appears several times in the CSV
          // and the values matching this header
          // must be collected into an array
          if (options.array && options.array.indexOf(headers[j]) > -1) {
            // If the array for this header does not exist yet, create it
            if (!currentLineData[headers[j]]) {
              currentLineData[headers[j]] = [];
            }
            if (options.arraysplitsep) {
              currentValue.split(options.arraysplitsep).forEach((v) => {
                currentLineData[headers[j]].push(v);
              });
            } else {
              currentLineData[headers[j]].push(currentValue);
            }
          } else {
            // If a header is already present for this line
            // while it is not declared as an array,
            // return an error
            if (currentLineData[headers[j]]) {
              const errorMsg = `The field ${headers[j]} appears several times while it is not declared as an array!`;
              return callback(errorMsg, null);
            }
            currentLineData[headers[j]] = currentValue;
          }
        }
      }
    }
    result.push(currentLineData);
  });
  return callback(null, result);
};
/**
 * [csvparam2json description]
 * @param {object} csv object of csv file that has been read
 * @param {object} options object containing csv options, headers, ...
    {retln: 'line return code, \n or \n\r',
     sep: 'code to split cells',
     champs: [ch1, ch2, ...] catch only those fields,
     array: [ch1, ...] a field can appear more than once, then data are pushed into an array}
 * @param {Function} callback callback function
 * @return {callback} - return an error if error, else return json
   it converts a csv with 3 columns col1;col2;col3 into a json tree
   if col1 contains __ then it splits a leaf
   col1 = xxxx__yyyy ; col2 = value ; col3 = comment that is ignored
   return data = {xxxx:{yyyy:value}}
   col1 = xxxx; col2 = value; col3 = comment ignored
   return data = {xxxx:value}

   Usage example:
   fiche.csvparam2article = (err, fiche) => {
     if (!err) {
       console.log(fiche)
     }
   }
   utils.csvparam2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
     retln: "\n",
     sep: ";",
     champs: ["NOM", "OBJET", "ADRESSE_PRO", "CP_PRO", "VILLE_PRO", "ZONE", "PHONE_PRO", "HORAIRESDESC", "HORAIREDATA", "URL", "FACEBOOK", "INSTA", "EMAIL_PRO", "IMG", "TAG"],
     array: ["TAG", "PHONE_PRO", "EMAIL_PRO"]
   }, fiche.csv2article)

 */
utils.csvparam2json = (csv, options, callback) => {
  console.log("\n--------------- CSVPARAM2JSON ---------------\n");
  let etat = "";
  let result;
  const param = {};
  if (!options.retln) {
    options.retln = "\n";
  }
  if (csv.indexOf("\n\r") > -1) {
    options.retln = "\n\r";
  }
  if (!options.sep) {
    options.sep = ";";
  }
  if (!options.seplevel) {
    options.seplevel = "__";
  }
  if (!options.replacespecialcarCsv2Json) {
    options.replacespecialcarCsv2Json = [];
  } else {
    if (typeof options.replacespecialcarCsv2Json == "string") {
      // allows passing regexes as strings
      options.replacespecialcarCsv2Json = eval(
        options.replacespecialcarCsv2Json
      );
    }
  }
  const lines = csv.split(options.retln);
  for (let i = 0; i < lines.length; i++) {
    const infol = lines[i].split(options.sep);
    if (infol[0].length > 4 && infol.length < 2) {
      // if the 1st element has more than 4 characters and there are fewer columns than expected, something is wrong
      etat += `Error on ${lines[i]}: fewer than 2 columns separated by ${options.sep}`;
      continue;
    }
    // Here we handle all the special characters
    // reserved for csv: ; ' etc.
    if (infol[1] && infol[1] + "" == infol[1]) {
      options.replacespecialcarCsv2Json.forEach((re) => {
        infol[1] = infol[1].replace(re[0], re[1]);
      });
      infol[1] = infol[1].replace(/'|’/g, '"');
      if (infol[1].toLowerCase() === "true") {
        infol[1] = true;
      } else if (infol[1].toLowerCase() === "false") {
        infol[1] = false;
      }
    }
    // skip empty lines
    if (infol[0] == "") continue;
    if (infol[0].indexOf(options.seplevel) == -1) {
      param[infol[0]] = infol[1];
      continue;
    } else {
      const arbre = infol[0].split(options.seplevel);
      switch (arbre.length) {
        case 1:
          param[arbre[0]] = infol[1];
          break;
        case 2:
          if (arbre[1] != "ARRAY") {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            param[arbre[0]][arbre[1]] = infol[1];
          } else {
            if (!param[arbre[0]]) param[arbre[0]] = [];
            eval("result=" + infol[1]);
            param[arbre[0]].push(result);
          }
          break;
        case 3:
          if (arbre[2] != "ARRAY") {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
            param[arbre[0]][arbre[1]][arbre[2]] = infol[1];
          } else {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = [];
            eval("result=" + infol[1]);
            param[arbre[0]][arbre[1]].push(result);
          }
          break;
        case 4:
          if (arbre[3] != "ARRAY") {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
            if (!param[arbre[0]][arbre[1]][arbre[2]])
              param[arbre[0]][arbre[1]][arbre[2]] = {};
            param[arbre[0]][arbre[1]][arbre[2]][arbre[3]] = infol[1];
          } else {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
            if (!param[arbre[0]][arbre[1]][arbre[2]])
              param[arbre[0]][arbre[1]][arbre[2]] = [];
            eval("result=" + infol[1]);
            param[arbre[0]][arbre[1]][arbre[2]].push(result);
          }
          break;
        default:
          break;
      }
    }
  }
  if (etat == "") {
    return callback(null, JSON.parse(JSON.stringify(param)));
  } else {
    return callback(etat, null);
  }
};
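
// Hedged behavior sketch: a params CSV like
//   jwtsecret;averylongrandomsecret;at least 32 chars
//   smtp__service;gmail;mail provider
// yields { jwtsecret: "averylongrandomsecret", smtp: { service: "gmail" } }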

utils.levenshtein = (a, b) => {
  if (a.length === 0) return b.length;
  if (b.length === 0) return a.length;
  let tmp, i, j, prev, val, row;
  // swap to save some memory O(min(a,b)) instead of O(a)
  if (a.length > b.length) {
    tmp = a;
    a = b;
    b = tmp;
  }
  row = Array(a.length + 1);
  // init the row
  for (i = 0; i <= a.length; i++) {
    row[i] = i;
  }
  // fill in the rest
  for (i = 1; i <= b.length; i++) {
    prev = i;
    for (j = 1; j <= a.length; j++) {
      if (b[i - 1] === a[j - 1]) {
        val = row[j - 1]; // match
      } else {
        val = Math.min(
          row[j - 1] + 1, // substitution
          Math.min(
            prev + 1, // insertion
            row[j] + 1
          )
        ); // deletion
      }
      row[j - 1] = prev;
      prev = val;
    }
    row[a.length] = prev;
  }
  return row[a.length];
};
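
// Hedged example: the classic distance between "kitten" and "sitting" is 3
// (k→s substitution, e→i substitution, +g insertion):
// utils.levenshtein("kitten", "sitting"); // expected 3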

utils.testinarray = (array, arrayreferent) => {
  // true if at least one element of array exists in arrayreferent
  let exist = false;
  if (arrayreferent) {
    array.forEach((e) => {
      if (arrayreferent.includes(e)) exist = true;
    });
  }
  return exist;
};
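
// Hedged examples:
// utils.testinarray(["x", "b"], ["a", "b", "c"]); // expected true
// utils.testinarray(["x"], ["a", "b", "c"]);      // expected false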

/*
 * DIRECTORY
 */
const isDirectory = (source) => fs.lstatSync(source).isDirectory();
const getDirectories = (source) =>
  fs
    .readdirSync(source)
    .map((name) => path.join(source, name))
    .filter(isDirectory);

module.exports = utils;
13
api/models/Towns.js
Normal file
@@ -0,0 +1,13 @@

const bcrypt = require('bcrypt');
const fs = require('fs-extra');
const glob = require('glob');
const moment = require('moment');
const jwt = require('jwt-simple');
const UUID = require('uuid');
const conf = require('../../nationchains/tribes/conf.json');
const Checkjson = require(`./Checkjson.js`);

const Towns = {};


module.exports = Towns;
350
api/models/Tribes.js
Executable file
@@ -0,0 +1,350 @@

const bcrypt = require('bcrypt');
const fs = require('fs-extra');
const path = require('path');
const glob = require('glob');
const Mustache = require('mustache');
const execSync = require('child_process').execSync;
const dnsSync = require('dns-sync');
const jwt = require('jwt-simple');
const moment = require('moment');
const UUID = require('uuid');
const Pagans = require('./Pagans.js');
const config = require('../../nationchains/tribes/conf.json');
const Checkjson = require(`./Checkjson.js`);
/*
tribeid manager

/tribes/tribeid
Manage a tribeid space
 * create
 * update by managing options and contract
 * delete a tribeid
 * check accountability

*/
const Tribes = {};
Tribes.init = () => {
  console.group('init Tribes');
  let tribeids = [];
  let routes = glob.sync('./routes/*.js')
    .map(f => {
      return { url: `/${path.basename(f, '.js')}`, route: f };
    });
  let DOMs = [];
  let appname = {};
  const TribesGlobalConfig = glob.sync(`${config.tribes}/**/clientconf.json`)
    .map(f => {
      const conf = fs.readJSONSync(f);
      // check if plugins exist and add them in .plugins of each tribeid conf
      conf.plugins = glob.sync(`${config.tribes}/${conf.tribeid}/plugins/**/package.json`)
        .map(p => {
          const pack = fs.readJsonSync(p, 'utf8');
          routes.push({ url: `/${pack.name}`, route: `${config.tribes}/${conf.tribeid}/plugins/${pack.name}/route.js` });
          return pack;
        });
      // Add here any other info to get a global view and init
      // ...
      tribeids.push(conf.tribeid);
      DOMs = [...new Set([...DOMs, ...conf.allowedDOMs])];
      if (conf.website) appname[conf.tribeid] = Object.keys(conf.website);
      return conf;
    });
  // store the global conf for sharing with other apis
  fs.outputJsonSync(`${config.tmp}/clientconfglob.json`, TribesGlobalConfig, {
    spaces: 2
  });
  return { tribeids, routes, DOMs, appname };
}
Tribes.create = (data) => {
  /* data = clientconf.json
  {
    "tribeid": "apxtrib",
    "genericpsw": "Trze3aze!",
    "website": {
      "presentation": "https://www.apxtrib.org",
      "webapp": "https://webapp.apxtrib.org"
    },
    "allowedDOMs": ["local.fr", "localhost:9002", "ndda.fr", "apxtrib.org"],
    "clientname": "apxtrib",
    "clientlogo": "",
    "geoloc": [],
    "useradmin": {PUBKEY:"",EMAIL:"",LOGIN:"adminapxtrib",UUID:"adminapxtrib"},
    "smtp": {
      "emailFrom": "support@apxtrib.org",
      "emailcc": [],
      "service": "gmail",
      "auth": {
        "user": "antonin.ha@gmail.com",
        "pass": "Ha06110"
      }
    },
    "accepted-language": "fr,en",
    "langueReferential": ["fr"]
  }
  What about:
  "tribeid": same as the folder where all the client's files are stored
  "genericpsw": a generic password for new users; needs upper/lower case, a number and a special char
  "dnsname": a domain name belonging to the client
  "subdns": "www", a sub domain subdns.dnsname gives a public web access to
  "website": { keywebsite:url}, gives access to config.tribes/tribeid/www/keywebsite/index.html,
  "allowedDOMs": ["local.fr", "localhost:9002", "nnda.fr"], // for CORS, @TODO generate from the previous URLs; this allows this apxtrib instance to be accessible
  "clientname": name of the organisation if any,
  "clientlogo": logo of the organisation if any,
  "geoloc": [], if any
  "useradmin": { this is the 1st user, created automatically to make the gui available
    "PUBKEY": public key to authenticate without an email,
    "EMAIL": user email; we need at least one authentication method set up, afterwards the user can use both or only one
    "LOGIN": login to use to access admintribeid,
    "UUID": unique id, normally a UUID, but the uuid admintribeid is the same person in any apxtrib instance so we use it by convention.
    "xlang": lang used by this user
  },
  "smtp": { smtp used to send email with the nodemailer lib, basic example with a google account
    "emailFrom": "support@xx.fr",
    "emailcc": [],
    "service": "gmail",
    "auth": {
      "user": "antonin.ha@gmail.com",
      "pass": "Ha06110"
    }
  },
  "accepted-language": "fr,en", list of accepted languages in terms of http requests.
  "langueReferential": ["fr"], list of the languages the referential texts have to be translated into
  }
  */
  //update tmp/confglob.json
  const dataclient = Tribes.init();
  // in prod, returns all instances apxinfo={tribeids:[],logins:[]}
  // in dev, returns only local
  // check that the tribeid name is unique
  console.log('list of tribeids', dataclient.tribeids);
  if (dataclient.tribeids.includes(data.tribeid)) {
    return { status: 403, payload: { model: "client", info: ['tribeidalreadyexist'] } };
  }
  //loginsglob = {login:tribeid}
  let loginsglob = {};
  if (fs.existsSync(`${config.tmp}/loginsglob.json`)) {
    loginsglob = fs.readJsonSync(`${config.tmp}/loginsglob.json`, 'utf-8');
  }
  const logins = Object.keys(loginsglob);
  if (logins.includes(data.useradmin.login)) {
    return { status: 403, payload: { model: "client", info: ['loginalreadyexist'] } };
  }
  fs.ensureDirSync(`${config.tribes}/${data.tribeid}`);
  ['users', 'www', 'referentials', 'nationchains'].forEach(r => {
    fs.copySync(`${__base}/setup/tribes/apxtrib/${r}`, `${config.tribes}/${data.tribeid}/${r}`);
  });
  fs.outputJsonSync(`${config.tribes}/${data.tribeid}/clientconf.json`, data);
  const confcli = JSON.parse(Mustache.render(fs.readFileSync(`${__base}/setup/tribes/apxtrib/clientconf.mustache`, 'utf8'), data));
  fs.outputJsonSync(`${config.tribes}/${data.tribeid}/clientconf.json`, confcli);

  return Pagans.createUser({
    xpaganid: "setup",
    xworkon: data.tribeid,
    xlang: data.useradmin.xlang
  }, data.useradmin);
};
Tribes.archive = (tribeid) => {
  // TODO: zip the tribeid folder, then
  // remove tribeid from data or devdata
  try {
    fs.moveSync(`${config.tribes}/${tribeid}`, `${config.archivefolder}/${tribeid}`);
    // update apxtribenv
    Tribes.init();
    return { status: 200, payload: { info: ['deletetribeidsuccessfull'], models: 'Tribes', moreinfo: "TODO see in Tribes.archive" } };
  } catch (err) {
    console.log("Archive error", err);
    return { status: 403, payload: { info: ['archiveerror'], models: 'Tribes', moreinfo: err } };
  }
}
////////////// Manage files for Tribes
Tribes.checkaccessfolder = (folder, typeaccessrequested, useraccessrights, useruuid) => {
  // check folder rights
}

Tribes.checkaccessfiles = (listfile, typeaccessrequested, useraccessrights, useruuid) => {
  // @listfile files or folders to check access rights on
  // @typeaccessrequested on files: R read or download, U for update, D for delete, O for owner; an owner has all rights RUD on its files
  // @useraccessrights from the user account /userd/uuid.json
  // @useruuid public uuid of the user
  // return {'ok':[files authorized],'ko':[files not authorized]}

  const checkauthlistfile = { 'ok': [], 'ko': [] };
  let structf = [];
  let inforep = { file: {}, dir: {} };
  let done;
  for (const f of listfile) {
    done = false;
    if (!fs.existsSync(`${config.tribes}/${f}`)) {
      done = true;
      checkauthlistfile.ko.push(f);
      console.log(`${f} file does not exist`);
    } else {
      structf = f.split('/');
    }
    // check that tribeid exists /tribeid/object/
    if (!done &&
      useraccessrights.data[structf[0]] &&
      useraccessrights.data[structf[0]][structf[1]] &&
      useraccessrights.data[structf[0]][structf[1]].includes(typeaccessrequested)) {
      done = true;
      checkauthlistfile.ok.push(f);
    } else {
      // check if the folder has a .info.json with .file[f].shared{useruuid:'CRUDO'}
      console.log('structf', structf);
      if (fs.existsSync(`${config.tribes}/${structf.slice(0, -1).join('/')}/.info.json`)) {
        inforep = fs.readJsonSync(`${config.tribes}/${structf.slice(0, -1).join('/')}/.info.json`, 'utf8');
      }
      console.log(`no accessrights for ${f} for ${useruuid} `);
    }
    if (!done && inforep.file[f] && inforep.file[f].shared && inforep.file[f].shared[useruuid] && inforep.file[f].shared[useruuid].includes(typeaccessrequested)) {
      done = true;
      checkauthlistfile.ok.push(f);
    }
    // If no authorization then ko
    if (!done) {
      checkauthlistfile.ko.push(f);
    }
  } // end loop for
  return checkauthlistfile;
}
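
// Hedged usage sketch (the rights object shape is assumed from the checks above,
// tribe/file names are made up; the file must exist under config.tribes):
// const rights = { data: { mytribe: { www: "RU" } } };
// Tribes.checkaccessfiles(["mytribe/www/index.html"], "R", rights, "some-uuid");
// // => { ok: ["mytribe/www/index.html"], ko: [] }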

Tribes.dirls = (tribeid, dir) => {
  /*
  Return the list of files in tribeid/dir
  */
  let comment = { src: `${tribeid}/${dir}`, file: {}, dir: {} };
  if (fs.existsSync(`${config.tribes}/${tribeid}/${dir}/.info.json`)) {
    comment = fs.readJsonSync(`${config.tribes}/${tribeid}/${dir}/.info.json`, 'utf-8');
  }
  const listfile = [];
  const listdir = [];
  glob.sync(`${config.tribes}/${tribeid}/${dir}/*`)
    .forEach(f => {
      const stats = fs.statSync(f);
      if (stats.isFile()) {
        listfile.push(path.basename(f));
        if (!comment.file[path.basename(f)]) {
          comment.file[path.basename(f)] = { tags: [], info: "", thumbb64: "" };
        }
        comment.file[path.basename(f)].mtime = stats.mtime;
        comment.file[path.basename(f)].ctime = stats.ctime;
        comment.file[path.basename(f)].size = stats.size;
      }
      if (stats.isDirectory()) {
        listdir.push(path.basename(f));
        if (!comment.dir[path.basename(f)]) {
          comment.dir[path.basename(f)] = { tags: [], info: "", thumbb64: "" };
        }
        comment.dir[path.basename(f)].nbfile = glob.sync(`${f}/*.*`).length;
        comment.dir[path.basename(f)].mtime = stats.mtime;
        comment.dir[path.basename(f)].ctime = stats.mtime;
        console.log('comment.dir', comment.dir);
      }
    });
  // remove files or dirs that were deleted
  Object.keys(comment.file)
    .forEach(f => {
      if (!listfile.includes(f)) delete comment.file[f];
    });
  Object.keys(comment.dir)
    .forEach(d => {
      if (!listdir.includes(d)) delete comment.dir[d];
    });
  fs.outputJson(`${config.tribes}/${tribeid}/${dir}/.info.json`, comment, 'utf-8');
  return { status: 200, payload: { info: ['succestogetls'], models: 'Tribes', moreinfo: comment } };
};
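
// Sketch of a resulting .info.json (field names from the code above, values made up):
// {
//   "src": "mytribe/www",
//   "file": { "index.html": { "tags": [], "info": "", "thumbb64": "",
//             "mtime": "...", "ctime": "...", "size": 1024 } },
//   "dir": { "img": { "tags": [], "info": "", "thumbb64": "", "nbfile": 3,
//            "mtime": "...", "ctime": "..." } }
// }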

Tribes.addspaceweb = (data) => {
  /*
  To create a public spaceweb accessible from https://dnsname/pageindex

  input:
  {dnsname:["archilinea.fr","www.archilinea.fr"], 1st is the main dns, others are just servername redirections
   tribeid:"archilinea", from req.session.header.xworkon
   website:"presentation",
   pageindex:"app_index_fr.html"
   mode:dev(local no ssl) | prod(IP + ssl)
  }
  output:
  nginx conf and ssl to serve each https://dnsname to /{tribeid}/www/app/{website}

  Careful: this action is executed as root and restarts nginx + apxtrib to take effect
  */
  data.configdomain = config.tribes;
  data.porthttp = config.porthttp;
  console.assert(config.loglevel == "quiet", 'data to create spaceweb:', data);
  // create spaceweb app for tribeid/www/app/website/pageindexname.html
  if (!fs.existsSync(`${config.tribes}/${data.tribeid}/www/app/${data.website}`)) {
    fs.outputFileSync(`${config.tribes}/${data.tribeid}/www/app/${data.website}/${data.pageindex}`, `<h1>Hello ${data.tribeid} ${data.website} onto ${data.dnsname.join(',')}`, 'utf-8');
  }
  // create logs folder
  fs.ensureDirSync(`${config.tribes}/${data.tribeid}/logs/nginx`);
  // add nginx http config
  const confnginx = fs.readFileSync('setup/nginx/modelwebsite.conf.mustache', 'utf-8');
  fs.outputFileSync(`/etc/nginx/conf.d/${data.dnsname[0]}.conf`, Mustache.render(confnginx, data), 'utf-8');
  if (data.dns == "unchain") {
    // add into /etc/hosts
    let hosts = fs.readFileSync('/etc/hosts', 'utf8');
    let chg = false;
    data.dnsname.forEach(d => {
      if (!hosts.includes(`127.0.0.1 ${d}`)) {
        hosts += `\n127.0.0.1 ${d}`;
        chg = true;
      }
      if (chg) {
        fs.outputFileSync('/etc/hosts', hosts, 'utf8');
      }
    });
  }
  // Check that DNS responds
  let rep = "";
  data.dnsname.forEach(d => {
    if (!dnsSync.resolve(`${d}`)) {
      rep += `\nresolving ${d} does not return a valid IP, please set up the domain's IP redirection before running this script`;
    }
  });
  // update clientconf.json
  const clientconf = fs.readJsonSync(`${config.tribes}/${data.tribeid}/clientconf.json`);
  clientconf.website[data.website] = data.dnsname[0];
  // merge allowedDOMs into a unique list
  clientconf.allowedDOMs = [...new Set([...clientconf.allowedDOMs, ...data.dnsname])];
  fs.outputJsonSync(`${config.tribes}/${data.tribeid}/clientconf.json`, clientconf, 'utf-8');
  if (!data.setup) {
    // at setup time apxtrib is not running and will be started manually at the 1st run
    // in any other case a restart needs to be planned for CORS
    setTimeout(Tribes.restartapxtrib, 300000, data.tribeid);
  }
  const nginxrestart = execSync(`sudo systemctl restart nginx`).toString();
  console.log('Restart nginx', nginxrestart);
  if (data.mode != "unchain") {
    // get ssl certificate; note: -d must be added before each domain that redirects to the web space
    const certbot = execSync(`sudo certbot --nginx -d ${data.dnsname.join(' -d ')}`).toString();
    console.log('certbot is running, TO BE REVIEWED FOR A REAL PRODUCTION SETUP', certbot);
  }
  // sh execution to apply the requested changes
  return {
    status: 200,
    payload: {
      model: "Tribes",
      info: ['webspacecreated'],
      moreinfo: "Space web well created"
    }
  };
}
Tribes.restartapxtrib = (tribeid) => {
  console.log('A restart was requested 5mn ago from a new spacedev for ' + tribeid);
  execSync('yarn restartpm2');
}


module.exports = Tribes;
5
api/models/lg/Checkjson_en.json
Normal file
@@ -0,0 +1,5 @@

{
  "typedoesnnotexistinschema": "This type in your property is not managed by Checkjson.js",
  "dataerrpropertie": "Check your data, it does not fit your schema property rules",
  "dataerrpropertiesrequired": "This property is required and not present in your data"
}
5
api/models/lg/Odmdb_en.json
Normal file
@@ -0,0 +1,5 @@

{
  "schemanotfound": "Schema not found in {{{fullpath}}}",
  "pathnamedoesnotexist": "ObjectPath or objectName does not exist {{{indexpath}}}",
  "objectfiledoesnotexist": "Requested index does not exist here: {{{objectpath}}}"
}
3
api/models/lg/Pagans_en.json
Normal file
@@ -0,0 +1,3 @@

{

}
152
api/models/unittest/Checkjson.js
Normal file
@@ -0,0 +1,152 @@

/*
 * Unit testing
 */
const assert = require("assert");
const Checkjson = require("../Checkjson.js");

const ut = { name: "Checkjson" };

const schema = {
  $schema: "http://json-schema.org/schema#",
  title: "Dummy schema to test Checkjson.js",
  description: "Checkjson is used on the server as well as in a browser",
  $comment: "We change the schema type on the fly to simplify the test",
  type: "object",
  properties: {
    totest: {},
  },
};
const testproperties = [
  {
    name: "test0",
    data: { totest: true },
    properties: { totest: { type: "boolean" } },
    status: 200,
  },
  {
    name: "test1",
    data: { totest: "blabla" },
    properties: { totest: { type: "string" } },
    status: 200,
  },
  {
    name: "test2",
    data: { totest: 123 },
    properties: { totest: { type: "string" } },
    status: 417,
  },
  {
    name: "test3",
    data: { totest: 123.13 },
    properties: { totest: { type: "integer" } },
    status: 417,
  },
  {
    name: "test4",
    data: { totest: 123 },
    properties: { totest: { type: "number" } },
    status: 200,
  },
  {
    name: "test5",
    data: { totest: 12312 },
    properties: { totest: { type: "number" } },
    status: 200,
  },
  {
    name: "test6",
    data: { totest: 12.313 },
    properties: { totest: { type: "float" } },
    status: 200,
  },
  {
    name: "test7",
    data: { totest: "blablab sfde" },
    properties: { totest: { type: "string", minLength: 1111 } },
    status: 417,
  },
  {
    name: "test8",
    data: { totest: "blablab sfde" },
    properties: { totest: { type: "string", minLength: 4, maxLength: 128 } },
    status: 200,
  },
  {
    name: "test9",
    data: { totest: 12 },
    properties: { totest: { type: "integer", multipleOf: 3 } },
    status: 200,
  },
  {
    name: "test10",
    data: { totest: 9 },
    properties: { totest: { type: "number", minimum: -10, exclusiveMaximum: 10 } },
    status: 200,
  },
  {
    name: "test11",
    data: { totest: 10 },
    properties: { totest: { type: "number", minimum: -10, exclusiveMaximum: 10 } },
    status: 417,
  },
  {
    name: "test12",
    data: { totest: "gfhrtabcdgfr" },
    properties: { totest: { type: "string", pattern: /.*abc.*/ } },
    status: 200,
  },
  {
    name: "test13",
    data: { totest: "toto@google.com" },
    properties: { totest: { type: "string", format: "email" } },
    status: 200,
  },
  {
    name: "test14",
    data: { totest: "Aze123@0" },
    properties: { totest: { type: "string", format: "password" } },
    status: 200,
  },
  {
    name: "test15",
    data: { totest: "value1" },
    properties: { totest: { type: "string", enum: ["value1", "value2", "value3"] } },
    status: 200,
  },
  {
    name: "test16",
    data: { totest: ["t1", "t2"] },
    properties: { totest: { type: ["string", "number"] } },
    status: 417,
  },
  {
    name: "test17",
    data: { totest: 12 },
    properties: { totest: { type: ["string", "number"] } },
    status: 200,
  },
];

ut.testproperties = (options) => {
  let msg = "";
  testproperties.forEach((t) => {
    schema.properties = t.properties;
    const res = Checkjson.schema.data(schema, t.data);
    if (res.status != t.status) {
      msg = msg == "" ? "Inconsistent testproperties() name list: " : `${msg},`;
      if (options.verbose) {
        console.log(t);
        console.log(res);
      }
      msg += res.err.map((e) => ` ${t.name} ${e.info}`);
    }
  });
  return assert.deepEqual(msg, "", msg);
};

ut.run = (options) => {
  console.log("Test Checkjson properties");
  ut.testproperties(options);
};
module.exports = ut;
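
// Hedged usage sketch (path assumed, run from the project root):
// node -e 'require("./api/models/unittest/Checkjson.js").run({ verbose: true })'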
72
api/models/unittest/Odmdb.js
Normal file
@@ -0,0 +1,72 @@

/*
 * Unit testing
 */
const assert = require("assert");
const fs = require("fs-extra");
const path = require("path");
const Odmdb = require("../Odmdb.js");
const {
  generemdp,
} = require("../../nationchains/socialworld/contracts/toolsbox.js");

const ut = { name: "Odmdb" };
/*
We only test search and indexation here.
Create, Update, Read and Delete are unit tested with the specificities of each object.

To do that we create in tmp a dummy data folder for a dummy schema object.
*/
const schema = {
  $schema: "http://json-schema.org/schema#",
  title: "Dummy schema to test Checkjson.js",
  description: "Checkjson is used on the server as well as in a browser",
  $comment: "We change the schema type on the fly to simplify the test",
  type: "object",
  properties: {
    uuid: {
      type: "string",
      format: "uuid",
      default: "=uuid.v4()",
    },
    dtcreate: {
      type: "string",
      format: "datetime",
      default: "=date.now()",
    },
    tag: {
      type: "string",
      enum: ["t1", "t2", "t3"],
      default: "t1",
    },
    info: {
      type: "string",
      minLength: 10,
      default: "=generemdp(255,'ABCDEFGHIJKLM 12340')",
    },
  },
  required: ["uuid"],
  apxprimarykey: "uuid",
  apxuniquekey: ["info"],
  apxsearchindex: {
    uuid: { list: [], taginfo: ["tag", "info"], all: "" },
    info: { uuid: ["uuid"] },
  },
};
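
// Example item matching this dummy schema (sketch, defaults shown resolved;
// the uuid/dtcreate values are made up):
// { uuid: "3b241101-e2bb-4255-8caf-4136c566a962",
//   dtcreate: "2023-01-01T00:00:00Z", tag: "t1",
//   info: "ABCDEFGHIJKLM 12340 ..." }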

const obj = { tag: "t1", info: "Lorem ipsum A" };

ut.createanobject = (schema, obj) => {
  const res = { status: 200, err: [] };
  return res;
};

ut.run = (options) => {
  const objectPath = path.resolve(__dirname, "../../tmp/testobjects");
  const schemaPath = path.resolve(__dirname, "../../tmp/testschema");
  if (!fs.existsSync(objectPath)) fs.ensureDirSync(objectPath);
  if (!fs.existsSync(schemaPath)) fs.ensureDirSync(schemaPath);
  const createenvobj = Odmdb.setObject(
    schemaPath,
    objectPath,
    "objtest",
    schema,
    {},
    "en"
  );
  assert.deepEqual(createenvobj, { status: 200 }, JSON.stringify(createenvobj));
  const checkschema = Odmdb.schema(schemaPath, "objtest", true);
  assert.deepEqual(checkschema.status, 200, JSON.stringify(checkschema));
};
module.exports = ut;
37
api/routes/nations.js
Executable file
@@ -0,0 +1,37 @@

const express = require('express');
const fs = require('fs-extra'); // assumed dependency, needed by the /push route below
const config = require('../../nationchains/tribes/conf.json');

// Classes
const Nations = require('../models/Nations.js');
// Middlewares
const checkHeaders = require('../middlewares/checkHeaders');
const isAuthenticated = require('../middlewares/isAuthenticated');
const hasAccessrighton = require('../middlewares/hasAccessrighton');
const router = express.Router();

/*
Manage the social world

@Todo

Manage a new nation
A mayor creates a nation with at least a town => nation:{name, towns:[]} contracts/nationname.js + contracts/townsname.js
Manage new towns in a nation => update nation:{nationname, towns:[]} contracts/townname.js

*/
router.post('/push', checkHeaders, (req, res) => {
  // Get information from other apxtrib instances in req.body
  // check req.body.hashnext => means this is a candidate to validate the next block
  //
  // return its own information back with the last call to Nationchains.synchronize()
  res.send({ status: 200, payload: { moreinfo: fs.readFileSync(`${config.tribes}/${config.mayorId}/nationchains/nodes/${config.rootURL}`, 'utf-8') } });
})
|
||||
|
||||
|
||||
module.exports = router;
|
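Since /push expects peer information (including hashnext) in req.body and answers with this node's own record, here is a hedged client-side sketch; the /nations mount point, the port and the peerHeaders content are assumptions:

// hypothetical peer-to-peer push, assuming axios as HTTP client
const axios = require( 'axios' );
const peerHeaders = {}; // must carry every header listed in conf.api.exposedHeaders
axios.post( 'http://localhost:3000/nations/push', { hashnext: 'candidate-block-hash' }, { headers: peerHeaders } )
  .then( ( rep ) => console.log( rep.data.payload.moreinfo ) );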
201
api/routes/odmdb.js
Normal file
@@ -0,0 +1,201 @@
const express = require("express");
|
||||
const glob = require("glob");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const conf = require("../../nationchains/tribes/conf.json");
|
||||
const Odmdb = require("../models/Odmdb.js");
|
||||
// Middlewares
|
||||
const checkHeaders = require("../middlewares/checkHeaders");
|
||||
const isAuthenticated = require("../middlewares/isAuthenticated");
|
||||
const hasAccessrighton = require("../middlewares/hasAccessrighton");
|
||||
const router = express.Router();
|
||||
|
||||
router.get(
|
||||
"/:objectname/idx/:indexname",
|
||||
checkHeaders,
|
||||
isAuthenticated,
|
||||
(req, res) => {
|
||||
/**
|
||||
* @api {get} /odmdb/idx/:indexname
|
||||
* @apiName Get index file for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
* @objectname {string} Mandatory
|
||||
* @apiParam {String} indexname Mandatory if in conf.nationObjects then file is into nationchains/ else in /nationchains/tribes/xtribe/objectname/idx/indexname indexname contains the ObjectName .*_ (before the first _)
|
||||
*
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
// indexname = objectname_key_value.json
|
||||
let objectLocation = "../../nationchains/";
|
||||
if (!conf.api.nationObjects.includes(req.params.objectname)) {
|
||||
objectLocation += `tribes/${req.session.headers.xtribe}/`;
|
||||
// check if accessright
|
||||
}
|
||||
const indexpath = `${objectLocation}/${req.params.objectname}/idx/${req.params.indexname}`;
|
||||
if (fs.existsSync(indexpath)) {
|
||||
res.status(200).json({ data: fs.readJsonSync(indexpath) });
|
||||
} else {
|
||||
res
|
||||
.status(404)
|
||||
.json({
|
||||
ref: "Odmdb",
|
||||
msg: "objectfiledoesnotexist",
|
||||
data: { indexpath },
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
router.get(
|
||||
"/:objectname/itm/:primaryindex",
|
||||
checkHeaders,
|
||||
isAuthenticated,
|
||||
(req, res) => {
|
||||
/**
|
||||
* @api {get} /odmdb/item/:objectname/:primaryindex
|
||||
* @apiName Get index file for an object
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname name Mandatory if in conf.nationObjects then file is into nationchains/ else in /nationchains/tribes/xtribe/objectname
|
||||
* @apiParam {String} primaryindex the unique id where item is store
|
||||
* @apiError (404) {string} status the file does not exist
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains indexfile requested
|
||||
*
|
||||
*/
|
||||
// indexname = objectname_key_value.json
|
||||
const objectName = req.params.objectname;
|
||||
const objectId = req.params.primaryindex;
|
||||
let objectLocation = "../../nationchains/";
|
||||
if (!conf.api.nationObjects.includes(objectName)) {
|
||||
objectLocation += `tribes/${req.session.headers.xtribe}/${objectName}`;
|
||||
// check if accessright on object on item
|
||||
// in case not res.status(403)
|
||||
}
|
||||
const objectpath = `${objectLocation}/${objectName}/itm/${objectId}`;
|
||||
|
||||
if (fs.existsSync(objectpath)) {
|
||||
res.status(200).json({ data: fs.readJsonSync(objectpath) });
|
||||
} else {
|
||||
res
|
||||
.status(404)
|
||||
.json({
|
||||
ref: "Odmdb",
|
||||
msg: "objectfiledoesnotexist",
|
||||
data: { objectpath },
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
router.post(":objectname/itm", checkHeaders, isAuthenticated, (req, res) => {
|
||||
// Create an item of an object
|
||||
});
|
||||
router.get(
|
||||
"/searchitems/:objectname/:question",
|
||||
checkHeaders,
|
||||
isAuthenticated,
|
||||
(req, res) => {
|
||||
/**
|
||||
*
|
||||
*
|
||||
*/
|
||||
console.log(
|
||||
"route referentials get all language" +
|
||||
req.params.objectname +
|
||||
"-" +
|
||||
req.params.question
|
||||
);
|
||||
const getref = Referentials.getref(
|
||||
true,
|
||||
req.params.source,
|
||||
req.params.idref,
|
||||
req.session.header.xworkon,
|
||||
req.session.header.xlang
|
||||
);
|
||||
// Return any status the data if any erreur return empty object
|
||||
res.jsonp(getref.payload.data);
|
||||
}
|
||||
);
|
||||
|
||||
router.get("schema/:objectname", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {get} /odmdb/schema/:objectname
|
||||
* @apiName GetSchema
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname Mandatory if headers.xworkon == nationchains then into ./nationchains/ else into ./tribes/xworkon/
|
||||
*
|
||||
* @apiError (404) {string} status a key word to understand not found schema
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains schema requested
|
||||
*
|
||||
*/
|
||||
const fullpath = path.resolve(
|
||||
`${__dirname}/tribes/${req.session.header.xworkon}/schema/${req.params.pathobjectname}.json`
|
||||
);
|
||||
if (fs.existsSync(fullpath)) {
|
||||
res.status(200).json({ data: fs.readJsonSync(fullpath) });
|
||||
} else {
|
||||
res
|
||||
.status(404)
|
||||
.json({ msg: "schemanotfound", ref: "odmdb", data: { fullpath } });
|
||||
}
|
||||
});
|
||||
router.put("schema/:objectname", checkHeaders, isAuthenticated, (req, res) => {
|
||||
/**
|
||||
* @api {put} /odmdb/schema/:objectname
|
||||
* @apiName putSchema
|
||||
* @apiGroup Odmdb
|
||||
*
|
||||
* @apiUse apxHeader
|
||||
*
|
||||
* @apiParam {String} objectname Mandatory if headers.xworkon == nationchains then into ./nationchains/ else into ./tribes/xworkon/
|
||||
* @apiBody {string} schemapath where to store schema .../schema
|
||||
* @apiBody {string} objectpath where to store object ...objectname/idx/config.json
|
||||
* @apiBody {json} schema content
|
||||
* @apiBody {json} schemalang content in lg
|
||||
* @apiBody {string} lang define which schemalg is (2 letters)
|
||||
*
|
||||
* @apiError (404) {string} status a key word to understand not found schema
|
||||
* @apiError (404) {string} ref objectmodel to get in the right language
|
||||
* @apiError (404) {string} msg key to get template from objectmodel
|
||||
* @apiError (404) {object} data use to render lg/objectmodel_lg.json
|
||||
*
|
||||
*
|
||||
* @apiSuccess (200) {object} data contains schema requested
|
||||
*
|
||||
*/
|
||||
const fullpath = path.resolve(
|
||||
`${__dirname}/tribes/${req.session.header.xworkon}/schema/${req.params.pathobjectname}.json`
|
||||
);
|
||||
const set = Odmdb.setObject(
|
||||
path.resolve(`${__dirname}/tribes/${req.session.header.xworkon}`)
|
||||
);
|
||||
|
||||
if (fs.existsSync(fullpath)) {
|
||||
res.status(200).json({ data: fs.readJsonSync(fullpath) });
|
||||
} else {
|
||||
res
|
||||
.status(404)
|
||||
.json({ msg: "schemanotfound", ref: "odmdb", data: { fullpath } });
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
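Given the two GET routes above, a hedged sketch of reading an index and then one item; the base URL, the pagans object and the index file name are assumptions, and the headers must satisfy checkHeaders:

// hypothetical client read: index first, then the item behind its first uuid
const axios = require("axios");
const base = "http://localhost:3000/odmdb";
const headers = {}; // must carry every header listed in conf.api.exposedHeaders
axios.get(`${base}/pagans/idx/pagans_alias_uuid.json`, { headers })
  .then((idx) => axios.get(`${base}/pagans/itm/${Object.values(idx.data.data)[0]}.json`, { headers }))
  .then((itm) => console.log(itm.data.data));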
242
api/routes/pagans.js
Executable file
@@ -0,0 +1,242 @@
const express = require( 'express' );
const path = require( 'path' );

// Classes
const Pagans = require( '../models/Pagans.js' );
// Middlewares
const checkHeaders = require( '../middlewares/checkHeaders' );
const isAuthenticated = require( '../middlewares/isAuthenticated' );
const hasAccessrighton = require( '../middlewares/hasAccessrighton' );
const router = express.Router();
/*
models/Pagans.js
Managed:
/data/tribee/client-Id/users/uuid.json
/searchindex/emails.json {email:uuid}
/login.json {login:uuid}
/uids.json {uuid:[login, email, encrypted psw, accessrights]}

ACCESSRIGHTS = {
  app:{"tribeid:appname":"profil"},
  data:{"tribeid":{object:"CRUDO"}}
}
ACCESSRIGHTS is stored into the token and is loaded into req.session.header.accessrights by the hasAccessrighton() middleware.

appname is a website space object /sitewebsrc/appname
the live website is stored into /dist, the source in /src

This can be managed by maildigitcreator or not.
apxtrib/sitewebs/webapp is the web interface of apxtrib.

profil: admin / manager / user are keywords that give specific access to data in a model. Any other kind of profil can exist. It is useful to manage specific menus in an app.
It is also possible to authorize updating a field of an object depending on a rule in dataManagement/object/
{ field:X
  nouserupdate: "!(['admin','manager'].includes(contexte.profil))",
}

data allows a user to access a tribeid with Create Read Update Delete Own (CRUDO) on each object of a tribeid independently of any app.

Create allows creating a new object respecting the rules defined in /referentials/dataManagement/object/name.json
Update idem
Delete idem
Owner means it can be Written/Deleted if the OWNER field contains the UUID that tries to act on this object. Useful to allow someone to fully manage their own objects.

*/
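To make the ACCESSRIGHTS shape above concrete, a small hypothetical value (the tribe, app and object names are invented):

// hypothetical ACCESSRIGHTS following the structure documented above
const ACCESSRIGHTS = {
  app: { "mytribe:webapp": "manager" },              // profil per "tribeid:appname"
  data: { mytribe: { users: "CRU", www: "CRUDO" } }  // CRUDO letters per object
};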
router.get( '/isregister', checkHeaders, isAuthenticated, ( req, res ) => {
  /**
   * @api {get} /pagans/isregister
   * @apiName Is register check xalias and xhash
   * @apiGroup Odmdb
   *
   * @apiUse apxHeader
   *
   * @apiParam {String} indexname Mandatory if in conf.nationObjects then the file is in nationchains/ else in /nationchains/tribes/xtribe/objectname/idx/indexname; indexname contains the ObjectName .*_ (before the first _)
   *
   * @apiError (404) {string} status the file does not exist
   * @apiError (404) {string} ref objectmodel to get in the right language
   * @apiError (404) {string} msg key to get template from objectmodel
   * @apiError (404) {object} data used to render lg/objectmodel_lg.json
   *
   * @apiSuccess (200) {object} data contains the requested index file
   *
   */
  res.send( Pagans.checkdetachedSignature( req.session.header.xalias, req.session.header.xhash ) );
  // @todo: a second res.send() would throw; the static answer below is kept as a comment until one of the two is chosen:
  // res.send({status:200,ref:"headers",msg:"authenticated",data:{xalias:req.session.header.xalias,xtribe:req.session.header.xtribe}})
} )
router.post( '/', checkHeaders, ( req, res ) => {
  // create a pagan account from alias, publickey, if trusted recovery={email,privatekey}
  // @todo: not implemented yet, no response is sent
  console.log( req.body )
} )
router.delete( '/:alias', checkHeaders, isAuthenticated, ( req, res ) => {
  console.log( `DELETE pagans nationchains/pagans/${req.params.alias}.json` );
  const result = Pagans.delete( req.params.alias, req.session.header ); // was req.params.id, the route param is :alias
  res.status( result.status )
    .send( result.data );
} );

router.get( '/isauth', checkHeaders, isAuthenticated, ( req, res ) => {
  if( req.session.header.xpseudo == "1" ) {
    return res.status( 401 )
      .send( { info: "not authenticated" } );
  } else return res.status( 200 )
    .send( { info: "well authenticated" } )
} )
router.post( '/login', checkHeaders, async ( req, res ) => {
  // console.log('POST /users/login with: ', req.app.locals.header);
  /*
  Check a password for a login to get an authentication token
  valid 1 hour, 1 day
  @header
  @body.LOGIN
  @body.PASSWORD
  @checkpsw = true check that the 2 encrypted passwords match
              false bypass the check and allow generating a token,
              useful while a password is being reset.
  @return
  */
  console.log( 'login for ', req.body, "in", req.session.header )
  const log = await Pagans.loginUser( req.session.header, req.body, true );
  console.log( "log user login", log );
  if( log.status == 200 ) {
    // update req.app.locals.tokens for this uuid just after login success so the next isAuth will be valid
    req.app.locals.tokens[ log.data.user.UUID ] = { TOKEN: log.data.user.TOKEN, ACCESSRIGHTS: log.data.user.ACCESSRIGHTS }
    console.log( req.app.locals )
  }
  return res.status( log.status )
    .send( log.data );
} );


router.get( '/getlinkwithoutpsw/:email', checkHeaders, async ( req, res ) => {
  /*
  For an existing email, send an email with a link valid for 1 hour.
  @email is the account for which access is requested
  Response:
  If the email does not exist, no email is sent.
  If the email exists, an email is sent with a link whose token is valid 1h.

  @return
  {status:200 or error,
   payload:{
     info:[list of keys to appear in the requester's language],
     model:'Pagans',
     moreinfo: 'text for the log'
   }
  }
  */
  console.log( `GET /users/getlinkwithoutpsw for email: ${req.params.email} tribeid :${req.header('X-Client-Id')}` );
  if( !req.params.email ) {
    return res.status( 404 )
      .send( {
        info: [ 'emailmissing' ],
        model: 'Pagans'
      } );
  } else {
    try {
      const getlink = await Pagans.getlinkwithoutpsw( req.params.email, req.session.header );
      console.log( 'getlink', getlink )
      // update the token created for the uuid
      req.app.locals.tokens[ getlink.data.info.xuuid ] = getlink.data.info.token;
      // beware: if the server restarts, the temporary token is lost
      return res.status( getlink.status )
        .send( getlink.data );
    } catch ( err ) {
      console.log( err )
    }
  }
} );
router.post( '/register', checkHeaders, async ( req, res ) => {
  console.log( `POST /users for ${req.session.header.xtribe}` );
  if( req.session.header.xjwt == '123123' ) {
    // Create a user with basic information and no rights
    // The form content is reduced to keep only login/email and psw
    // for the client_id, allows handling a user awaiting validation
    console.log( 'req du post', req );
  }
} );
router.get( '/info/:listindex', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), async ( req, res ) => {
  console.log( `get users info on tribeid ${req.session.header.xworkon} for ${req.params.listindex} with accessright`, req.session.header.accessrights.data );
  const result = await Pagans.getinfoPagans( req.session.header.xworkon, req.session.header.accessrights, req.params.listindex ); // was xpresworkon, a typo
  res.status( result.status )
    .send( result.data );
} );
router.get( '/list/:filter/:field', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), async ( req, res ) => {
  console.log( 'GET /users/list/filtre/champs list for ' + req.session.header.xworkon );
  if(
    [ 'admin', 'manager' ].includes( req.session.header.decodetoken[ 'apps' + req.session.header.xworkon + 'profil' ] ) ) {
    try {
      const userslist = await Pagans.getUserlist( req.session.header, req.params.filter, req.params.field );
      console.log( 'userslist', userslist );
      if( userslist.status == 200 ) {
        return res.status( userslist.status )
          .send( userslist.data );
      }
    } catch ( err ) {
      console.log( err );
      return res.status( 400 )
        .send( { info: 'erreur' } );
    }
  } else {
    res.status( 403 )
      .send( {
        info: [ 'forbiddenAccess' ],
        model: 'Pagans'
      } );
  }
} );
router.get( '/uuid/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), async ( req, res ) => {
  console.log( `GET /users/uuid/${req.params.id}` );
  //console.log('req.app.locals: ', req.app.locals);
  //console.log('req.session', req.session);
  const result = await Pagans.getUser( req.params.id, req.session.header.xworkon, req.session.header.accessrights );
  res.status( result.status )
    .send( result.data );
} );
router.put( '/chgpsw/:id', checkHeaders, isAuthenticated, async ( req, res ) => {
  console.log( `PUT update /users/chgpsw/${req.params.id}` );
  try {
    const majpsw = await Pagans.updateUserpassword( req.params.id, req.session.header, req.body );
    res.status( majpsw.status )
      .send( majpsw.data );
  } catch ( { status, data } ) {
    res.status( status )
      .send( data );
  }
} );
router.post( '/uuid', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'C' ), async ( req, res ) => {
  console.log( 'POST /users create for ' + req.session.header.xworkon, req.body );
  const usercreate = await Pagans.createUser( req.session.header, req.body );
  return res.status( usercreate.status )
    .send( usercreate.data );
} );
router.put( '/uuid/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'U' ), async ( req, res ) => {
  console.log( `PUT update /users/${req.params.id}` );
  // console.log('req.app.locals: ', req.app.locals);
  // console.log('req.session', req.session);
  try {
    const majUser = await Pagans.updateUser( req.params.id, req.session.header, req.body );
    res.status( majUser.status )
      .send( majUser.data );
  } catch ( { status, data } ) {
    res.status( status )
      .send( data );
  }
} );

module.exports = router;
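A hedged sketch of the login flow; the /users mount point follows the log messages above, the LOGIN/PASSWORD body keys come from the route comment, everything else is an assumption:

// hypothetical login call; on success the server caches { TOKEN, ACCESSRIGHTS }
const axios = require( 'axios' );
async function login( headers ) {
  // headers must carry every header listed in conf.api.exposedHeaders
  const rep = await axios.post( 'http://localhost:3000/users/login',
    { LOGIN: 'adminapxtrib', PASSWORD: 'secret' }, { headers } );
  return rep.data; // on 200, data.user holds UUID, TOKEN and ACCESSRIGHTS
}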
374
api/routes/tribes.js
Executable file
@@ -0,0 +1,374 @@
const express = require( 'express' );
const fs = require( 'fs-extra' );
const path = require( 'path' );
const config = require( '../../nationchains/tribes/conf.json' ); // named config: the code below uses config.*

// Classes
const Tribes = require( '../models/Tribes.js' );
// Middlewares
const checkHeaders = require( '../middlewares/checkHeaders' );
const isAuthenticated = require( '../middlewares/isAuthenticated' );
const hasAccessrighton = require( '../middlewares/hasAccessrighton' );
const router = express.Router();


router.get( '/clientconf/:tribeid', checkHeaders, isAuthenticated, ( req, res ) => {
  /*
  get a clientconf.json for a tribeid depending on the user's accessright
  if tribeid == all and user is admin of apxtrib => get /tmp/clientconfglob.json
  req.session.header.accessrights, req.session.header.apixpaganid
  */
  console.log( `Tribes/clientconf for tribeid:${req.params.tribeid}` )
  if( req.params.tribeid == "all" && req.session.header.accessrights.data.apxtrib && req.session.header.accessrights.data.apxtrib.tribeid && req.session.header.accessrights.data.apxtrib.tribeid.includes( 'R' ) ) {
    res.status( 200 )
      .send( { moreinfo: fs.readJsonSync( `${config.tmp}/clientconfglob.json`, 'utf-8' ) } );
    return;
  }
  if( req.session.header.accessrights.data[ req.params.tribeid ] &&
    req.session.header.accessrights.data[ req.params.tribeid ].tribeid &&
    req.session.header.accessrights.data[ req.params.tribeid ].tribeid.includes( 'R' ) &&
    fs.existsSync( `${config.tribes}/${req.params.tribeid}/clientconf.json` ) ) {
    // const conftribeid = { moreinfo: {} }
    // conftribeid.moreinfo[ req.params.tribeid ] = fs.readJsonSync( `${config.tribes}/${req.params.tribeid}/clientconf.json`, 'utf-8' );
    res.status( 200 )
      .send( { moreinfo: [ fs.readJsonSync( `${config.tribes}/${req.params.tribeid}/clientconf.json`, 'utf-8' ) ] } );
    return;
  }
  // if not authorized or it does not exist, return empty
  // no specific message is sent for security reasons (check only the logs)
  res.status( 403 )
    .send( { info: [ 'forbidenAccess' ], models: 'Tribes' } )
    .end();
} )
router.put( '/', checkHeaders, isAuthenticated, ( req, res ) => {
  console.log( 'Create a new tribeid, with a useradmin' )
  console.log( ' send data = clientconf.json with all parameters.' )
  // !!!!! check for security any ; \n or so because data can be used in a shell
  const add = Tribes.create( req.body );
  res.status( add.status )
    .send( add.payload )
} )
router.delete( '/archivetribeid/:tribeid', checkHeaders, isAuthenticated, ( req, res ) => {
  console.log( "request archive tribeid" )
  const archive = Tribes.archive( req.params.tribeid );
  res.status( archive.status )
    .send( archive.payload )
} );
router.post( '/spaceweb', checkHeaders, isAuthenticated, ( req, res ) => {
  // !!!!! check for security any ; \n or so because data can be used in a shell
  console.log( 'Create a new webapp for xworkon ' )
  req.body.tribeid = req.session.header.xworkon;
  const add = Tribes.addspaceweb( req.body )
  res.status( add.status )
    .send( add.payload )
} )
router.get( '/spaceweb/components/:tribeid/:website/:key', checkHeaders, ( req, res ) => {
  // check if key is valid before continuing
  // example: get Tribes/spaceweb/components/ndda/mesa/123?rep=appmesatable/appsimpletable.mustache
  const file = `${config.tribes}/${req.params.tribeid}/spacedev/${req.params.website}/src/ctatic/components/${req.query.path}`
  console.log( `Request components file from ${file}` )
  if( fs.existsSync( file ) ) {
    res.sendFile( file );
  } else {
    res.send( `console.error("Missing components file in ${req.params.tribeid}/spacedev/${req.params.website}/src/ctatic/components/${req.query.path}");` );
  }
} )
router.get( '/plugins/:tribeid/:pluginname/:key/:filename', ( req, res ) => {
  // No accessright possible because it is loaded on the fly
  // @todo Check key to authorize access to the plugin (key comes from user ACCESSRIGHTS[tribeid plugin owner:pluginname].key)
  // return a file into /:tribeid owner of plugin/plugins/:pluginname/components/:filename
  // if it does not exist or the key is invalid then return console.error
  const file = `${config.tribes}/${req.params.tribeid}/plugins/${req.params.pluginname}/components/${req.params.filename}`
  console.log( 'Tribes/plugins/ ', file )
  if( fs.existsSync( file ) ) {
    res.sendFile( file );
  } else {
    res.send( `console.error("Missing plugin file in ${req.params.tribeid}/plugins/${req.params.pluginname}/components/${req.params.filename}");` );
  }
} );

router.get( '/dirls', checkHeaders, isAuthenticated, ( req, res ) => {
  // url /Tribes/dirls?rep=referentials/dataManagement
  // request information about a req.query.rep from header xworkon/
  // return
  // {file:[{}],dir:[{}]}
  // @todo check if isAuthorized and exists

  console.log( 'request dirls', `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` );
  if( !fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` ) ) {
    res.status( 404 )
      .send( { 'info': [ 'dirnotexist' ], model: 'Tribes' } );
    return; // was missing: the handler kept running after the 404
  }
  const info = Tribes.dirls( req.session.header.xworkon, req.query.rep );
  console.log( info )
  res.status( info.status )
    .send( info.payload );
} )
router.delete( '/ls', checkHeaders, isAuthenticated, ( req, res ) => {
  // check Accessright with D or O on each
  // url /Tribes/ls
  // req.body.files=[list of files to delete]
  const authfiles = Tribes.checkaccessfiles( req.body, 'D', req.session.header.accessrights, req.session.header.apixpaganid );
  authfiles.ok.forEach( f => { fs.remove( `${config.tribes}/${f}` ); } )
  res.status( 200 )
    .send( { 'info': [ 'fileauthdeleted' ], models: 'Tribes', moreinfo: authfiles } )
} );
router.put( '/sendjson', checkHeaders, isAuthenticated, ( req, res ) => {
  //req.body = {object:spacedev, path:website/src/data/tpldataname_lg.json, data:{...}}
  //console.log( req.body )
  const dest = `${config.tribes}/${req.session.header.xworkon}/${req.body.object}/${req.body.path}`;
  console.log( `Send json to be saved to ${dest}` );
  if( !( req.body.object && fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.body.object}` ) ) ) {
    res.status( '404' )
      .send( { info: [ 'objectmissiong' ], models: 'Tribes', moreinfo: `object: ${req.body.object} does not exist; req.body must be {object, data, path} into data ${req.session.header.xworkon}/${req.body.object}` } )
  } else {
    // @todo: hasAccessrighton() is a middleware factory; calling it here has no
    // effect because the returned middleware is never executed
    if( fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.body.object}/${req.body.path}` ) ) {
      // exists, so it can be updated; check accessright update on this
      hasAccessrighton( req.body.object, "U" );
    } else {
      hasAccessrighton( req.body.object, "C" );
    }
    fs.outputJsonSync( dest, req.body.data );
    res.status( 200 )
      .send( { info: [ 'filesaved' ], models: 'Tribes' } )
  }
} );
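Because /sendjson expects {object, path, data} in the body, a hedged client sketch; the /tribes mount point and the spacedev example values are assumptions drawn from the route comment:

// hypothetical save of a template data file through PUT /tribes/sendjson
const axios = require( 'axios' );
axios.put( 'http://localhost:3000/tribes/sendjson', {
  object: 'spacedev',
  path: 'website/src/data/tpldataname_en.json',
  data: { title: 'Hello' }
}, { headers: {} /* must satisfy checkHeaders */ } )
  .then( ( rep ) => console.log( rep.data.info ) );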
router.post( '/downloadls', checkHeaders, isAuthenticated, ( req, res ) => {
  // the middleware hasAccessrighton.js is not applied here, it only covers access/update/create of information inside an object
  // to get a file a user needs accessrights to data: object: R or to Own it
  // or, if a .info.json exists in the folder, get shared as R in uuid

  //req.body contains a list of file or folder paths; if only 1 file then download it, otherwise zip the list and send the zip file

  const authfiles = Tribes.checkaccessfiles( req.body.files, 'R', req.session.header.accessrights, req.session.header.xpaganid );
  if( authfiles.ok.length == 1 ) {
    // quick hack while waiting for a proper binary .zip
    if( !authfiles.ok[ 0 ].includes( '.xml' ) ) {
      res.status( 200 )
        .download( `${config.tribes}/${authfiles.ok[0]}`, authfiles.ok[ 0 ] );
    } else {
      fs.copySync( `${config.tribes}/${authfiles.ok[0]}`, `${config.tribes}/${config.mayorId}/www/app/webapp/static/tmp/${authfiles.ok[ 0 ]}` )
    }
  } else if( authfiles.ok.length > 1 ) {
    // zip and send
    //res.status( 200 )
    // .download( `${config.tribes}/${authfiles.ok[0]}`, authfiles.ok[ 0 ])
    res.status( 200 )
      .attachment( `${config.tribes}/${authfiles.ok[0]}` );
  } else {
    res.status( 403 )
      .send( 'Forbidden access' )
  }
} );
router.post( '/upfilepond', checkHeaders, isAuthenticated, ( req, res, next ) => {
  console.log( 'post /Tribes/uploadfilepond' );
  // Store the file and return a unique id to the save button
  // that provides the folder where to store it
  const formidable = require( 'formidable' );
  const form = formidable( { multiples: false } );
  form.parse( req, ( err, fields, files ) => {
    if( err ) { next( err ); return; }
    //console.log( 'fields',fields);
    // fileMetadaObject sent
    let context = JSON.parse( fields.filepond );
    let idfile = files.filepond.path;
    let name = files.filepond.name;
    let subfolder = context.subfolder;
    name = name.replace( /[ ,'"’]/g, "_" );
    //console.log( 'files.filepond:', files.filepond );
    console.log( idfile, `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` )
    // Remove it if it already exists
    fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` );
    // mv tmp
    fs.moveSync( idfile, `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` );
    //res.status(200).send({models:"Tribes",info:["Savedsuccess"],moreinfo:{id:file.filepond.path}})
    //return for filepond
    res.writeHead( 200, { 'Content-Type': 'text/plain' } );
    res.end( idfile );
  } )
} );
router.delete( '/file', checkHeaders, isAuthenticated, ( req, res ) => {
  //src = objectfolder with accessright/...
  //hasAccessrighton( "www", "D" ),
  if( !req.query.src ) {
    res.status( 404 )
      .send( { info: [ 'deleteerror' ], models: "Tribes", moreinfo: "your del req needs a src" } )
    return;
  };
  // @todo: hasAccessrighton() is a middleware factory; this call has no effect here
  hasAccessrighton( req.query.src.split( '/' )[ 0 ], "D" );
  console.log( 'Remove file', `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` )
  console.log( req.body )
  fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` );
  res.status( 200 )
    .send( { info: [ 'Successfullremove' ], models: "Tribes" } )
} );
router.post( '/uploadfile', checkHeaders, isAuthenticated, ( req, res ) => {
  console.log( 'upload a file ' )
  /* Authentication is needed to get a TOKEN
  curl -X POST -H "xtribe: apxtrib" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: 1" -H "xauth: 1" -H "xapp: pvmsaveurs:pvmsaveurs" -H "Content-Type: application/json" -d '{"LOGIN":"adminapxtrib","PASSWORD":"Trze3aze!"}' http://pvmsaveurs.pvmsaveurs.fr/app/users/login
  if it exists, replace xpaganidTOKEN with the payload.TOKEN value

  curl -H "xtribe: pvmsaveurs" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: adminapxtrib" -H "xauth: xpressuuisToken" -H "xapp: pvmsaveurs:pvmsaveurs" -F 'data=@filename.xx' http://pvmsaveurs.pvmsaveurs.fr/app/Tribes/uploadfile
  */
  const formidable = require( 'formidable' );
  const form = formidable( { multiples: false } );
  form.parse( req, function ( err, fields, files ) {
    //console.log( files.data )
    var oldPath = files.data.path;
    // @todo: clientconf is not defined in this route (see /uploadzip where it is read from clientconf.json)
    var newPath = `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].dest}`;
    console.log( 'oldPath', oldPath )
    console.log( 'newPath', newPath )
    var rawData = fs.readFileSync( oldPath )
    fs.outputFile( newPath, rawData, function ( err ) {
      if( err ) {
        console.log( err );
        return res.status( 405 )
          .send( { info: [ 'savederror' ], models: "Tribes", moreinfo: "your file could not be saved on the server" } )
      } else {
        return res.status( 200 )
          .send( {
            info: [ "successfullsent" ],
            models: "Tribes"
          } );
      }
    } )
  } );
} );
router.post( '/uploadzip', checkHeaders, ( req, res ) => {
  console.log( 'uploadzip a file ' )

  /* no authentication to upload a zip filename into /tribes/${xworkon}/${clientconf.uploadzip[filename].dest}
  unzip it using the password ${clientconf.uploadzip[filename].psw}
  if no error then run the callback ${clientconf.uploadzip[filename].callback}
  a password to unzip needs to be set in clientconf.json:
  "uploadzip": {
    "articlesTribespvm.zip": {
      "comment": "unzip with overwrite if same name",
      "psw": "azPI1209qtrse",
      "dest": "importexport/tmp",
      "unzipoption": "-aoa",
      "callback": "importexport/integrationitem.js"
    }
  },
  Example:
  cd where the zip file is stored
  curl -H "xtribe: pvmsaveurs" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: adminapxtrib" -H "xauth: 1" -H "xapp: pvmsaveurs:pvmsaveurs" -F 'data=@articlesTribespvm.zip' http://pvmsaveurs.pvmsaveurs.fr/app/Tribes/uploadzip

  */
  const clientconf = fs.readJSONSync( `${config.tribes}/${req.session.header.xworkon}/clientconf.json` )
  if( !clientconf.uploadzip ) {
    return res.status( '404' )
      .send( { info: [ "missconf" ], models: "Tribes", moreinfo: `no uploadzip in clientconf for ${req.session.header.xworkon} please contact apxtrib admin ` } );
  };
  const uploadzip = clientconf.uploadzip;
  const formidable = require( 'formidable' );
  const form = formidable( { multiples: false } );
  form.parse( req, function ( err, fields, files ) {
    //console.log( files.data )
    var oldPath = files.data.path;
    if( !Object.keys( clientconf.uploadzip )
      .includes( files.data.name ) ) {
      return res.status( 403 )
        .send( { info: [ "notAllowed" ], models: "Tribes", moreinfo: `file ${files.data.name} not allowed to be uploaded` } )
    } else {
      console.log( "context:", clientconf.uploadzip[ files.data.name ] )
      var newPath = `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].dest}`;
      //console.log( 'oldPath', oldPath )
      //console.log( 'newPath', `${newPath}/${files.data.name}` )
      fs.moveSync( oldPath, `${newPath}/${files.data.name}`, { overwrite: true } );
      const cp = require( 'child_process' );
      //console.log( `7z e -p${clientconf.uploadzip[ files.data.name ].psw} ${newPath}/${files.data.name}` );
      console.log( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
      var newFiles = cp.spawnSync( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
      console.log( newFiles.output.toString() )
      if( newFiles.output.toString()
        .includes( 'Everything is Ok' ) ) {
        if( clientconf.uploadzip[ files.data.name ].callback ) {
          const integ = require( `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].callback}` )
            .run();
          console.log( 'integration', integ )
          return res.status( integ.status )
            .send( integ.payload );
        } else {
          return res.status( 200 )
            .send( {
              info: [ "successfullsent" ],
              models: "Tribes"
            } );
        }
      } else {
        return res.status( 400 )
          .send( {
            info: [ "zipfileerror" ],
            models: "Tribes",
            moreinfo: newFiles.output.toString()
          } )
      }
    }
  } )
} );

router.post( '/upload', checkHeaders, isAuthenticated, ( req, res, next ) => {
  // TO CHANGE: use sendjson instead
  // url /Tribes/upload?save=tmp&rep=referentials/dataManagement
  // if save=tmp then store in a tmp file
  // if save=ok then mv the tmp file to the folder
  // the middleware hasAccessrighton.js is not applied here, it only covers access/update/create of information inside an object
  // to upload a file a user needs accessrights to data: object: C or to Own it
  // or if dir.file exists, a .info.json in the folder gets shared as C in uuid accessright
  /*
  to add in front
  <form action="/upload" method="POST" enctype="multipart/form-data">
    <input type="file" name="file"/>
    <input type="submit" value="upload"/>
  </form>
  */
  console.log( 'Send image' )
  console.log( 'body', req.body );
  console.log( 'params', req.params );
  //const authfolder = Tribes.checkaccessfiles( req.params.rep, 'C', req.session.header.accessrights, req.session.header.xpaganid );
  // check authorization to create or replace a file for this user's accessrights
  const authfolder = { ok: "tt" }
  if( authfolder.ok ) {
    if( req.params.save == 'file' ) {
      if( fs.existsSync( req.body.filepond ) ) {
        fs.moveSync( req.body.filepond, req.params.rep ); // was fs.mv, which does not exist in fs-extra
      }
    };
    // check whether this is still relevant since uploads are handled by filepond for images
    if( req.params.save == 'upload' ) {
      const formidable = require( 'formidable' ); // was missing in this scope
      const form = formidable( { multiples: false } );
      form.parse( req, ( err, fields, files ) => {
        if( err ) { next( err ); return; }
        let thefile = files.filepond.path; // was files.filebond, a typo
        // fs.outputFileSync() was called here without arguments; dropped as a no-op
        console.log( 'thefile:' + thefile );
        res.writeHead( 200, { 'Content-Type': 'text/plain' } );
        res.end( thefile ); // was theFile, a case mismatch
      } )
    }
  } else {
    res.status( 403 )
      .send( 'forbidden access' );
  }
} );
/*
Manage tribeid into /data/tribee/tribeid
dedicated client space

@Todo
clientconfglob copy cut from Referentials.clientconfglob
clientconf.json copy cut from Referentials.clientconf
list of tribeid copy cut from Referentials.
Add a tribeid
update clientconf

*/

module.exports = router;