update project with new architecture

This commit is contained in:
philc 2023-04-13 07:46:35 +02:00
parent d0a3b10cfe
commit 67a02c33a2
333 changed files with 3764 additions and 1254 deletions

3
.gitignore vendored
View File

@ -1,6 +1,5 @@
/node_modules /node_modules
/tmp /tmp
/tribes
/tribesbackup
/nationchains/blocks /nationchains/blocks
/nationchains/tribes
/yarn* /yarn*

89
app/middlewares/checkHeaders.js Executable file
View File

@ -0,0 +1,89 @@
const path = require( 'path' );
const config = require( '../tribes/townconf.js' );

const checkHeaders = ( req, res, next ) => {
  /**
   * @apiDefine apxHeader
   * @apiGroup Middleware
   * @apiDescription Header is mandatory to access apxtrib see tribes/townconf.json.exposedHeaders
   *                 A turn around can be done with a simple get params has to be sent in the get url. Usefull to send simple get without header like ?xworkon=tribeName&xlang=en... priority is given to headers
   *                 For performance, tokens are store globaly in req.app.locals.tokens={xpaganid:xauth}
   *                 if xlang is not in config.languagesAvailable
   *
   * @apiHeader {string} xauth Pagans unique jwt token store in local town Pagans data or "noauth"
   * @apiHeader {string} xpaganid Pagans unique Pagan id in uuid format or "nouuid"
   * @apiHeader {string} xlang the 2 letter langage it request the api (if not exist the 2 first letter of Accept-Language header ) if lang does not exist in the town then en is set (as it always exist in en).
   * @apiHeader {string} xtribe Tribes id where Pagan belong to
   * @apiHeader {string} xworkon Tribes on which pagansId want and have accessright to work on.
   * @apiHeader {string} xapp Name of app that send the request (tribesId:websiteName) cpaganid have to have accessright on this app}
   *
   * @apiError missingexposedHeaders it miss an exposedHeaders
   *
   * @apiErrorExample {json} Error-Response:
   *     HTTP/1.1 404 Not Found
   *     {
   *       status:404,
   *       info:"|middleware|missingheaders",
   *       moreinfo: xpaganid xauth
   *     }
   *
   * @apiHeaderExample {json} Header-Exemple:
   *     {
   *       xtribe:"apache",
   *       xpaganid:"12123211222",
   *       xworkon:"sioux",
   *       xauth:"",
   *       xlang:"en",
   *       xapp:""
   *     }
   */
  req.session = {};
  const header = {};
  // xlang fallback: reuse Content-Language when the xlang header is absent.
  if( !req.header( 'xlang' ) && req.header( 'Content-Language' ) ) req.params.xlang = req.header( 'Content-Language' );
  const missingheader = [];
  for( const h of config.exposedHeaders ) {
    // priority is given to simple get params over headers
    if( req.params[ h ] ) {
      header[ h ] = req.params[ h ];
    } else if( req.header( h ) ) {
      header[ h ] = req.header( h );
    } else {
      // fix: was `missingheade.push(h)` => ReferenceError on any missing header
      missingheader.push( h );
    }
  }
  // store in session the header information
  req.session.header = header;
  // Each exposed header has to be declared
  if( missingheader.length > 0 ) {
    // bad request
    // fix: `ref:"headers"` was missing a trailing comma (SyntaxError);
    // fix: compare array length instead of coercing the array against ""
    return res.status( 400 )
      .json( {
        ref: "headers",
        info: "missingheader",
        moreinfo: missingheader
      } );
  }
  if( !req.app.locals.tribeids.includes( header.xtribe ) ) {
    // fix: `ref:"headers"` was missing a trailing comma (SyntaxError)
    return res.status( 400 )
      .json( {
        ref: "headers",
        info: 'tribeiddoesnotexist',
        moreinfo: header.xtribe
      } );
  }
  if( !req.app.locals.tribeids.includes( header.xworkon ) ) {
    // consistency: use .json and a string info key like the sibling 400 responses,
    // so the mustache lang template "workondoesnotexist" resolves the same way
    return res.status( 400 )
      .json( {
        ref: 'headers',
        info: 'workondoesnotexist',
        moreinfo: header.xworkon
      } );
  }
  // NOTE(review): apidoc above mentions config.languagesAvailable — confirm the key name
  if( !config.languages.includes( header.xlang ) ) {
    header.xlang = "en";
  }
  next();
};
module.exports = checkHeaders;

View File

@ -30,10 +30,10 @@ const hasAccessrighton = ( object, action, ownby ) => {
//console.log( 'Access data autorise? ', req.right ) //console.log( 'Access data autorise? ', req.right )
if( !req.right ) { if( !req.right ) {
return res.status( 403 ) return res.status( 403 )
.send( { .json( {
info: [ 'forbiddenAccess' ], info:'forbiddenAccessright',
model: 'middleware', ref: 'headers',
moreinfo: 'no auth to act on this object' moreinfo: {xpaganid:req.session.header.xpaganid,object:object, xworkon:req.session.header.xworkon, action:action}
} ) } )
} }
next(); next();

View File

@ -8,6 +8,7 @@ const glob = require( 'glob' );
//const src = '..'; // ( __dirname.indexOf( '/node_modules/' ) > -1 ) ? '../../..' : '..'; //const src = '..'; // ( __dirname.indexOf( '/node_modules/' ) > -1 ) ? '../../..' : '..';
//const config = require( path.normalize( `${__dirname}/${src}/config.js` ) ); //const config = require( path.normalize( `${__dirname}/${src}/config.js` ) );
const config = require( '../tribes/townconf.js' ); const config = require( '../tribes/townconf.js' );
const isAuthenticated = ( req, res, next ) => { const isAuthenticated = ( req, res, next ) => {
/* /*
check if authenticated with valid token check if authenticated with valid token
@ -17,7 +18,8 @@ const isAuthenticated = ( req, res, next ) => {
app:{'tribeid:website':[liste of menu]}, app:{'tribeid:website':[liste of menu]},
data:{ "sitewebsrc": "RWCDO", data:{ "sitewebsrc": "RWCDO",
"contacts": "RWCDO"}} "contacts": "RWCDO"}}
Liste of menu is linked with the app tht have to be consistent with accessrights.data Liste of menu is linked with the app tht h
ave to be consistent with accessrights.data
data, list of object accessright Read Write Create Delete Owner data, list of object accessright Read Write Create Delete Owner
a xuuid can read any objet if R a xuuid can read any objet if R
if O wner means that it can only read write its object create by himself if O wner means that it can only read write its object create by himself
@ -54,7 +56,7 @@ const isAuthenticated = ( req, res, next ) => {
if( req.session.header.xauth == "1" ) { if( req.session.header.xauth == "1" ) {
//return res.status( 403 ) //return res.status( 403 )
return res.status( 403 ) return res.status( 403 )
.send( { .json( {
info: [ 'forbiddenAccess' ], info: [ 'forbiddenAccess' ],
model: 'Pagans', model: 'Pagans',
moreinfo: 'isAuthenticated faill' moreinfo: 'isAuthenticated faill'

View File

@ -0,0 +1,6 @@
{
"missingheader":"This header miss to have a valid request: {{#moreinfo}} {{.}} {{/moreinfo}}",
"tribeiddoesnotexist":"Header xtribe: {{moreinfo}} does not exist",
"workondoesnotexist":"Header xworkon: {{moreinfo}} does not exist",
"forbiddenAccessright":"Pagan {{moreinfo.xpaganid}} has not access right to act {{moreinfo.action}} onto object {{moreinfo.object}} for tribe {{moreinfo.xworkon}}"
}

View File

@ -1,34 +1,33 @@
/* /*
This module have to be independant of any external package This module have to be use in back as well front
it is shared between back and front and is usefull
to apply common check in front before sending it in back
can be include in project with can be include in project with
- into a browser : <script src="https://townName.nationName.dns/socialworld/contracts/check.js"></script> - into a browser : <script src="https://townName.nationName.dns/nationchains/contracts/Checkjson.js"></script>
- into a node.js : const check = require( `../nationchains/socialworld/contracts/check.js`); - into a node.js : const Checkjson = require( `../nationchains/socialworld/contracts/Checkjson.js`);
*/ */
// --## // --##
const check = {}; const Checkjson = {};
check.schema = {}; Checkjson.schema = {};
check.schema.properties = {}; Checkjson.schema.properties = {};
check.schema.properties.type = {}; Checkjson.schema.properties.type = {};
check.schema.properties.type.string = (str) => typeof str === "string"; Checkjson.schema.properties.type.string = (str) => typeof str === "string";
check.schema.properties.type.number = (n) => typeof n === "number"; Checkjson.schema.properties.type.number = (n) => typeof n === "number";
check.schema.properties.type.integer = (n) => Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean";
Checkjson.schema.properties.type.integer = (n) =>
n != "" && !isNaN(n) && Math.round(n) == n; n != "" && !isNaN(n) && Math.round(n) == n;
check.schema.properties.type.float = (n) => Checkjson.schema.properties.type.float = (n) =>
n != "" && !isNaN(n) && Math.round(n) != n; //not yet in json schema n != "" && !isNaN(n) && Math.round(n) != n; //not yet in json schema
check.schema.properties.minLength = (str, min) => Checkjson.schema.properties.minLength = (str, min) =>
typeof str === "string" && str.length > parseInt(min); typeof str === "string" && str.length > parseInt(min);
check.schema.properties.maxLength = (str, max) => Checkjson.schema.properties.maxLength = (str, max) =>
typeof str === "string" && str.length < parseInt(max); typeof str === "string" && str.length < parseInt(max);
check.schema.properties.multipleOf = (n, val) => Checkjson.schema.properties.multipleOf = (n, val) =>
typeof n === "number" && typeof n === "number" &&
typeof val === "number" && typeof val === "number" &&
parseFloat(n) / parseFloat(val) - parseFloat(n) / parseFloat(val) -
Math.round(parseFloat(n) / parseFloat(val)) < Math.round(parseFloat(n) / parseFloat(val)) <
0.0000001; 0.0000001;
check.schema.properties.range = ( Checkjson.schema.properties.range = (
n, n,
minimum, minimum,
exclusiveMinimum, exclusiveMinimum,
@ -45,7 +44,7 @@ check.schema.properties.range = (
return false; return false;
return true; return true;
}; };
check.schema.properties.pattern = (str, pattern) => { Checkjson.schema.properties.pattern = (str, pattern) => {
try { try {
new RegExp(pattern); new RegExp(pattern);
} catch (e) { } catch (e) {
@ -53,11 +52,12 @@ check.schema.properties.pattern = (str, pattern) => {
} }
return pattern.test(str); return pattern.test(str);
}; };
check.schema.properties.enum = (str, enumvalues) => Checkjson.schema.properties.enum = (str, enumvalues) =>
typeof str === "string" && enumvalues.includes(str); typeof str === "string" && enumvalues.includes(str);
// see format https://json-schema.org/understanding-json-schema/reference/string.html#format // see format https://json-schema.org/understanding-json-schema/reference/string.html#format
check.schema.properties.format = { Checkjson.schema.properties.format = {
"date-time": / /, "date-time": / /,
stringalphaonly:/^[A-Za-z0-9]{3,}$/,
time: / /, time: / /,
date: / /, date: / /,
duration: / /, duration: / /,
@ -78,8 +78,8 @@ check.schema.properties.format = {
/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/, /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/,
postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/, postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/,
}; };
Checkjson.schema.properties.default
check.schema.validation = (schema) => { Checkjson.schema.validation = (schema) => {
/*validate a schema structure*/ /*validate a schema structure*/
const res = { status: 200, err: [] }; const res = { status: 200, err: [] };
if (schema.properties) { if (schema.properties) {
@ -88,10 +88,10 @@ check.schema.validation = (schema) => {
if ( if (
properties[p].type && properties[p].type &&
typeof properties[p].type === "string" && typeof properties[p].type === "string" &&
!check.schema.properties.type[properties[p].type] !Checkjson.schema.properties.type[properties[p].type]
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|typedoesnotexistinschema", info: "|Checkjson|typedoesnotexistinschema",
moreinfo: ` ${properties[p].type}`, moreinfo: ` ${properties[p].type}`,
}); });
} }
@ -101,25 +101,25 @@ check.schema.validation = (schema) => {
Array.isArray(properties[p].type) Array.isArray(properties[p].type)
) { ) {
properties[p].type.forEach((tp) => { properties[p].type.forEach((tp) => {
if (!check.schema.properties.type[tp]) if (!Checkjson.schema.properties.type[tp])
res.err.push({ res.err.push({
info: "|checkdata|typedoesnotexistinschema", info: "|Checkjson|typedoesnotexistinschema",
moreinfo: `${tp} of ${properties[p].type}`, moreinfo: `${tp} of ${properties[p].type}`,
}); });
}); });
} }
if ( if (
properties[p].format && properties[p].format &&
!check.schema.properties.format[properties[p].format] !Checkjson.schema.properties.format[properties[p].format]
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|formatdoesnotexistinschema", info: "|Checkjson|formatdoesnotexistinschema",
moreinfo: ` ${properties[p].format}`, moreinfo: ` ${properties[p].format}`,
}); });
} }
if (properties[p].enum && !Array.isArray(properties[p].enum)) { if (properties[p].enum && !Array.isArray(properties[p].enum)) {
res.err.push({ res.err.push({
info: "|checkdata|enumisnotarrayinschema", info: "|Checkjson|enumisnotarrayinschema",
moreinfo: ` ${properties[p].enum}`, moreinfo: ` ${properties[p].enum}`,
}); });
} }
@ -130,7 +130,7 @@ check.schema.validation = (schema) => {
return res; return res;
}; };
check.schema.data = (schema, ctx, data) => { Checkjson.schema.data = (schema, data, withschemacheck) => {
/* validate a data set with a schema in a context ctx */ /* validate a data set with a schema in a context ctx */
/* /*
console.log('#################') console.log('#################')
@ -138,8 +138,10 @@ check.schema.data = (schema, ctx, data) => {
console.log('---------') console.log('---------')
console.log(data) console.log(data)
*/ */
const validschema = check.schema.validation(schema); if (withschemacheck) {
const validschema = Checkjson.schema.validation(schema);
if (validschema.status != 200) return validschema; if (validschema.status != 200) return validschema;
}
const res = { status: 200, err: [] }; const res = { status: 200, err: [] };
if (schema.properties) { if (schema.properties) {
const properties = schema.properties; const properties = schema.properties;
@ -153,38 +155,38 @@ check.schema.data = (schema, ctx, data) => {
let valid = false; let valid = false;
typlist.forEach((typ) => { typlist.forEach((typ) => {
// at least one test have to be ok // at least one test have to be ok
if (check.schema.properties.type[typ](data[p])) valid = true; if (Checkjson.schema.properties.type[typ](data[p])) valid = true;
}); });
if (!valid) if (!valid)
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]}`, moreinfo: `${p} : ${data[p]}`,
}); });
if ( if (
properties[p].minLength && properties[p].minLength &&
!check.schema.properties.minLength(data[p], properties[p].minLength) !Checkjson.schema.properties.minLength(data[p], properties[p].minLength)
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]} minLength:${properties[p].minLength}`, moreinfo: `${p} : ${data[p]} minLength:${properties[p].minLength}`,
}); });
} }
if ( if (
properties[p].maxLength && properties[p].maxLength &&
!check.schema.properties.maxLength(data[p], properties[p].maxLength) !Checkjson.schema.properties.maxLength(data[p], properties[p].maxLength)
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]} maxLength:${properties[p].maxLength}`, moreinfo: `${p} : ${data[p]} maxLength:${properties[p].maxLength}`,
}); });
} }
if ( if (
properties[p].multipleOf && properties[p].multipleOf &&
!check.schema.properties.multipleOf(data[p], properties[p].multipleOf) !Checkjson.schema.properties.multipleOf(data[p], properties[p].multipleOf)
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]} not a multipleOf:${properties[p].multipleOf}`, moreinfo: `${p} : ${data[p]} not a multipleOf:${properties[p].multipleOf}`,
}); });
} }
@ -196,7 +198,7 @@ check.schema.data = (schema, ctx, data) => {
) { ) {
// test range // test range
if ( if (
!check.schema.properties.range( !Checkjson.schema.properties.range(
data[p], data[p],
properties[p].minimum, properties[p].minimum,
properties[p].exclusiveMinimum, properties[p].exclusiveMinimum,
@ -205,36 +207,36 @@ check.schema.data = (schema, ctx, data) => {
) )
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]} not in range ${properties[p].minimum} exclu: ${properties[p].exclusiveMinimum} and ${properties[p].maximum} exclu: ${properties[p].exclusiveMaximum}`, moreinfo: `${p} : ${data[p]} not in range ${properties[p].minimum} exclu: ${properties[p].exclusiveMinimum} and ${properties[p].maximum} exclu: ${properties[p].exclusiveMaximum}`,
}); });
} }
} }
if ( if (
properties[p].enum && properties[p].enum &&
!check.schema.properties.enum(data[p], properties[p].enum) !Checkjson.schema.properties.enum(data[p], properties[p].enum)
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]} not in enum list :${properties[p].enum}`, moreinfo: `${p} : ${data[p]} not in enum list :${properties[p].enum}`,
}); });
} }
if (properties[p].format) { if (properties[p].format) {
properties[p].pattern = properties[p].pattern =
check.schema.properties.format[properties[p].format]; Checkjson.schema.properties.format[properties[p].format];
} }
if ( if (
properties[p].pattern && properties[p].pattern &&
!check.schema.properties.pattern(data[p], properties[p].pattern) !Checkjson.schema.properties.pattern(data[p], properties[p].pattern)
) { ) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertie", info: "|Checkjson|dataerrpropertie",
moreinfo: `${p} : ${data[p]} problem pattern or format ${properties[p].pattern}`, moreinfo: `${p} : ${data[p]} problem pattern or format ${properties[p].pattern}`,
}); });
} }
} else if (schema.required.includes(p)) { } else if (schema.required && schema.required.includes(p)) {
res.err.push({ res.err.push({
info: "|checkdata|dataerrpropertiesrequired", info: "|Checkjson|dataerrpropertiesrequired",
moreinfo: `${p}`, moreinfo: `${p}`,
}); });
} }
@ -243,59 +245,4 @@ check.schema.data = (schema, ctx, data) => {
if (res.err.length > 0) res.status = 417; if (res.err.length > 0) res.status = 417;
return res; return res;
}; };
if (typeof module !== "undefined") module.exports = Checkjson;
/*
Normalize data link to check.schema.properties.format
or any normalization to get consistent data
*/
const normalize={};
normalize.telephonefr =(phone)=>{
phone = phone.trim().replace(/[- .]/g, "");
if (
check.schema.properties.format.telephoenfr(phone) &&
phone.length == 10 &&
phone[0] == "0"
) {
phone = "+33 " + phone.substring(1);
}
return phone;
};
normalize.zfill10 = (num) => {
let s = num + "";
while (s.length < 10) s = "0" + s;
return s;
};
check.test.unique = (ctx, val) => {
if (ctx.list[ctx.currentfield]) {
return !ctx.list[ctx.currentfield].includes(val);
} else {
console.log("ERR no list for field:" + ctx.currentfield);
return false;
}
};
// check.normalize take a correct data then reformat it to harmonise it
check.normalize = {};
check.normalize.phoneNumber = (ctx, phone) => {
phone = phone.trim().replace(/[- .]/g, "");
if (
check.test.phoneNumber("", phone) &&
phone.length == 10 &&
phone[0] == "0"
) {
phone = "+33 " + phone.substring(1);
}
return phone;
};
check.normalize.upperCase = (ctx, txt) => txt.toUpperCase();
check.normalize.lowerCase = (ctx, txt) => txt.toLowerCase();
// fixe 10 position et complete par des 0 devant
check.normalize.zfill10 = (ctx, num) => {
let s = num + "";
while (s.length < 10) s = "0" + s;
return s;
};
if (typeof module !== "undefined") module.exports = check;

View File

@ -9,7 +9,7 @@ const { DateTime } = require( 'luxon' );
const UUID = require( 'uuid' ); const UUID = require( 'uuid' );
const Outputs = require( '../models/Outputs.js' ); const Outputs = require( '../models/Outputs.js' );
const config = require( '../tribes/townconf.js' ); const config = require( '../tribes/townconf.js' );
const checkdata = require( `../nationchains/socialworld/contracts/checkdata.js`); const Checkjson = require( `./Checkjson.js`);
/* /*
Message manager Message manager
* Manage apxtrib message at different level * Manage apxtrib message at different level
@ -220,9 +220,9 @@ Messages.object = ( data, header ) => {
if( data.callback ) { if( data.callback ) {
// check from plugin data and add relevant data // check from plugin data and add relevant data
const Plug = require( `${config.tribes}/${data.callback.tribeid}/plugins/${data.callback.plugins}/Model.js` ); const Plug = require( `${config.tribes}/${data.callback.tribeid}/plugins/${data.callback.plugins}/Model.js` );
const check = Plug[ data.callback.function ]( header.xworkon, data ); const Checkjson = Plug[ data.callback.function ]( header.xworkon, data );
if( check.status == 200 ) { if( Checkjson.status == 200 ) {
data = check.data.data; data = Checkjson.data.data;
} else { } else {
return check; return check;
} }

View File

@ -3,9 +3,12 @@ const fs = require("fs-extra");
const glob = require("glob"); const glob = require("glob");
const jwt = require("jwt-simple"); const jwt = require("jwt-simple");
const axios = require("axios"); const axios = require("axios");
const config = require("../tribes/townconf.js"); const path=require('path');
const Odmdb = require('../models/Odmdb.js'); //const config = require("../tribes/townconf.js");
const Odmdb = require('./Odmdb.js');
// lowercase 1st letter is normal
const towns = require('./Towns.js');
const pagans = require('./Pagans.js');
/* /*
Blockchain manager Blockchain manager
* Manage network directory of nations and towns * Manage network directory of nations and towns
@ -15,14 +18,75 @@ Blockchain manager
* manage APIXP rules 20 M APIXP 1AXESS = 1 block validation * manage APIXP rules 20 M APIXP 1AXESS = 1 block validation
* manage contract = action if something appened validate by a proof of work * manage contract = action if something appened validate by a proof of work
*/ */
const Nationchains = {}; const Nations = {};
Nationchains.init = () => { Nations.init = () => {
console.group("init Nationchains"); console.group("init Nations");
}; };
Nationchains.synchronize = () => {
Nations.updateChains = async (newtown) => {
  /**
   * @newtown {object} optional to request a registration in the nationchain network
   * if newtown exist then it send a request to update itself else it just refresh from existing town.
   * Check public nationchains are up to date from the existing list of towns
   * Each object to sync have a /index/config.json with key lastupdate = timestamp last update
   * tribes is not synchonized and contain private information
   * A town is a network node of the nationchains and allow to synchronize new
   */
  const res = { status: 400 };
  // map objectName => local lastupdate timestamp
  const ref2update = {};
  glob.sync('nationchains/**/index/config.json').forEach((f) => {
    const ref = fs.readJsonSync(f);
    ref2update[path.basename(ref.schema, '.json')] = ref.lastupdate;
  });
  console.log(ref2update);
  // Get list of towns to check which of them have the freshest update
  const knowntowns = fs.readJsonSync('nationchains/towns/index/towns_townId_all.json');
  const promiselistblock = [];
  // fix: a single list was declared `towidlist` but pushed to as `townidlistblock`
  // and read as `townidlist` — unified under one name
  const townidlist = [];
  Object.keys(knowntowns).forEach((townid) => {
    // identify the town with the highest block to update town
    promiselistblock.push(axios.get(`${knowntowns[townid].url}/blocks/index/config.json`));
    townidlist.push(townid);
  });
  let selectedtown = "";
  let blocnum = 0;
  await Promise.all(promiselistblock)
    .then((rep) => {
      for (let pos = 0; pos < townidlist.length; pos++) {
        // fix: axios wraps the payload in .data — was rep[pos].blocnum (always undefined)
        if (rep[pos].data.blocnum > blocnum) {
          selectedtown = townidlist[pos];
          blocnum = rep[pos].data.blocnum;
        }
      }
    })
    .catch((err) => {
      console.log(err);
    });
  const promiselistref = [];
  Object.keys(ref2update).forEach((ob) => {
    // fix: was `${obj}` (undefined) instead of the loop variable `ob`
    promiselistref.push(axios.get(`${knowntowns[selectedtown].url}/${ob}/index/config.json`));
  });
  await Promise.all(promiselistref)
    .then((rep) => {
      for (let pos = 0; pos < townidlist.length; pos++) {
        // if remote lastupdate > local lastupdate => fetch _all and regenerate every object by overwrite
      }
    })
    .catch((err) => {
      console.log(err);
    });
  return res;
};
// fix: was assigned to `Nation.update` — `Nation` is undefined, which throws a
// ReferenceError when the module is loaded; the container object is `Nations`.
Nations.update = (nationsource) => {
  /**
   * Update object nation with last update
   * NOTE(review): not implemented yet — body intentionally empty.
   */
};
Nations.synchronize = () => {
/* /*
Run process to communicate with a list of apxtrib instance to update transaction and earn AXP Run process to communicate with a list of towns to update network and transaction
To creation of a new tribeid or a new login
*/ */
//update himself then send to other information //update himself then send to other information
if (process.env.NODE_ENV != "prod") { if (process.env.NODE_ENV != "prod") {
@ -64,7 +128,7 @@ Nationchains.synchronize = () => {
// proof of work // proof of work
// try to find a key based on last block with difficulty // try to find a key based on last block with difficulty
// if find then send to all for update and try to get token // if find then send to all for update and try to get token
// in any case rerun Nationchains.synchronize() // in any case rerun Nations.synchronize()
currentinstance.instanceknown.forEach((u) => { currentinstance.instanceknown.forEach((u) => {
if (u != config.rootURL) { if (u != config.rootURL) {
//send currentinstance info and get back state of //send currentinstance info and get back state of
@ -119,14 +183,14 @@ Nationchains.synchronize = () => {
}); });
}; };
Nationchains.create = (conf) => { Nations.create = (conf) => {
/* /*
@conf from a nationchains/socialworld/setup/townSetup {object, nationName, townName, dns} @conf from a nationchains/socialworld/setup/townSetup {object, nationName, townName, dns}
@return @return
*/ */
const res = {}; const res = {};
if (conf.object=="town"){ if (conf.object=="towns"){
Odmdb.create("nationchains/socialworld/objects","town",conf); Odmdb.create("nationchains/socialworld/objects","towns",conf);
} }
const nation_town = fs.readJsonSync( const nation_town = fs.readJsonSync(
"./nationchains/socialworld/objects/towns/searchindex/towns_nationId_townId.json" "./nationchains/socialworld/objects/towns/searchindex/towns_nationId_townId.json"

262
app/models/Odmdb.js Normal file
View File

@ -0,0 +1,262 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const axios = require('axios');
//const config = require( '../tribes/townconf.js' );
const Checkjson = require(`./Checkjson.js`);
/* This manage Objects for indexing and check and act to CRUD
objectpath/objects/schema/objectName.json
/objectNames/searchindes/objectName_valueofkey_uuildlist.json
/objectNames/uuid.json
*/
const Odmdb = {};
/*
Input: metaobject => data mapper of Key: Value
objname + an object {} + action Checkjson => get a valid or not answer
objname + an object {} + action search => apply matching algo to find probalistic object id
objname + action index => update /searcindex of objects concern
*/
Odmdb.setObject = (schemaPath, objectPath, objectName, schema, lgjson, lg) => {
  /*
  Register (or replace) a schema and bootstrap the matching object storage.
  @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
  @objectPath {string} path where object are store
  @objectName {string} name of the object
  @schema {object} the json schema for this object
  @lgjson {object} the json file for a specific language
  @lg {string} the 2 letters language
  Layout produced:
    schemaPath/schema/objectName.json
              /lg/objectName_{lg}.json
    objectPath/objectName/index/config.json = {"schema":"relativpathfile or http"}
              /uniqueid.json defining schema
  */
  // helper: true when o is a non-empty plain object
  const isFilled = (o) => o && !(Object.keys(o).length === 0 && o.constructor === Object);
  if (!fs.existsSync(schemaPath)) {
    return { status: 404, ref: "Odmdb", info: "pathnamedoesnotexist", moreinfo: { fullpath: schemaPath } };
  }
  if (!fs.existsSync(objectPath)) {
    return { status: 404, ref: "Odmdb", info: "pathnamedoesnotexist", moreinfo: { fullpath: objectPath } };
  }
  // persist the schema file only when it is not empty/undefined/{}
  if (isFilled(schema)) {
    fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, { spaces: 2 });
  }
  // persist the language file only when both lgjson and lg are provided
  if (lg && isFilled(lgjson)) {
    fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, { spaces: 2 });
  }
  // bootstrap the object environment pointing at the new schema config
  if (!fs.existsSync(`${objectPath}/${objectName}`)) {
    fs.outputJsonSync(
      `${objectPath}/${objectName}/index/config.json`,
      { schema: `${schemaPath}/schema/${objectName}.json` },
      { spaces: 2 }
    );
  }
  return { status: 200 };
};
Odmdb.schema = (schemaPath, objectName, withschemacheck) => {
  // Return schema if exist and objectpath contain objectName { status:200; data:schema }
  // When withschemacheck is truthy, also validate apx specificities
  // (apxprimarykey / apxuniquekey / apxsearchindex) and the json-schema structure.
  if (!fs.existsSync(`${schemaPath}/${objectName}`))
    return {
      status: 404,
      info: "|odmdb|schemapathnamedoesnotexist",
      moreinfo: `${schemaPath}/${objectName}`,
    };
  // fix: was `${objectPath}/schema/...` — objectPath is not a parameter of this
  // function and would throw a ReferenceError; the schema lives under schemaPath.
  if (!fs.existsSync(`${schemaPath}/schema/${objectName}.json`)) {
    return {
      status: 404,
      info: `|odmdb|schemanotfound`,
      moreinfo: `file not found ${schemaPath}/schema/${objectName}.json`,
    };
  }
  const schema = fs.readJsonSync(`${schemaPath}/schema/${objectName}.json`);
  // check schema apx validity specificities primary unique ans searchindex
  if (withschemacheck) {
    if (!schema.apxprimarykey) {
      // code 422: unprocessable Content
      return {
        status: 422,
        info: "|Odmdb|apxprimarykeynotfound",
        moreinfo: `${schemaPath}/schema/${objectName}.json`,
      };
    }
    if (
      !(
        schema.apxsearchindex[schema.apxprimarykey] &&
        schema.apxsearchindex[schema.apxprimarykey].list
      )
    ) {
      return {
        status: 422,
        info: "|Odmdb|apxprimaryketnotinsearchindexlist",
        moreinfo: `${schemaPath}/schema/${objectName}.json`,
      };
    }
    if (schema.apxuniquekey) {
      // fix: the original used `return` inside a forEach callback, which only
      // exits the callback — validation failures were silently ignored.
      for (const k of schema.apxuniquekey) {
        if (
          !(
            schema.apxsearchindex[k] &&
            schema.apxsearchindex[k][schema.apxprimarykey]
          )
        ) {
          return {
            status: 422,
            info: "|Odmdb|apxuniquekeynotinsearchindex",
            moreinfo: `${schemaPath}/schema/${objectName}.json`,
          };
        }
      }
    }
    const validschema = Checkjson.schema.validation(schema);
    if (validschema.status != 200) return validschema;
  }
  return {
    status: 200,
    data: schema,
  };
};
Odmdb.Checkjson = (objectPath, objectName, data, withschemacheck) => {
  /*
  @objectPath path to the folder that contain /objects/objectName/ /lg/objectName_{lg}.json /schema/objectName.json
  @objectName name of object
  @data data to check based on schema objectName definition
  @return status:200 Data is consistent with schema and primarykey does not exist
          status:201 Data is consistent with schema and primarykey does already exist
          status:other means unconsistent schema:
               404: schema does not exist
            or unconsitent data and schema from Checkjson.js Checkjson.schema.data
  */
  const res = { status: 200 };
  // get schema location of the object (local relative path or http url)
  const schemaref = fs.readJsonSync(`${objectPath}/${objectName}/index/config.json`)['schema'];
  if (schemaref.substring(0, 4) == "http") {
    // TODO: fetch the schema over http — not implemented yet
  }
  // fix: removed dead `schema=="!!!…"` statement — it was a no-op comparison
  // (== not =) against a placeholder string, shadow-free and without effect.
  // check schema validity
  const schema = Odmdb.schema(objectPath, objectName, withschemacheck);
  if (schema.status != 200) return schema;
  console.log("SCHEMA for checking:");
  console.log(schema.data);
  console.log("DATA to check:");
  console.log(data);
  // withschemacheck at false, if check then it is done at Odmdb.schema
  const validate = Checkjson.schema.data(schema.data, data, false);
  if (validate.status != 200) {
    return validate;
  }
  const pk = schema.data.apxprimarykey;
  // fix: was `data[k]` with k undefined (ReferenceError) and the file path
  // carried a stray trailing "}" inside `.json}`
  if (pk && data[pk] && fs.existsSync(`${objectPath}/${objectName}/${data[pk]}.json`)) {
    res.status = 201; // means created => exist an object with this primary key
  }
  if (schema.data.apxuniquekey) {
    schema.data.apxuniquekey.forEach((k) => {
      // fix: stray trailing "}" removed from both searchindex paths
      const idxpath = `${objectPath}/${objectName}/searchindex/${objectName}_${k}_${pk}.json`;
      if (data[k] && fs.existsSync(idxpath) && fs.readJsonSync(idxpath)[k]) {
        res.status = 201; // means created => exist as primary key
      }
    });
  }
  return res;
};
Odmdb.search = (objectPath, objectName, search) => {
  /*
  @search = {
    txt: string,
    algo: match | pattern | fuzzy
    fieldstring: [list of field],
    indexfilter: { index1: [val1, val2 | ] }
  }
  Return data:[uuids]
  example: search exact match hill in townId
    heavy search={txt:"hill",algo:"match",fieldstring:"toxnId"}
    light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
    light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}
  NOTE(review): matching itself is not implemented yet — only the schema lookup runs.
  */
  const schema = Odmdb.schema(objectPath, objectName);
  if (schema.status !== 200) return schema;
};
Odmdb.get = (objectPath, objectName, uuidprimarykeyList, fieldList) => {
  /*
  @uuidprimarykeyList list of uuid requested
  @fieldList key to return for each object (all keys when omitted)
  Return objectName {status:200; data:{found:[{primarykey,field}],notfound:[uuid]}
  if all primarykey exist then data.notfound does not exist
  if all primarykey does not exist data.found does not exist
  */
  const res = { status: 200, data: {} };
  uuidprimarykeyList.forEach((id) => {
    if (fs.existsSync(`${objectPath}/${objectName}/${id}.json`)) {
      if (!res.data.found) res.data.found = [];
      const objectdata = fs.readJsonSync(
        `${objectPath}/${objectName}/${id}.json`
      );
      if (!fieldList) {
        res.data.found.push(objectdata);
      } else {
        const objinfo = {};
        // fix: was `fieldlList` (typo) => ReferenceError whenever fieldList was provided
        fieldList.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data.found.push(objinfo);
      }
    } else {
      if (!res.data.notfound) res.data.notfound = [];
      // fix: the missing id was never recorded, leaving notfound always empty
      res.data.notfound.push(id);
    }
  });
  return res;
};
Odmdb.create = (objectPath, objectName, data) => {
  /*
  Create an object's data into objectName
  @objectPath path to the folder that contain /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of object
  @data data to check based on objectsMeta definition
  @TODO not implemented yet — validate data against the schema then persist it
  */
};
Odmdb.update = (objectPath, objectName, data) => {
  /*
  Update an object's data into objectName (description previously said
  "Create" — copy/paste leftover)
  @objectPath path to the folder that contain /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of object
  @data data to check based on objectsMeta definition
  @TODO not implemented yet
  */
};
Odmdb.delete = (objectPath, objectName, data) => {
  /*
  Delete an object's data from objectName (description previously said
  "Create" — copy/paste leftover)
  @objectPath path to the folder that contain /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of object
  @data data identifying the object(s) to remove, based on objectsMeta definition
  @TODO not implemented yet
  */
};
/*console.log("test Odmdb");
console.log(
Odmdb.check(
"/media/phil/usbfarm/apxtrib/nationchains/socialworld/objects",
"nations",
{ nationId: "123", status: "unchain" }
)
);*/
module.exports = Odmdb;

View File

@ -11,7 +11,7 @@ const smtpTransport = require( 'nodemailer-smtp-transport' );
const axios = require( 'axios' ); const axios = require( 'axios' );
const { GoogleSpreadsheet } = require( 'google-spreadsheet' ); const { GoogleSpreadsheet } = require( 'google-spreadsheet' );
const config = require( '../tribes/townconf.js' ); const config = require( '../tribes/townconf.js' );
const checkdata = require( `../nationchains/socialworld/contracts/checkdata.js` ); const Checkjson = require( `./Checkjson.js` );
const Outputs = {}; const Outputs = {};

View File

@ -12,7 +12,7 @@ const axios = require( 'axios' );
const { GoogleSpreadsheet } = require( 'google-spreadsheet' ); const { GoogleSpreadsheet } = require( 'google-spreadsheet' );
const async = require( 'async' ); const async = require( 'async' );
const config = require( '../tribes/townconf.js' ); const config = require( '../tribes/townconf.js' );
const checkdata = require( `${config.tribes}/${config.mayorId}/www/cdn/public/js/checkdata` ); const Checkjson = require( `${config.tribes}/${config.mayorId}/www/cdn/public/js/Checkjson` );
const Outputs = {}; const Outputs = {};
const sleep = ( milliseconds = 500 ) => new Promise( resolve => setTimeout( resolve, milliseconds ) ); const sleep = ( milliseconds = 500 ) => new Promise( resolve => setTimeout( resolve, milliseconds ) );

View File

@ -6,7 +6,7 @@ const jwt = require( 'jwt-simple' );
const UUID = require( 'uuid' ); const UUID = require( 'uuid' );
const Outputs = require( './Outputs.js' ); const Outputs = require( './Outputs.js' );
const config = require( '../tribes/townconf.js' ); const config = require( '../tribes/townconf.js' );
const checkdata = require( `../nationchains/socialworld/contracts/checkdata.js`); const Checkjson = require( `./Checkjson.js`);
/* /*
Gestion des utilisateurs connecte Gestion des utilisateurs connecte
@ -331,7 +331,7 @@ Pagans.getUser = ( UUID, tribeid, accessrights ) => {
}; };
}; };
Pagans.getUserIdFromEMAIL = ( tribeid, EMAIL ) => { Pagans.getUserIdFromEMAIL = ( tribeid, EMAIL ) => {
if( !checkdata.test.EMAIL( EMAIL ) ) { if( !Checkjson.test.EMAIL( EMAIL ) ) {
return { return {
status: 400, status: 400,
data: { data: {
@ -368,7 +368,7 @@ Pagans.updateUserpassword = ( UUID, header, data ) => {
}; };
} }
// console.log('Credentials are matching!'); // console.log('Credentials are matching!');
if( checkdata.test.password( {}, data.pswnew ) ) { if( Checkjson.test.password( {}, data.pswnew ) ) {
user.PASSWORD = bcrypt.hashSync( data.pswnew, config.saltRounds ); user.PASSWORD = bcrypt.hashSync( data.pswnew, config.saltRounds );
fs.outputJsonSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, { fs.outputJsonSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, {
spaces: 2 spaces: 2
@ -408,8 +408,8 @@ Pagans.createUser = ( header, data ) => {
console.assert( config.loglevel == "quiet", 'EMAIL list', EMAIL ); console.assert( config.loglevel == "quiet", 'EMAIL list', EMAIL );
// list.UUID est forcement unique car on est en update et pas en create // list.UUID est forcement unique car on est en update et pas en create
if( !data.UUID ) data.UUID = UUID.v4(); if( !data.UUID ) data.UUID = UUID.v4();
// pour la logique de checkdata il faut passer le parametre // pour la logique de Checkjson il faut passer le parametre
const check = checkdata.evaluate( { const Checkjson = Checkjson.evaluate( {
list: { list: {
LOGIN, LOGIN,
EMAIL, EMAIL,
@ -417,17 +417,17 @@ Pagans.createUser = ( header, data ) => {
} }
}, ref, data ); }, ref, data );
console.assert( config.loglevel == "quiet", 'check & clean data before update ', check ); console.assert( config.loglevel == "quiet", 'check & clean data before update ', check );
if( check.invalidefor.length > 0 ) { if( Checkjson.invalidefor.length > 0 ) {
return { return {
status: 403, status: 403,
data: { data: {
model: 'Pagans', model: 'Pagans',
info: check.invalidefor info: Checkjson.invalidefor
} }
}; };
} }
const clientConfig = fs.readJsonSync( `${config.tribes}/${header.xworkon}/clientconf.json` ); const clientConfig = fs.readJsonSync( `${config.tribes}/${header.xworkon}/clientconf.json` );
const user = check.data; const user = Checkjson.data;
user.DATE_CREATE = new Date() user.DATE_CREATE = new Date()
.toISOString(); .toISOString();
user.PASSWORD = bcrypt.hashSync( clientConfig.genericpsw, config.saltRounds ); user.PASSWORD = bcrypt.hashSync( clientConfig.genericpsw, config.saltRounds );
@ -502,8 +502,8 @@ Pagans.updateUser = ( UUID, header, data ) => {
.filter( e => emails[ e ] != user.UUID ); .filter( e => emails[ e ] != user.UUID );
// console.log( 'EMAIL list', EMAIL ); // console.log( 'EMAIL list', EMAIL );
// list.UUID est forcement unique car on est en update et pas en create // list.UUID est forcement unique car on est en update et pas en create
// pour la logique de checkdata il faut passer le parametre // pour la logique de Checkjson il faut passer le parametre
const check = checkdata.evaluate( { const Checkjson = Checkjson.evaluate( {
profil: user[ 'apps' + header.xworkon + 'profil' ], profil: user[ 'apps' + header.xworkon + 'profil' ],
list: { list: {
LOGIN, LOGIN,
@ -511,16 +511,16 @@ Pagans.updateUser = ( UUID, header, data ) => {
UUID: [] UUID: []
} }
}, ref, data ); }, ref, data );
if( check.invalidefor.length > 0 ) { if( Checkjson.invalidefor.length > 0 ) {
return { return {
status: 403, status: 403,
data: { data: {
model: 'Pagans', model: 'Pagans',
info: check.invalidefor, info: Checkjson.invalidefor,
} }
}; };
} }
data = check.data; data = Checkjson.data;
let saveuser = false; let saveuser = false;
let updateDatabase = false; let updateDatabase = false;
Object.keys( data ) Object.keys( data )

View File

@ -2,26 +2,30 @@ const fs = require( 'fs-extra' );
const path = require( 'path' ); const path = require( 'path' );
const dnsSync = require( 'dns-sync' ); const dnsSync = require( 'dns-sync' );
const Mustache = require( 'mustache' ); const Mustache = require( 'mustache' );
const Odmdb= require('./Odmdb.js'); const Nations = require('./Nations.js')
const Setup = {}; const Setup = {};
const nationsync = Nations.updateChains()
if (nationsync.status!=200){
console.log( '\x1b[31m Check your internet access, to setup this town we need to update the Nations. It seems we cannot do it' );
process.exit();
}
if( !fs.existsSync( '/etc/nginx/nginx.conf' ) ) { if( !fs.existsSync( '/etc/nginx/nginx.conf' ) ) {
console.log( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available' ); console.log( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available' );
process.exit(); process.exit();
} }
if( !fs.existsSync( `./tribes/townconf.json` ) ) { if( !fs.existsSync( './nationchains/tribes/index/conf.json' ) ){
console.log( `\x1b[42m####################################\nWellcome into apxtrib, this is a first install.\nWe need to make this server accessible from internet subdomain.domain to current IP. This setup will create your unique tribeid, with an admin login user to let you connect to the parameter interface.\nCheck README's project to learn more. more.\n#####################################\x1b[0m` ); console.log( `\x1b[42m####################################\nWellcome into apxtrib, this is a first install.\nWe need to make this server accessible from internet subdomain.domain to current IP. This setup will create your unique tribeid, with an admin login user to let you connect to the parameter interface.\nCheck README's project to learn more. more.\n#####################################\x1b[0m` );
const townSetup = fs.readJsonSync( './nationchains/socialworld/setup/townSetup.json') ; const townSetup = fs.readJsonSync( './app/setup/townSetup.json') ;
console.log( `Current setup conf from :./nationchains/socialworld/setup/townSetup.json\nChange with relevant setup data and rerun yarn setup` ) ; console.log( `Current setup conf from :./app/setup/townSetup.json\nChange with relevant setup data and rerun yarn setup` ) ;
console.log( townSetup ) console.log( townSetup )
const readline = require( 'readline' ); const readline = require( 'readline' );
const rl = readline.createInterface( { const rl = readline.createInterface( {
input: process.stdin, input: process.stdin,
output: process.stdout output: process.stdout
} ); } );
rl.question( 'This is the data from ./app/setup/townSetup.json used, is it correct to use as first install (Yes/no)?', function ( rep1 ) {
rl.question( 'This is the data from setup/configsetup.json used, is it correct to use as first install (Yes/no)?', function ( rep1 ) {
let quest = `This is a production install, please check that ${townSetup.townName}.${townSetup.nationName}.${townSetup.dns} IP is well redirect to tour server`; let quest = `This is a production install, please check that ${townSetup.townName}.${townSetup.nationName}.${townSetup.dns} IP is well redirect to tour server`;
if( rep1 !== "Yes" ) process.exit( 0 ); if( rep1 !== "Yes" ) process.exit( 0 );
if( townSetup.dns == 'unchain' ) { if( townSetup.dns == 'unchain' ) {
@ -29,18 +33,21 @@ if( !fs.existsSync( `./tribes/townconf.json` ) ) {
} }
rl.question( quest + '\nAre you sure to set this? (Yes/no)', function ( rep2 ) { rl.question( quest + '\nAre you sure to set this? (Yes/no)', function ( rep2 ) {
if( rep2 == "Yes" ) { if( rep2 == "Yes" ) {
const check = Setup.check( townSetup ); const Checkjson = Setup.check( townSetup );
if( check == "" ) { if( Checkjson == "" ) {
const townconf=fs.readJsonSync('./nationchains/socialworld/setup/townSetup.json') const townconf=fs.readJsonSync('./app/setup/townconf.json')
// create tribes folder with townconf.json // create tribes folder with townconf.json
fs.outputJsonSync(`./tribes/townconf.json`,{...townSetup,...townconf},{spaces:2}) const towndata={...townSetup,...townconf};
const Nationchains = require('./Nationchains'); const Towns = require('./Towns');
const Tribes = require('./Tribes'); const Tribes = require('./Tribes');
const Pagans = require('./Pagans'); const Pagans = require('./Pagans');
townSetup.object="town" if (!towndata.mayorid ) Pagans.create
Nationschains.create(townSetup);
Towns.create('./nationchains','./nationchains','towns',{...townSetup,...townconf});
//Nationschains.create(townSetup);
Tribes.create(townSetup); Tribes.create(townSetup);
Pagans.create(townSetup);
// creer un lien symbolique vers /nationchains/ pour traiter les demandes via xworkon comme une tribe
//Setup.config( townSetup ); //Setup.config( townSetup );
} else { } else {
console.log( check ); console.log( check );
@ -59,7 +66,7 @@ if( !fs.existsSync( `./tribes/townconf.json` ) ) {
console.log( 'Carefull you have already a config.js that is running. If you want to change remove config.js file and run again yarn setup' ); console.log( 'Carefull you have already a config.js that is running. If you want to change remove config.js file and run again yarn setup' );
} }
Setup.check = conf => { Setup.Checkjson = conf => {
var rep = ""; var rep = "";
const nation_town=fs.readJsonSync('./nationchains/socialworld/objects/towns/searchindex/towns_nation_uuid.json'); const nation_town=fs.readJsonSync('./nationchains/socialworld/objects/towns/searchindex/towns_nation_uuid.json');
if (!ObjectKeys(nation_town).includes(conf.nationName)){ if (!ObjectKeys(nation_town).includes(conf.nationName)){
@ -68,8 +75,8 @@ Setup.check = conf => {
if (nation_town[conf.nationName].includes(conf.townName)){ if (nation_town[conf.nationName].includes(conf.townName)){
rep+=`This conf.townName already exist you have to find a unique town name per nation`; rep+=`This conf.townName already exist you have to find a unique town name per nation`;
} }
const getnation = Odmdb.get('./nationchains/socialworld/objects','towns',[conf.NationName];[nationId]) const getnation = Odmdb.get('./nationchains/socialworld/objects','towns',[conf.NationName],[nationId])
if getnation.data.notfound //if getnation.data.notfound
conf.language.forEach( l => { conf.language.forEach( l => {
if( ![ "fr", "en", "it", "de", "sp" ].includes( l ) ) { if( ![ "fr", "en", "it", "de", "sp" ].includes( l ) ) {
rep += l + " Only fr,en,it,de,sp are available \n"; rep += l + " Only fr,en,it,de,sp are available \n";

646
app/models/Toolsbox.js Executable file
View File

@ -0,0 +1,646 @@
/* eslint-disable no-useless-escape */
const fs = require("fs");
const path = require("path");
const bcrypt = require("bcrypt");
const moment = require("moment");
const utils = {};
console.log(
"Check in /utils/index.js to find usefull function for your dev.\n Feel free to send suggestion, code to maintainer of apxtrib project (see /package.json to get email).\n We'll add to the roadmap to add it."
);
/**
* EMAIL
*/
/* const validateEmail = email => {
const regExp = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return regExp.test(email);
};
const validatePassword = pwd => {
const regExp = new RegExp(
/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/
);
return regExp.test(pwd);
};
const filterInvalidInArray = (array, validate) =>
array ? array.filter(el => !validate(el)) : undefined; // return undefined when every elements is valid
/**
* POSTAL CODE
*/
/*
const validatePostalCode = postalCode =>
/(^\d{5}$)|(^\d{5}-\d{4}$)/.test(postalCode);
/**
* PHONE
*/
/* const validatePhoneNumber = phoneNumber =>
/((^0[1-9]|\+[0-9]{3})([-. ]?[0-9]{2}){4}$)/.test(phoneNumber);
const correctPhoneNumber = phone =>
phone[0] === '0' ? '+33' + phone.substr(1) : phone;
const Checkjson = (appProfil, referential, data) => {
// @TODO get a referentiel per object then check data validity and allowed access
// need to add referentiel manager
const invalidefor = [];
let updateDatabase = false;
Object.keys(data).forEach(field => {
switch (field) {
case 'token':
updateDatabase = true;
break;
case 'email':
if (!validateEmail(data.email)) {
invalidefor.push('ERREMAIL:' + field);
} else {
updateDatabase = true;
}
break;
case 'password':
if (!validatePassword(data.password)) {
invalidefor.push('ERRPWD:' + field);
} else {
data.password = bcrypt.hash(data.password, config.saltRounds);
updateDatabase = true;
}
break;
}
});
return { invalidefor, data, updateDatabase };
};
*/
//Permet d'attendre en milliseconde
// s'utilise avec async ()=>{
// await sleep(2000)
//}
// Await-able pause: inside an async function, `await utils.sleep(2000)`
// suspends the caller for `ms` milliseconds.
utils.sleep = (ms) => new Promise((done) => setTimeout(done, ms));
utils.normalize = {};
// Normalize a french phone number: strip spaces/dashes/dots and rewrite a
// leading 0 into the +33 international prefix.
// NOTE(review): `Checkjson` is never required by this module, so this call
// throws ReferenceError at runtime — confirm where Checkjson should come from.
// NOTE(review): `telephoenfr` looks like a typo for `telephonefr` — verify it
// against the actual key in Checkjson.schema.properties.format.
utils.normalize.telephonefr = (phone) => {
  phone = phone.trim().replace(/[- .]/g, "");
  if (
    Checkjson.schema.properties.format.telephoenfr(phone) &&
    phone.length == 10 &&
    phone[0] == "0"
  ) {
    phone = "+33 " + phone.substring(1);
  }
  return phone;
};
// Left-pad a number with zeros up to a fixed width of 10 characters;
// values already 10 characters or longer are returned unchanged.
utils.normalize.zfill10 = (num) => String(num).padStart(10, "0");
/**
 * Generate a random string ("mot de passe") of `nbpos` characters.
 * NOTE: uses Math.random(), which is NOT cryptographically secure — do not
 * use for security-sensitive secrets.
 * @param {number} nbpos number of characters to generate
 * @param {string} [fromchar] alphabet to draw from; defaults to
 *   "ABCDEFGHIJKLMNPQRSTUVWZY123456789"
 * @returns {string}
 */
utils.generemdp = (nbpos, fromchar) => {
  // fix: the original `const fromchar = ...` inside this if-block only
  // shadowed the parameter, so calling generemdp(n) without an alphabet
  // crashed on `fromchar.length` below
  if (!fromchar) {
    fromchar = "ABCDEFGHIJKLMNPQRSTUVWZY123456789";
  }
  let mdp = "";
  for (let i = 0; i < nbpos; i++) {
    const pos = Math.floor(Math.random() * fromchar.length);
    mdp += fromchar.substring(pos, pos + 1);
  }
  return mdp;
};
/**
 * Incremental counter persisted on disk (one json file per counter).
 * @param {string} filecpt folder where counter files live
 * @param {string} typeincrement counter name; the special value
 *   "ANNEESEMAINE" keeps one counter per ISO year+week and prefixes the
 *   returned value with YYYYWW
 * @returns {string} prefix + next counter value
 */
utils.generecompteur = (filecpt, typeincrement) => {
  let file = `${filecpt}/${typeincrement}.json`;
  let prefix = "";
  // fix: `=` (assignment, always truthy) was used instead of a comparison,
  // so every counter silently became an ANNEESEMAINE counter
  if (typeincrement === "ANNEESEMAINE") {
    file = `${filecpt}/${typeincrement}${moment().format(
      "YYYY"
    )}${moment().format("WW")}.json`;
    prefix = `${moment().format("YYYY")}${moment().format("WW")}`;
  }
  let num = 1;
  try {
    num = parseInt(fs.readFileSync(file, "utf8"), 10) + 1;
  } catch (err) {
    // first call for this counter: start at 1
    console.log("Nouveau compteur incrementale ", file);
  }
  // fix: fs.writeFileSync rejects a number payload (data must be a string,
  // Buffer or TypedArray) — persist it as a string
  fs.writeFileSync(file, String(num), "utf8");
  return prefix + num;
};
/**
* CSV
*/
/**
 * Serialize an array of flat json rows into a CSV string.
 * Only supports json = [{niv1: val, niv1: [liste of val]}].
 * @param {object[]} jsondata rows to serialize
 * @param {object} options {retln: line break, sep: cell separator,
 *   arraysplitsep: separator joining array values inside one cell,
 *   champs: [ordered list of fields to output],
 *   array: [fields whose value is an array],
 *   replacespecialcarJson2Csv: [[regexp, replacement]] applied to each cell —
 *   may be passed as a string, which is then eval'ed (trusted callers only)}
 * @param {Function} callback (err, csvString); err is non-null on empty input
 * @return {callback}
 */
utils.json2csv = (jsondata, options, callback) => {
  // uniquement json = [{niv1:val,niv1:[liste of val]}]
  // console.log('_________________________');
  // console.log(jsondata)
  // console.log('_________________________');
  if (jsondata.length == 0) {
    return callback("Empty json", null);
  }
  // default csv options
  if (!options.retln) options.retln = "\n";
  if (!options.sep) options.sep = ";";
  if (!options.arraysplitsep) options.arraysplitsep = ",";
  if (!options.replacespecialcarJson2Csv) {
    options.replacespecialcarJson2Csv = [];
  } else {
    if (typeof options.replacespecialcarJson2Csv == "string") {
      // allows passing regexes as a string
      // NOTE(review): eval on a caller-supplied string — trusted input only
      options.replacespecialcarJson2Csv = eval(
        options.replacespecialcarJson2Csv
      );
    }
  }
  let etat = "";
  let csv = "";
  let entete = "";
  let prem = true; // true while the header line is being built (first row only)
  for (const j in jsondata) {
    // console.log(jsondata[j])
    for (const c in options.champs) {
      if (prem) {
        entete += options.champs[c] + options.sep;
      }
      if (jsondata[j][options.champs[c]]) {
        if (options.array.indexOf(options.champs[c]) > -1) {
          // array field: join its values inside a single cell
          csv +=
            jsondata[j][options.champs[c]].join(options.arraysplitsep) +
            options.sep;
        } else {
          let currentValue = "";
          if (jsondata[j][options.champs[c]])
            currentValue += jsondata[j][options.champs[c]];
          options.replacespecialcarJson2Csv.forEach((re) => {
            //console.log(currentValue)
            // NOTE(review): the pair is applied re[1] -> re[0], the inverse of
            // csv2json — presumably to round-trip; confirm intended
            currentValue = currentValue.replace(re[1], re[0]);
          });
          csv += currentValue + options.sep;
        }
      } else {
        // missing field: keep column alignment with an empty cell
        csv += options.sep;
      }
    }
    // drop the trailing separator and close the line
    csv = csv.substring(0, csv.length - 1) + options.retln;
    if (prem) {
      prem = false;
      entete = entete.substring(0, entete.length - 1) + options.retln;
      // console.log(entete)
    }
  }
  // return entete + csv;
  // NOTE(review): `etat` is never modified, so the error branch below is
  // currently unreachable — confirm whether error accumulation was intended
  if (etat == "") {
    return callback(null, entete + csv);
  } else {
    return callback(etat, null);
  }
};
/**
 * Extract the header names from the first line of a CSV.
 * @param {string[]} lines csv content split into lines
 * @param {string} sep cell separator
 * @return {string[]} header names with surrounding double quotes stripped
 */
utils.getHeaders = (lines, sep) => {
  const firstLine = lines[0];
  return firstLine.split(sep).map((cell) => cell.replace(/"/g, ""));
};
/**
* [csv2json description]
* @param {object} csv object of csv file that has been read
* @param {object} options object containing csv options, headers, ...
{retln:'code de retour de ligne \n ou \n\r',
sep:'code to split cells',
champs:[ch1,ch2,...] catch only those field,
array:[ch1, ] can have more than one field champs with same name then data are push into an array }
* @param {Function} callback callback function
* @return {callback} - return an error if error, else return json
it convert a csv file into a json = [{field:value}]
Usage example:
fiche.csv2article = (err, fiche) => {
if (!err) {
console.log(fiche)
}
}
utils.csv2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
retln: "\n",
sep: ";",
champs: ["NOM", "OBJET", "ADRESSE_PRO", "CP_PRO", "VILLE_PRO", "ZONE", "PHONE_PRO", "HORAIRESDESC", "HORAIREDATA", "URL", "FACEBOOK", "INSTA", "EMAIL_PRO", "IMG", "TAG"],
array: ["TAG", "PHONE_PRO", "EMAIL_PRO"]
}, fiche.csv2article)
*/
/**
 * Return the csv text with every `car` found between two double quotes
 * replaced by `carremplacant`, so that quoted cells can safely contain the
 * separator. Escaped `""` sequences are preserved; line feeds occurring
 * inside a quoted cell are dropped.
 * @param {string} csv raw csv text
 * @param {string} car character/pattern to neutralize (interpolated into a
 *   RegExp — the caller must escape regex metacharacters)
 * @param {string} carremplacant replacement marker, e.g. "CARSEPARATOR"
 * @return {string}
 */
utils.replacecarbtweendblquote = (csv, car, carremplacant) => {
  /*
  return csv text with any car between 2 " replaced by carremplacant
  */
  let newcsv = "";
  let txtencours = ""; // buffer of the chunk currently being scanned
  let flagouvert = false; // true while inside a double-quoted cell
  const sepreg = new RegExp(`${car}`, "gmi");
  for (let j = 0; j < csv.length; j++) {
    if (csv[j] == '"') {
      if (flagouvert) {
        // trying to close a quoted chunk
        if (csv[j + 1] == '"') {
          // consecutive "" escape: keep it verbatim and skip the next char
          txtencours += '""';
          j++;
        } else {
          // genuine closing quote: neutralize the separator in the chunk
          flagouvert = false;
          newcsv += txtencours.replace(sepreg, carremplacant);
          txtencours = '"';
        }
      } else {
        // opening a quoted chunk
        flagouvert = true;
        // flush the preceding (unquoted) content into newcsv
        newcsv += txtencours;
        txtencours = '"';
      }
    } else if (csv[j] !== "\n") {
      txtencours += csv[j];
    } else if (csv[j] == "\n") {
      // keep line feeds only outside quoted cells
      if (!flagouvert) txtencours += "\n";
    }
  }
  return newcsv + txtencours;
};
/**
 * Debug helper: dump `string` to the console in slices of ~21 characters,
 * printing each character's char code next to the text. Line feeds are shown
 * as [RL]. Useful to spot invisible/mis-encoded characters in a csv export.
 * @param {string} string text to analyse
 */
utils.analysestring = (string) => {
  let buftxt = "";
  let bufcode = "";
  let i = 0;
  let avecRL = false; // true when the current slice contains a line feed
  for (let p = 0; p < string.length; p++) {
    if (string[p].charCodeAt() == 10) {
      buftxt += "[RL]";
      avecRL = true;
    } else {
      buftxt += string[p];
    }
    bufcode += "-" + string[p].charCodeAt();
    if (i == 20) {
      // slice complete: print it and reset the buffers
      if (avecRL) {
        console.log(`${buftxt} - ${bufcode}`);
      } else {
        console.log(`${buftxt} ---- ${bufcode}`);
      }
      i = 0;
      buftxt = "";
      bufcode = "";
      avecRL = false;
    }
    i++;
  }
};
const txtstring = `32932,BK_F2F_B_COM_10x1H-09,"My Communication Workshop ""Session N°9 - 1H""","<p>&nbsp;</p>
<table>
<tbody>
<tr>
<td>
<p>Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking</p>
</td>
</tr>
</tbody>
</table>",,english,2,0,,2,0,classroom,"0000-00-00 00:00:00","0000-00-00 00:00:00",0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32932,1101,,"BUSINESS KEYS",0,
32933,BK_F2F_B_COM_10x1H-10,"My Communication Workshop Session N°10 - 1H","<p>&nbsp;</p>
<table>
<tbody>
<tr>
<td>
<p>Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking</p>
</td>
</tr>
</tbody>
</table>",,english,2,0,,2,0,classroom,"0000-00-00 00:00:00","0000-00-00 00:00:00",0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32933,1101,,"BUSINESS KEYS",0,
32934,BK_F2F_B_JOB_10x1H-01,"My Job Search Workshop Session N°1 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32934,1108,,,0,
32935,BK_F2F_B_JOB_10x1H-02,"My Job Search Workshop Session N°2 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32935,1108,,,0,`;
//utils.analysestring(txtstring)
//console.log(utils.replacecarbtweendblquote(txtstring, ",", 'CARSEPARATOR')
// .split("\n")[0].split(","))
/**
 * Convert a csv string into json rows: [{header: value, ...}].
 * See the usage block above `utils.replacecarbtweendblquote` for the full
 * `options` description (retln, sep, champs, array, arraysplitsep,
 * numericfield, replacespecialcarCsv2Json).
 * @param {string} csv raw csv content
 * @param {object} options csv options and the list of expected headers
 * @param {Function} callback (err, rows)
 * @return {callback}
 */
utils.csv2json = (csv, options, callback) => {
  // If a stubborn Excel file misbehaves: open it in LibreOffice Calc and save
  // as csv utf8, ; separator, " quoting, "save cell content as shown"
  console.log("\n--------------- CSV2JSON ---------------\n");
  // Default CSV options
  if (!options.retln) options.retln = "\n";
  if (csv.indexOf("\n\r") > -1) options.retln = "\n\r";
  if (!options.sep) options.sep = ";";
  // neutralize separators occurring inside quoted text cells
  //const regseptext = new RegExp(`${options.sep}(?!(?:[^"]*"[^"]*")*[^"]*$)`, 'gm');
  //csv = csv.replace(regseptext, "CARACSEPAR");
  // csv = utils.replacecarbtweendblquote(csv, options.retln, "RETLIGNE")
  csv = utils.replacecarbtweendblquote(csv, options.sep, "CARSEPARATOR");
  if (!options.replacespecialcarCsv2Json) {
    options.replacespecialcarCsv2Json = [];
  } else {
    if (typeof options.replacespecialcarCsv2Json == "string") {
      // allows passing regexes as a string
      // NOTE(review): eval on a caller-supplied string — trusted input only
      options.replacespecialcarCsv2Json = eval(
        options.replacespecialcarCsv2Json
      );
    }
  }
  const result = [];
  const lines = csv.split(options.retln);
  const headers = utils.getHeaders(lines, options.sep);
  let unknownHeaders = "";
  //console.log('headers', headers)
  //console.log('options.champs', options.champs)
  headers.forEach((header) => {
    // any header absent from the predefined fields list is collected
    // and reported as an error below
    if (options.champs.indexOf(header) === -1) {
      unknownHeaders += `${header}, `;
    }
  });
  if (unknownHeaders !== "") {
    const errorMsg = `CSV2JSON() - Champs inconnus : ${unknownHeaders}`;
    return callback(errorMsg, null);
  }
  lines.forEach((line, index) => {
    // Skip headers line or empty lines
    if (index === 0 || line.replace(/\s/g, "").length === 0) {
      return;
    }
    // origincsv keeps the raw line for debugging
    const currentLineData = { origincsv: line, linenumber: index };
    const currentLine = line.split(options.sep); // Current string in the line
    for (let j = 0; j < headers.length; j++) {
      // when the cell is not empty
      if (currentLine[j]) {
        // clean the field value (reserved characters such as ; inside
        // labels, etc.)
        let currentValue = currentLine[j].trim();
        // restore the separator that was masked between double quotes
        currentValue = currentValue.replace("CARSEPARATOR", options.sep);
        options.replacespecialcarCsv2Json.forEach((re) => {
          currentValue = currentValue.replace(re[0], re[1]);
        });
        // EMAIL headers: strip every whitespace
        if (headers[j].includes("EMAIL")) {
          currentValue = currentLine[j].replace(/\s/g, "");
        }
        // numeric fields: normalize the decimal comma then check the value
        // NOTE(review): parseFloat never throws, so this catch is
        // unreachable — confirm whether a NaN check was intended
        if (options.numericfield.includes(headers[j])) {
          currentValue = currentLine[j].replace(/\,/g, ".");
          try {
            const test = parseFloat(currentValue);
          } catch (er) {
            return callback(
              `${headers[j]} contiens la valeur -${currentValue}- et devrait être numerique`,
              null
            );
          }
        }
        if (currentValue) {
          // A header flagged in options.array may appear several times in the
          // CSV; its values are accumulated into an array
          if (options.array && options.array.indexOf(headers[j]) > -1) {
            // create the array for this header on first use
            if (!currentLineData[headers[j]]) {
              currentLineData[headers[j]] = [];
            }
            if (options.arraysplitsep) {
              currentValue.split(options.arraysplitsep).forEach((v) => {
                currentLineData[headers[j]].push(v);
              });
            } else {
              currentLineData[headers[j]].push(currentValue);
            }
          } else {
            // a duplicated header not declared as an array is an error
            if (currentLineData[headers[j]]) {
              const errorMsg = `Le champ ${headers[j]} est présent plusieurs fois alors qu'il n'est pas spécifié comme étant un array !`;
              return callback(errorMsg, null);
            }
            currentLineData[headers[j]] = currentValue;
          }
        }
      }
    }
    result.push(currentLineData);
  });
  return callback(null, result);
};
/**
* [csvparam2json description]
* @param {object} csv object of csv file that has been read
* @param {object} options object containing csv options, headers, ...
{retln:'code de retour de ligne \n ou \n\r',
sep:'code to split cells',
champs:[ch1,ch2,...] catch only those field,
array:[ch1, ] can have more than one field champs with same name then data are push into an array }
* @param {Function} callback callback function
* @return {callback} - return an error if error, else return json
it converts a csv with 3 column col1;col2;col3 in a json in a tree
if in col1 we have __ => then it splits a leaf
col1 = xxxx__yyyy ; col2 = value ; col3 = comment that is ignored
return data = {xxxx:{yyyy:value}}
col1 = xxxx; col2 = value; col3 = comment ignored
return data = {xxxx:value}
Usage example:
fiche.csvparam2article = (err, fiche) => {
if (!err) {
console.log(fiche)
}
}
utils.csvparam2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
retln: "\n",
sep: ";",
champs: ["NOM", "OBJET", "ADRESSE_PRO", "CP_PRO", "VILLE_PRO", "ZONE", "PHONE_PRO", "HORAIRESDESC", "HORAIREDATA", "URL", "FACEBOOK", "INSTA", "EMAIL_PRO", "IMG", "TAG"],
array: ["TAG", "PHONE_PRO", "EMAIL_PRO"]
}, fiche.csv2article)
*/
/**
 * Convert a 3-column parameter csv (col1;col2;col3) into a json tree.
 * col1 may contain `options.seplevel` ("__" by default) to nest keys, with
 * the special leaf name ARRAY meaning "eval col2 and push the result into an
 * array"; col3 is a free comment and is ignored.
 *   col1 = xxxx__yyyy ; col2 = value  => {xxxx:{yyyy:value}}
 *   col1 = xxxx       ; col2 = value  => {xxxx:value}
 * String values "true"/"false" are converted to booleans.
 * @param {string} csv raw csv content
 * @param {object} options {retln, sep, seplevel, replacespecialcarCsv2Json}
 * @param {Function} callback (err, paramTree)
 * @return {callback}
 */
utils.csvparam2json = (csv, options, callback) => {
  console.log("\n--------------- CSVPARAM2JSON ---------------\n");
  let etat = "";
  const param = {};
  // fix: `result` receives the value produced by the eval() calls below;
  // declaring it here avoids creating an implicit global (and a crash in
  // strict mode)
  let result;
  if (!options.retln) {
    options.retln = "\n";
  }
  if (csv.indexOf("\n\r") > -1) {
    options.retln = "\n\r";
  }
  if (!options.sep) {
    options.sep = ";";
  }
  if (!options.seplevel) {
    options.seplevel = "__";
  }
  if (!options.replacespecialcarCsv2Json) {
    options.replacespecialcarCsv2Json = [];
  } else {
    if (typeof options.replacespecialcarCsv2Json == "string") {
      // allows passing regexes as a string
      // NOTE(review): eval on a caller-supplied string — trusted input only
      options.replacespecialcarCsv2Json = eval(
        options.replacespecialcarCsv2Json
      );
    }
  }
  const lines = csv.split(options.retln);
  for (let i = 0; i < lines.length; i++) {
    const infol = lines[i].split(options.sep);
    if (infol[0].length > 4 && infol.length < 2) {
      // first cell longer than 4 chars with fewer than 2 columns: broken line
      etat += `Erreur sur ${lines[i]} moins de 3 column separé par ${options.sep}`;
      continue;
    }
    // neutralize csv reserved characters (; ' etc.) in the value column
    if (infol[1] && infol[1] + "" == infol[1]) {
      options.replacespecialcarCsv2Json.forEach((re) => {
        infol[1] = infol[1].replace(re[0], re[1]);
      });
      // NOTE(review): /'|/g matches every single quote AND every empty
      // position, so '"' gets inserted between all characters — a character
      // (e.g. a curly apostrophe) was probably lost from this regex; kept
      // as-is pending confirmation of the original intent.
      infol[1] = infol[1].replace(/'|/g, '"');
      if (infol[1].toLowerCase() === "true") {
        infol[1] = true;
      } else if (infol[1].toLowerCase() === "false") {
        infol[1] = false;
      }
    }
    console.log(infol[1]);
    // skip empty lines
    if (infol[0] == "") continue;
    if (infol[0].indexOf(options.seplevel) == -1) {
      param[infol[0]] = infol[1];
      continue;
    } else {
      const arbre = infol[0].split(options.seplevel);
      switch (arbre.length) {
        case 1:
          param[arbre[0]] = infol[1];
          break;
        case 2:
          if (arbre[1] != "ARRAY") {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            param[arbre[0]][arbre[1]] = infol[1];
          } else {
            if (!param[arbre[0]]) param[arbre[0]] = [];
            // NOTE(review): eval of a csv cell — trusted input only
            eval("result=" + infol[1]);
            param[arbre[0]].push(result);
          }
          break;
        case 3:
          if (arbre[2] != "ARRAY") {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
            param[arbre[0]][arbre[1]][arbre[2]] = infol[1];
          } else {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = [];
            eval("result=" + infol[1]);
            param[arbre[0]][arbre[1]].push(result);
          }
          break;
        case 4:
          if (arbre[3] != "ARRAY") {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
            if (!param[arbre[0]][arbre[1]][arbre[2]])
              param[arbre[0]][arbre[1]][arbre[2]] = {};
            param[arbre[0]][arbre[1]][arbre[2]][arbre[3]] = infol[1];
          } else {
            if (!param[arbre[0]]) param[arbre[0]] = {};
            if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
            if (!param[arbre[0]][arbre[1]][arbre[2]])
              param[arbre[0]][arbre[1]][arbre[2]] = [];
            eval("result=" + infol[1]);
            param[arbre[0]][arbre[1]][arbre[2]].push(result);
          }
          // fix: `break` was inside the else branch above, letting the
          // non-ARRAY path fall through to default (harmless but misleading)
          break;
        default:
          break;
      }
    }
  }
  // fix: this debug log was unconditional and crashed with TypeError on any
  // csv that does not define catalogue__filtrecatalog__searchengine
  if (
    param["catalogue"] &&
    param["catalogue"]["filtrecatalog"] &&
    param["catalogue"]["filtrecatalog"]["searchengine"]
  ) {
    console.log(
      "kkkkkkkkkkkkkkkkkk",
      param["catalogue"]["filtrecatalog"]["searchengine"]
    );
  }
  if (etat == "") {
    // round-trip through JSON to return a plain serializable clone
    return callback(null, JSON.parse(JSON.stringify(param)));
  } else {
    return callback(etat, null);
  }
};
/**
 * Levenshtein edit distance between two sequences (strings, or arrays
 * compared element-by-element with ===).
 * Returns the minimum number of insertions, deletions and substitutions
 * needed to turn `a` into `b`.
 */
utils.levenshtein = (a, b) => {
  // Trivial cases: distance to an empty sequence is the other's length.
  if (a.length === 0) return b.length;
  if (b.length === 0) return a.length;
  // Keep the shorter sequence on the row axis so the working rows stay
  // O(min(|a|,|b|)) in size.
  let shorter = a;
  let longer = b;
  if (shorter.length > longer.length) {
    shorter = b;
    longer = a;
  }
  // previousRow[j] = distance between the first i-1 items of `longer`
  // and the first j items of `shorter`.
  let previousRow = [];
  for (let j = 0; j <= shorter.length; j++) previousRow.push(j);
  for (let i = 1; i <= longer.length; i++) {
    // First cell of the row: i deletions turn longer[0..i-1] into "".
    const currentRow = [i];
    for (let j = 1; j <= shorter.length; j++) {
      const substitutionCost = longer[i - 1] === shorter[j - 1] ? 0 : 1;
      currentRow.push(
        Math.min(
          previousRow[j] + 1, // deletion
          currentRow[j - 1] + 1, // insertion
          previousRow[j - 1] + substitutionCost // substitution or match
        )
      );
    }
    previousRow = currentRow;
  }
  return previousRow[shorter.length];
};
/**
 * True when at least one element of `array` is present in `arrayreferent`.
 * A missing (falsy) referent list means nothing can match.
 */
utils.testinarray = (array, arrayreferent) => {
  if (!arrayreferent) return false;
  return array.some((element) => arrayreferent.includes(element));
};
/*
DIRECTORY
*/
// True when `source` is a directory entry on disk (lstat: symlinks are
// reported as links, not followed).
const isDirectory = (source) => {
  const stats = fs.lstatSync(source);
  return stats.isDirectory();
};
// List the immediate sub-directories of `source`, each joined with the
// `source` prefix (same shape as the readdir names, but full paths).
const getDirectories = (source) => {
  const entries = fs.readdirSync(source);
  const fullPaths = entries.map((name) => path.join(source, name));
  return fullPaths.filter(isDirectory);
};
module.exports = utils;

13
app/models/Towns.js Normal file
View File

@ -0,0 +1,13 @@
const bcrypt = require( 'bcrypt' );
const fs = require( 'fs-extra' );
const glob = require( 'glob' );
const moment = require( 'moment' );
const jwt = require( 'jwt-simple' );
const UUID = require( 'uuid' );
const config = require( '../tribes/townconf.js' );
const Checkjson = require( `./Checkjson.js`);
const Towns = {};
module.exports= Towns;

View File

@ -9,11 +9,10 @@ const dnsSync = require( 'dns-sync' );
const jwt = require( 'jwt-simple' ); const jwt = require( 'jwt-simple' );
const moment = require( 'moment' ); const moment = require( 'moment' );
const UUID = require( 'uuid' ); const UUID = require( 'uuid' );
const Outputs = require( './Outputs.js' );
const Pagans = require( './Pagans.js' ); const Pagans = require( './Pagans.js' );
const config = require( '../tribes/townconf' ); const config = require( '../tribes/townconf' );
const checkdata = require( `../nationchains/socialworld/contracts/checkdata.js`); const Checkjson = require( `./Checkjson.js`);
/* /*
tribeid manager tribeid manager

View File

@ -1,5 +1,5 @@
{ {
"typedoesnnotexistinschema":"This type in your propertie is not manage by checkdata.js", "typedoesnnotexistinschema":"This type in your propertie is not manage by Checkjson.js",
"dataerrpropertie":"Check your data that not fit your schema rules propertie", "dataerrpropertie":"Check your data that not fit your schema rules propertie",
"dataerrpropertiesrequired":"This propertie is required and not present in your data" "dataerrpropertiesrequired":"This propertie is required and not present in your data"
} }

View File

@ -0,0 +1,4 @@
{
"schemanotfound":"Schema not found in {{fullpath}}",
"pathnamedoesnotexist":"ObjectPath or objectName does not exist {{fullpath}}"
}

View File

@ -2,21 +2,27 @@
Unit testing Unit testing
*/ */
const assert = require("assert"); const assert = require("assert");
const checkdata = require("../checkdata.js"); const Checkjson = require("../Checkjson.js");
const ut = { name: "checkdata" }; const ut = { name: "Checkjson" };
const schema = { const schema = {
$schema: "http://json-schema.org/schema#", $schema: "http://json-schema.org/schema#",
title: "Dummy schema to test checkdata.js", title: "Dummy schema to test Checkjson.js",
description: "Checkdata is use on server as well as into a browser", description: "Checkjson is use on server as well as into a browser",
$comment: "We change schema type on the fly to simplify the test", $comment: "We change schema type on the fly to simplify the test",
type: "Object", type: "object",
properties: { properties: {
totest: {}, totest: {},
}, },
}; };
const testproperties = [ const testproperties = [
{
name: "test0",
data: { totest: true },
properties: { totest: { type: "boolean" } },
status: 200
},
{ {
name: "test1", name: "test1",
data: { totest: "blabla" }, data: { totest: "blabla" },
@ -126,7 +132,7 @@ ut.testproperties = (options) => {
let msg = ""; let msg = "";
testproperties.forEach((t) => { testproperties.forEach((t) => {
schema.properties = t.properties; schema.properties = t.properties;
const res = checkdata.schema.data(schema, {}, t.data); const res = Checkjson.schema.data(schema, t.data);
if (res.status != t.status) { if (res.status != t.status) {
msg = (msg == "") ? "Unconsistent testproperties() name list: " : `${msg},`; msg = (msg == "") ? "Unconsistent testproperties() name list: " : `${msg},`;
if (options.verbose) { if (options.verbose) {
@ -140,7 +146,7 @@ ut.testproperties = (options) => {
}; };
ut.run = (options) => { ut.run = (options) => {
console.log("Test checkdata properties"); console.log("Test Checkjson properties");
ut.testproperties(options); ut.testproperties(options);
}; };
module.exports = ut; module.exports = ut;

View File

@ -0,0 +1,72 @@
/*
Unit testing
*/
const assert = require("assert");
const fs=require('fs-extra');
const path= require('path');
const Odmdb = require("../Odmdb.js");
const {generemdp} = require('../../nationchains/socialworld/contracts/toolsbox.js');
const ut = { name: "Odmdb" };
/*
We test only search and indexation here
Create Update Read and Delete are unit testing with specificities of each Object.
To do that we create in tmp a dummy data folder for a dummy schema object
*/
const schema = {
$schema: "http://json-schema.org/schema#",
title: "Dummy schema to test Checkjson.js",
description: "Checkjson is use on server as well as into a browser",
$comment: "We change schema type on the fly to simplify the test",
type: "object",
properties: {
uuid: {
type:"string",
format:"uuid",
default:"=uuid.v4()"
},
dtcreate:{
type:"string",
format:"datetime",
default:"=date.now()"
},
tag:{
type:"string",
enum:["t1","t2","t3"],
default:"t1"
},
info:{
type:"string",
minLength: 10,
default:"=generemdp(255,'ABCDEFGHIJKLM 12340')"
}
},
required:["uuid"],
apxprimarykey:"uuid",
apxuniquekey:["info"],
apxsearchindex:{
"uuid":{"list":[],"taginfo":['tag','info'],"all":""},
"info":{"uuid":['uuid']}
}
};
const obj={tag:"t1",info:"Lorem ipsum A"}
ut.createanobject=(schema,obj)=>{
const res={status:200,err:[]}
return res
}
ut.run = (options) => {
const objectPath=path.resolve(__dirname,'../../tmp/testobjects');
const schemaPath=path.resolve(__dirname,'../../tmp/testschema');
if (!fs.existsSync(objectPath)) fs.ensureDirSync(objectPath);
if (!fs.existsSync(schemaPath)) fs.ensureDirSync(schemaPath);
const createenvobj=Odmdb.setObject(schemaPath,objectPath,"objtest",schema,{},"en");
assert.deepEqual(createenvobj,{status:200},JSON.stringify(createenvobj));
const checkschema= Odmdb.schema(schemaPath,"objtest",true)
assert.deepEqual(checkschema.status,200,JSON.stringify(checkschema))
};
module.exports = ut;

View File

@ -30,7 +30,7 @@ router.put( '/:objectname/:uuid', checkHeaders, isAuthenticated, ( req, res ) =>
// if does not exist and accessright C then it create it with uuid // if does not exist and accessright C then it create it with uuid
// then if req.body.tplmessage => render email with data // then if req.body.tplmessage => render email with data
// No data management are done here, if need you can add plugin to create a workflow based object // No data management are done here, if need you can add plugin to create a workflow based object
// if need specific data check => req.body.callback={tribeidpugin,pluginname,function} will run pluginname.function(data) add data run specific stuf before saved the message object in /objectname/data.uuid_lg/json // if need specific data Checkjson => req.body.callback={tribeidpugin,pluginname,function} will run pluginname.function(data) add data run specific stuf before saved the message object in /objectname/data.uuid_lg/json
let result; let result;
console.log( "object", req.params.objectname ) console.log( "object", req.params.objectname )
if( req.params.objectname == 'notifications' ) { if( req.params.objectname == 'notifications' ) {

78
app/routes/odmdb.js Normal file
View File

@ -0,0 +1,78 @@
const express = require( 'express' );
const glob = require( 'glob' );
const path = require( 'path' );
// Classes
const Odmdb = require( '../models/Odmdb.js' );
// Middlewares
const checkHeaders = require( '../middlewares/checkHeaders' );
const isAuthenticated = require( '../middlewares/isAuthenticated' );
const hasAccessrighton = require( '../middlewares/hasAccessrighton' );
const router = express.Router();
router.get('/searchauth/:objectname/:question',checkHeaders,isAuthenticated,( req, res ) => {
/**
*
*
*/
console.log( 'route referentials get all language' + req.params.objectname + '-' + req.params.question );
const getref = Referentials.getref( true, req.params.source, req.params.idref, req.session.header.xworkon, req.session.header.xlang );
// Return any status the data if any erreur return empty object
res.jsonp( getref.payload.data );
} );
router.get('schema/:objectname', checkHeaders, isAuthenticated,(req,res)=>{
/**
* @api {get} /odmdb/schema/:objectname
* @apiName GetSchema
* @apiGroup Odmdb
*
* @apiUse apxHeader
*
* @apiParam {String} objectname Mandatory if headers.xworkon == nationchains then into ./nationchains/ else into ./tribes/xworkon/
*
* @apiError (404) {string} info a key word to understand not found schema
* @apiError (404) {string} ref an string to find referential to get description of info in xlang request
* @apiError (404) {object} [moreinfo} an object with element to render ref_lg.json[info] to understand error
*
* @apiSuccess (200) {object} data contains schema requested
*
*/
const fullpath = path.resolve(`${__dirname}/tribes/${req.session.header.xworkon}/schema/${req.params.pathobjectname}.json`);
if (fs.existsSync(fullpath)){
res.status(200).json(data:fs.readJsonSync(fullpath))
}else{
res.status(404).json(info:"schemanotfound", ref:"odmdb", moreinfo:{fullpath})
}
})
router.put('schema/:objectname', checkHeaders, isAuthenticated,(req,res)=>{
/**
* @api {put} /odmdb/schema/:objectname
* @apiName putSchema
* @apiGroup Odmdb
*
* @apiUse apxHeader
*
* @apiParam {String} objectname Mandatory if headers.xworkon == nationchains then into ./nationchains/ else into ./tribes/xworkon/
* @apiBody {string} schemapath where to store schema .../schema
* @apiBody {string} objectpath where to store object ...objectname/index/config.json
* @apiBody {json} schema content
* @apiBody {json} schemalang content in lg
* @apiBody {string} lang define which schemalg is (2 letters)
*
* @apiError (404) {string} info a key word to understand not found schema
* @apiError (404) {string} ref an string to find referential to get description of info in xlang request
* @apiError (404) {object} [moreinfo} an object with element to render ref_lg.json[info] to understand error
*
* @apiSuccess (200) {object} data contains schema requested
*
*/
const fullpath = path.resolve(`${__dirname}/tribes/${req.session.header.xworkon}/schema/${req.params.pathobjectname}.json`);
const set=Odmdb.setObject(path.resolve(`${__dirname}/tribes/${req.session.header.xworkon}`),)
if (fs.existsSync(fullpath)){
res.status(200).json(data:fs.readJsonSync(fullpath))
}else{
res.status(404).json(info:"schemanotfound", ref:"odmdb", moreinfo:{fullpath})
}
})
module.exports = router;

View File

@ -16,18 +16,48 @@ if( !fs.existsSync( '/etc/nginx/nginx.conf' ) ) {
console.log( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available, install then rerun yarn command.' ); console.log( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available, install then rerun yarn command.' );
process.exit(); process.exit();
} }
if( !fs.existsSync( './tribes/townconf.js' ) ) { if( !fs.existsSync( './nationchains/tribes/index/conf.json' ) ) {
console.log( `\x1b[42m#########################################################################\x1b[0m\n\x1b[42mWellcome into apxtrib, init your town and first tribe by 'yarn setup'. \x1b[0m \n\x1b[42mThen 'yarn dev' or 'yarn startpm2'. Check README's project to learn more.\x1b[0m\n\x1b[42m#########################################################################\x1b[0m` ); // this is a first installation of a dev or prod machine
const readline = require( 'readline' );
const rl = readline.createInterface( {
input: process.stdin,
output: process.stdout
} );
const townconf = fs.readJsonSync( './nationchains/www/setup/townconf.json') ;
townconf.sudoerUser=process.env.USER;
townconf.dirname=__dirname;
townconf.nginx.include.push(`${__dirname}/nationchains/**/nginx_*.conf`);
townconf.nginx.logs=`${__dirname}/nationchains/logs/nginx`;
townconf.nginx.website='setup';
townconf.nginx.fswww='nationchains/';//for a local tribe nationchains/tribes/tribeid
townconf.nginx.tribeid="town"
console.log(townconf)
rl.question( 'This is the first install from ./nationchains/www/setup/townconf.json used, this will change your nginx config (/etc/nginx.conf will be saved as /etc/nginxconf.saved) (Yes/no)?', function ( rep1 ) {
let quest = `This is a production install, please check that ${townconf.townName}.${townconf.nationName}.${townconf.dns} IP is well redirect to tour server`;
if( rep1 !== "Yes" ) process.exit( 0 );
//console.log(process.env)
// saved and change nginx conf
const mustache=require('Mustache');
fs.moveSync("/etc/nginx/nginx.conf","/etc/nginx/nginxconf.saved");
const tplnginxconf=fs.readFileSync("./nationchains/www/setup/nginx/nginx.conf.mustache","utf8");
fs.outputFileSync("/etc/nginx/nginx.conftest",mustache.render(tplnginxconf, townconf),"utf8")
const tplnginxwww=fs.readFileSync("./nationchains/www/setup/nginx/modelwebsiteconf.mustache","utf8");
fs.outputFileSync(`./${townconf.nginx.fswww}www/nginx_${townconf.nginx.website}.conf`,mustache.render(tplnginxwww, townconf),"utf8")
//restart nginx
//fs.outputJsonSync('./nationchains/tribes/index/conf.json',setupconf);
console.log( `\x1b[42m#########################################################################\x1b[0m\n\x1b[42mWellcome into apxtrib, init your town and first tribe by 'yarn setup'. \x1b[0m \n\x1b[42mThen 'yarn dev' or 'yarn startpm2' or 'yarn unittest'. Check README's project to learn more.\x1b[0m\n\x1b[42m#########################################################################\x1b[0m` );
process.exit(); process.exit();
} }
// config.js exist in any case from Setup.checkinit(); const config = require( './nationchains/tribes/index/conf.json' );
const config = require( './tribes/townconf.js' );
// Tribes allow to get local apxtrib instance context // Tribes allow to get local apxtrib instance context
// dataclient .tribeids [] .DOMs [] .routes (plugins {url:name route:path}) .appname {tribeid:[website]} // dataclient .tribeids [] .DOMs [] .routes (plugins {url:name route:path}) .appname {tribeid:[website]}
const dataclient = require( './models/Tribes' ) const dataclient = require( './app/models/Tribes' )
.init(); .init();
console.log( 'allowed DOMs to access to this apxtrib server: ', dataclient.DOMs ) console.log( 'allowed DOMs to access to this apxtrib server: ', dataclient.DOMs )
const app = express(); const app = express();
Object.keys(config.appset).forEach(p=>{ Object.keys(config.appset).forEach(p=>{
app.set(p,config.appset[p]) app.set(p,config.appset[p])
}) })

View File

@ -45,4 +45,4 @@
</div> </div>
<!-- /div --> <!-- /div -->
</div> </div>
<script src='js/simplebar.min.js'></script><script src='js/feather.min.js'></script><script src='js/bootstrap.js'></script><script src='js/axios.min.js'></script><script src='js/mustache.min.js'></script><script src='js/checkdata.js'></script><script src='js/auth.js'></script><script src='js/state.js'></script><script src='js/main.js'></script><script src='js/notification.js'></script></body></html> <script src='js/simplebar.min.js'></script><script src='js/feather.min.js'></script><script src='js/bootstrap.js'></script><script src='js/axios.min.js'></script><script src='js/mustache.min.js'></script><script src='js/Checkjson.js'></script><script src='js/auth.js'></script><script src='js/state.js'></script><script src='js/main.js'></script><script src='js/notification.js'></script></body></html>

View File

@ -166,4 +166,4 @@ value="Trze3aze!" placeholder="Mot de passe ou hash sur clé public" />
</div> </div>
<!-- /main --> <!-- /main -->
</main> </main>
<script src='js/simplebar.min.js'></script><script src='js/feather.min.js'></script><script src='js/bootstrap.js'></script><script src='js/axios.min.js'></script><script src='js/mustache.min.js'></script><script src='js/checkdata.js'></script><script src='js/auth.js'></script><script src='js/state.js'></script><script src='js/main.js'></script><script src='js/notification.js'></script><script src='js/auth.js'></script></body></html> <script src='js/simplebar.min.js'></script><script src='js/feather.min.js'></script><script src='js/bootstrap.js'></script><script src='js/axios.min.js'></script><script src='js/mustache.min.js'></script><script src='js/Checkjson.js'></script><script src='js/auth.js'></script><script src='js/state.js'></script><script src='js/main.js'></script><script src='js/notification.js'></script><script src='js/auth.js'></script></body></html>

View File

@ -38,7 +38,7 @@ pwa.auth = {};
// Refresh browser state if exist else get pwa.state defaults // Refresh browser state if exist else get pwa.state defaults
//pwa.state.ready( pwa.auth.check ); //pwa.state.ready( pwa.auth.check );
pwa.auth.check = () => { pwa.auth.Checkjson = () => {
if( pwa.state.data.login.isAuthenticated ) { if( pwa.state.data.login.isAuthenticated ) {
if( !pwa.auth.isAuthenticate() ) { if( !pwa.auth.isAuthenticate() ) {
// Then reinit local storage and refresh page // Then reinit local storage and refresh page
@ -161,8 +161,8 @@ pwa.auth.login = async function () {
PASSWORD: document.querySelector( "#signin input[name='password']" ) PASSWORD: document.querySelector( "#signin input[name='password']" )
.value .value
} }
console.log( 'check password', checkdata.test.password( "", data.PASSWORD ) ) console.log( 'check password', Checkjson.test.password( "", data.PASSWORD ) )
if( data.LOGIN.length < 4 || !checkdata.test.password( "", data.PASSWORD ) ) { if( data.LOGIN.length < 4 || !Checkjson.test.password( "", data.PASSWORD ) ) {
/*$("#loginpart p.msginfo") /*$("#loginpart p.msginfo")
.html("") .html("")
.fadeOut(2000)*/ .fadeOut(2000)*/

View File

@ -3,18 +3,18 @@ This module have to be independant of any external package
it is shared between back and front and is usefull it is shared between back and front and is usefull
to apply common check in front before sending it in back to apply common check in front before sending it in back
can be include in project with can be include in project with
<script src="https://apiback.maildigit.fr/js/checkdata.js"></script> <script src="https://apiback.maildigit.fr/js/Checkjson.js"></script>
or with const checkdata = require('../public/js/checkdata.js') or with const Checkjson = require('../public/js/Checkjson.js')
*/ */
// --## // --##
const checkdata = {}; const Checkjson = {};
// each checkdata.test. return true or false // each Checkjson.test. return true or false
checkdata.test = {}; Checkjson.test = {};
checkdata.test.emailadress = ( ctx, email ) => { Checkjson.test.emailadress = ( ctx, email ) => {
const regExp = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; const regExp = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return regExp.test( email ); return regExp.test( email );
}; };
@ -22,13 +22,13 @@ checkdata.test.emailadress = ( ctx, email ) => {
* @emaillist = "email1,email2, email3" * @emaillist = "email1,email2, email3"
* it check if each eamil separate by , are correct * it check if each eamil separate by , are correct
*/ */
checkdata.test.emailadresslist = ( ctx, emaillist ) => { Checkjson.test.emailadresslist = ( ctx, emaillist ) => {
//console.log(emaillist.split(',')) //console.log(emaillist.split(','))
if( emaillist.length > 0 ) { if( emaillist.length > 0 ) {
const emails = emaillist.split( ',' ); const emails = emaillist.split( ',' );
for( var i in emails ) { for( var i in emails ) {
//console.log(emails[i]) //console.log(emails[i])
if( !checkdata.test.emailadress( "", emails[ i ].trim() ) ) { if( !Checkjson.test.emailadress( "", emails[ i ].trim() ) ) {
return false return false
} }
} }
@ -36,19 +36,19 @@ checkdata.test.emailadresslist = ( ctx, emaillist ) => {
return true; return true;
}; };
checkdata.test.password = ( ctx, pwd ) => { Checkjson.test.password = ( ctx, pwd ) => {
const regExp = new RegExp( const regExp = new RegExp(
/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/ /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/
); );
return regExp.test( pwd ); return regExp.test( pwd );
}; };
checkdata.test.required = ( ctx, val ) => Checkjson.test.required = ( ctx, val ) =>
val != null && val != 'undefined' && val.length > 0; val != null && val != 'undefined' && val.length > 0;
checkdata.test.isNumber = ( ctx, n ) => typeof n === 'number'; Checkjson.test.isNumber = ( ctx, n ) => typeof n === 'number';
checkdata.test.isInt = ( ctx, n ) => n != '' && !isNaN( n ) && Math.round( n ) == n; Checkjson.test.isInt = ( ctx, n ) => n != '' && !isNaN( n ) && Math.round( n ) == n;
checkdata.test.isFloat = ( ctx, n ) => n != '' && !isNaN( n ) && Math.round( n ) != n; Checkjson.test.isFloat = ( ctx, n ) => n != '' && !isNaN( n ) && Math.round( n ) != n;
checkdata.test.unique = ( ctx, val ) => { Checkjson.test.unique = ( ctx, val ) => {
if( ctx.list[ ctx.currentfield ] ) { if( ctx.list[ ctx.currentfield ] ) {
return !ctx.list[ ctx.currentfield ].includes( val ); return !ctx.list[ ctx.currentfield ].includes( val );
} else { } else {
@ -56,13 +56,13 @@ checkdata.test.unique = ( ctx, val ) => {
return false; return false;
} }
}; };
checkdata.test.isDateDay = ( ctx, dateDay ) => true; Checkjson.test.isDateDay = ( ctx, dateDay ) => true;
/* checkdata.test.filterInvalidInArray = (array, validate) => /* Checkjson.test.filterInvalidInArray = (array, validate) =>
array ? array.filter(el => !validate(el)) : true; array ? array.filter(el => !validate(el)) : true;
// return true when every elements is valid // return true when every elements is valid
*/ */
checkdata.test.postalCode = ( ctx, postalCode ) => { Checkjson.test.postalCode = ( ctx, postalCode ) => {
if( postalCode.length == 0 ) return true; if( postalCode.length == 0 ) return true;
const regExp = new RegExp( /(^\d{5}$)|(^\d{5}-\d{4}$)/ ); const regExp = new RegExp( /(^\d{5}$)|(^\d{5}-\d{4}$)/ );
return regExp.test( postalCode ); return regExp.test( postalCode );
@ -70,7 +70,7 @@ checkdata.test.postalCode = ( ctx, postalCode ) => {
/** /**
* PHONE * PHONE
*/ */
checkdata.test.phoneNumber = ( ctx, phoneNumber ) => { Checkjson.test.phoneNumber = ( ctx, phoneNumber ) => {
if( phoneNumber.length == 0 ) return true; if( phoneNumber.length == 0 ) return true;
phoneNumber = phoneNumber.trim() phoneNumber = phoneNumber.trim()
.replace( /[- .]/g, '' ) .replace( /[- .]/g, '' )
@ -83,13 +83,13 @@ checkdata.test.phoneNumber = ( ctx, phoneNumber ) => {
* @phonelist = "phone1,phone2,phone3" * @phonelist = "phone1,phone2,phone3"
* it check if each phone separate by , are correct * it check if each phone separate by , are correct
*/ */
checkdata.test.phoneNumberlist = ( ctx, phonelist ) => { Checkjson.test.phoneNumberlist = ( ctx, phonelist ) => {
//console.log(emaillist.split(',')) //console.log(emaillist.split(','))
if( phonelist.length > 0 ) { if( phonelist.length > 0 ) {
const phones = phonelist.split( ',' ); const phones = phonelist.split( ',' );
for( var i in phones ) { for( var i in phones ) {
//console.log(emails[i]) //console.log(emails[i])
if( !checkdata.test.phoneNumber( "", phones[ i ].trim() ) ) { if( !Checkjson.test.phoneNumber( "", phones[ i ].trim() ) ) {
return false return false
} }
} }
@ -97,29 +97,29 @@ checkdata.test.phoneNumberlist = ( ctx, phonelist ) => {
return true; return true;
}; };
// checkdata.normalize take a correct data then reformat it to harmonise it // Checkjson.normalize take a correct data then reformat it to harmonise it
checkdata.normalize = {}; Checkjson.normalize = {};
checkdata.normalize.phoneNumber = ( ctx, phone ) => { Checkjson.normalize.phoneNumber = ( ctx, phone ) => {
phone = phone.trim() phone = phone.trim()
.replace( /[- .]/g, '' ); .replace( /[- .]/g, '' );
if( checkdata.test.phoneNumber( '', phone ) && phone.length == 10 && phone[ 0 ] == "0" ) { if( Checkjson.test.phoneNumber( '', phone ) && phone.length == 10 && phone[ 0 ] == "0" ) {
phone = '+33 ' + phone.substring( 1 ); phone = '+33 ' + phone.substring( 1 );
} }
return phone; return phone;
} }
checkdata.normalize.upperCase = ( ctx, txt ) => txt.toUpperCase(); Checkjson.normalize.upperCase = ( ctx, txt ) => txt.toUpperCase();
checkdata.normalize.lowerCase = ( ctx, txt ) => txt.toLowerCase(); Checkjson.normalize.lowerCase = ( ctx, txt ) => txt.toLowerCase();
// fixe 10 position et complete par des 0 devant // fixe 10 position et complete par des 0 devant
checkdata.normalize.zfill10 = ( ctx, num ) => { Checkjson.normalize.zfill10 = ( ctx, num ) => {
let s = num + ''; let s = num + '';
while( s.length < 10 ) s = '0' + s; while( s.length < 10 ) s = '0' + s;
return s; return s;
}; };
/*let tt = "+33 1 02.03 04 05"; /*let tt = "+33 1 02.03 04 05";
console.log(checkdata.test.phoneNumber('', tt)) console.log(Checkjson.test.phoneNumber('', tt))
console.log(checkdata.normalize.phoneNumber('', tt)) console.log(Checkjson.normalize.phoneNumber('', tt))
*/ */
checkdata.evaluate = ( contexte, referential, data ) => { Checkjson.evaluate = ( contexte, referential, data ) => {
/* /*
* contexte object {} with full info for evaluation * contexte object {} with full info for evaluation
* file referential path to get object to apply * file referential path to get object to apply
@ -148,13 +148,13 @@ checkdata.evaluate = ( contexte, referential, data ) => {
} else { } else {
if( objectdef[ field ].check ) { if( objectdef[ field ].check ) {
// check data with rule list in check // check data with rule list in check
objectdef[ field ].check.forEach( ctrl => { objectdef[ field ].Checkjson.forEach( ctrl => {
console.log( 'ctrl', ctrl ); console.log( 'ctrl', ctrl );
contexte.currentfield = field; contexte.currentfield = field;
if( !checkdata.test[ ctrl ] ) { if( !Checkjson.test[ ctrl ] ) {
invalidefor.push( 'ERR check function does not exist :' + ctrl + '___' + field ) invalidefor.push( 'ERR check function does not exist :' + ctrl + '___' + field )
} else { } else {
if( !checkdata.test[ ctrl ]( contexte, data[ field ] ) ) if( !Checkjson.test[ ctrl ]( contexte, data[ field ] ) )
invalidefor.push( 'ERR' + ctrl + '___' + field ); invalidefor.push( 'ERR' + ctrl + '___' + field );
} }
} ); } );
@ -181,4 +181,4 @@ checkdata.evaluate = ( contexte, referential, data ) => {
}; };
}; };
if( typeof module !== 'undefined' ) module.exports = checkdata; if( typeof module !== 'undefined' ) module.exports = Checkjson;

Some files were not shown because too many files have changed in this diff Show More