Global replacement of "console.log"

parent df0c86d420
commit 337c7e5e3c

apxtrib.js (37 changed lines)
@@ -2,6 +2,7 @@ const fs = require( 'fs-extra' );
 const bodyParser = require( 'body-parser' );
 const cors = require( 'cors' );
 const express = require( 'express' );
+const logger = require('./src/core/logger')

 /*******************************************

@@ -11,11 +12,11 @@ To have a quick understanding before doing deeply in source code
 *********************************************/
 // check setup
 if( !fs.existsSync( '/etc/nginx/nginx.conf' ) ) {
-console.log( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available, install then rerun yarn command.' );
+logger.info( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available, install then rerun yarn command.' );
 process.exit();
 }
 if( !fs.existsSync( './tribes/townconf.js' ) ) {
-console.log( `\x1b[42m####################################\nWellcome into apixtribe, you need to init your town by "yarn setup" the first time . \nCheck README's project to learn more. more.\n #####################################\x1b[0m` );
+logger.info( `\x1b[42m####################################\nWellcome into apixtribe, you need to init your town by "yarn setup" the first time . \nCheck README's project to learn more. more.\n #####################################\x1b[0m` );
 process.exit();
 }
 // config.js exist in any case from Setup.checkinit();
@@ -24,7 +25,7 @@ const config = require( './tribes/townconf.js' );
 // dataclient .tribeids [] .DOMs [] .routes (plugins {url:name route:path}) .appname {tribeid:[website]}
 const dataclient = require( './models/Tribes' )
 .init();
-console.log( 'allowed DOMs to access to this apixtribe server: ', dataclient.DOMs )
+logger.info( 'allowed DOMs to access to this apixtribe server: ', dataclient.DOMs )
 const app = express();
 app.set( 'trust proxy', true );
 // To set depending of data form or get size to send
@@ -33,12 +34,12 @@ app.use( bodyParser.urlencoded( config.bodyparse.urlencoded ) );
 app.use( express.json() )
 app.use( bodyParser.json( config.bodyparse.json ) );
 app.locals.tribeids = dataclient.tribeids;
-console.log( 'app.locals.tribeids', app.locals.tribeids );
+logger.info( 'app.locals.tribeids', app.locals.tribeids );
 // User token authentification and user init user search
 const datauser = require( './models/Pagans' )
 .init( dataclient.tribeids );
 app.locals.tokens = datauser.tokens;
-console.log( 'app.locals.tokens key ', Object.keys( app.locals.tokens ) )
+logger.info( 'app.locals.tokens key ', Object.keys( app.locals.tokens ) )
 // Cors management
 const corsOptions = {
 origin: ( origin, callback ) => {
@@ -47,25 +48,25 @@ const corsOptions = {
 } else if( origin.indexOf( 'chrome-extension' ) > -1 ) {
 callback( null, true );
 } else {
-//console.log( 'origin', origin )
+//logger.info( 'origin', origin )
 //marchais avant modif eslint const rematch = ( /^https?\:\/\/(.*)\:.*/g ).exec( origin )
 const rematch = ( /^https?:\/\/(.*):.*/g )
 .exec( origin )
-//console.log( rematch )
+//logger.info( rematch )
 let tmp = origin.replace( /http.?:\/\//g, '' )
 .split( '.' )

 if( rematch && rematch.length > 1 ) tmp = rematch[ 1 ].split( '.' );
-//console.log( 'tmp', tmp )
+//logger.info( 'tmp', tmp )
 let dom = tmp[ tmp.length - 1 ];
 if( tmp.length > 1 ) {
 dom = `${tmp[tmp.length-2]}.${tmp[tmp.length-1]}`
 }
-console.log( `origin: ${origin}, dom:${dom}, CORS allowed? : ${dataclient.DOMs.includes( dom )}` );
+logger.info( `origin: ${origin}, dom:${dom}, CORS allowed? : ${dataclient.DOMs.includes( dom )}` );
 if( dataclient.DOMs.includes( dom ) ) {
 callback( null, true )
 } else {
-console.log( `Origin is not allowed by CORS` );
+logger.info( `Origin is not allowed by CORS` );
 callback( new Error( 'Not allowed by CORS' ) );
 }
 }
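The corsOptions.origin callback in the hunk above strips the scheme and port from the incoming Origin header, keeps only the last two dot-separated labels, and allows the request when that domain is listed in dataclient.DOMs. A minimal standalone sketch of that parsing (the sample domains are hypothetical, not taken from the codebase):

const allowedDOMs = [ 'apixtribe.org', 'localhost' ]; // assumed shape of dataclient.DOMs

function domFromOrigin( origin ) {
  // same steps as the callback above: prefer the regex capture (host before the port),
  // otherwise strip the scheme, then keep the last two labels
  const rematch = ( /^https?:\/\/(.*):.*/g ).exec( origin );
  let tmp = origin.replace( /http.?:\/\//g, '' ).split( '.' );
  if( rematch && rematch.length > 1 ) tmp = rematch[ 1 ].split( '.' );
  let dom = tmp[ tmp.length - 1 ];
  if( tmp.length > 1 ) dom = `${tmp[tmp.length - 2]}.${tmp[tmp.length - 1]}`;
  return dom;
}

console.log( domFromOrigin( 'https://www.apixtribe.org:443' ) ); // 'apixtribe.org'
console.log( allowedDOMs.includes( domFromOrigin( 'http://evil.example.com' ) ) ); // false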
@@ -82,7 +83,7 @@ app.use( express.static( `${__dirname}/tribes/${config.mayorId}/www/cdn/public`,
 //Allow to public access a space dev delivered by apixtribe
 // this is just a static open route for dev purpose,
 // for production, we'll use a nginx static set to /www/app/appname
-/*console.log( `${config.dnsapixtribe}/space/tribeid/website`, dataclient.appname );
+/*logger.info( `${config.dnsapixtribe}/space/tribeid/website`, dataclient.appname );
 Object.keys( dataclient.appname )
 .forEach( cid => {
 dataclient.appname[ cid ].forEach( website => {
@@ -91,29 +92,29 @@ Object.keys( dataclient.appname )
 } );
 */
 // Routers add any routes from /routes and /plugins
-console.log( 'Routes available on this apixtribe instance' );
-console.log( dataclient.routes );
+logger.info( 'Routes available on this apixtribe instance' );
+logger.info( dataclient.routes );
 // prefix only use for dev purpose in production a proxy nginx redirect /app/ to node apixtribe

 dataclient.routes.forEach( r => {
 try {
 app.use( r.url, require( r.route ) );
 } catch ( err ) {
-console.log( `\x1b[31m!!! WARNING issue with route ${r.route} from ${r.url} check err if route is key then solve err, if not just be aware that this route won't work on your server. If you are not the maintainer and no turn around please contact the email maintainer.\x1b[0m` )
-console.log( 'raise err-:', err );
+logger.info( `\x1b[31m!!! WARNING issue with route ${r.route} from ${r.url} check err if route is key then solve err, if not just be aware that this route won't work on your server. If you are not the maintainer and no turn around please contact the email maintainer.\x1b[0m` )
+logger.info( 'raise err-:', err );
 }
 } )
 // Listen web server from config profil (dev prod, other)
 app.listen( config.porthttp, () => {
-console.log( `check in your browser that api works http://${config.dnsapixtribe}:${config.porthttp}` );
+logger.info( `check in your browser that api works http://${config.dnsapixtribe}:${config.porthttp}` );
 } );
 /*httpServer.setTimeout( config.settimeout );
 if( config.withssl == "YES" ) {
 const httpsServer = https.createServer( config.SSLCredentials, app );
 httpsServer.listen( config.port.https, () => {
-console.log( `check in your browser that api works https://${config.dnsapixtribe}:${config.port.https}` );
+logger.info( `check in your browser that api works https://${config.dnsapixtribe}:${config.port.https}` );
 } );
 httpsServer.setTimeout( config.settimeout );
 };*/

-console.log( "\x1b[42m\x1b[37m", "Made with love for people's freedom, enjoy !!!", "\x1b[0m" );
+logger.info( "\x1b[42m\x1b[37m", "Made with love for people's freedom, enjoy !!!", "\x1b[0m" );
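The only new dependency introduced in apxtrib.js is the require of './src/core/logger', whose implementation is not part of this diff. A minimal sketch of what such a module could look like, assuming a Winston-based logger (the real module may differ):

// src/core/logger.js -- hypothetical sketch, the actual module is not shown in this commit
const { createLogger, format, transports } = require( 'winston' );

module.exports = createLogger( {
  level: process.env.LOG_LEVEL || 'info',
  // splat() folds extra arguments into the log entry; the replaced console.log calls keep
  // a multi-argument style such as logger.info( 'app.locals.tribeids', app.locals.tribeids )
  format: format.combine( format.timestamp(), format.splat(), format.simple() ),
  transports: [ new transports.Console() ]
} );

Because the call sites keep console.log's label-plus-value argument style, whatever logger is used has to tolerate a second argument; with plain Winston that means enabling splat() or passing the value as metadata, otherwise the extra argument is silently dropped.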
@@ -31,7 +31,7 @@ const haveAccessrighttoanobject = ( req, res, next ) => {
 Action get = Read put = Update post = Create delete = Delete
 object = req.Urlpath.split(/)[0]
 */
-console.log( 'haveAccessrighttoanobject()?' );
+logger.info( 'haveAccessrighttoanobject()?' );
 // req.originalUrl contain /object/action/id object id to run action
 // req.route.methods ={ put:true, delete:true post:true, get:true }
 const objet = req.baseUrl.slice( 1 ); //contain /object
@@ -57,9 +57,9 @@ const haveAccessrighttoanobject = ( req, res, next ) => {
 .OWNBY;
 }
 }
-//console.log( req.session.header )
+//logger.info( req.session.header )
 if( req.session.header.xpaganid == config.devnoauthxuuid ) {
-console.log( 'haveAccessrighttoanobject yes cause dev test user' );
+logger.info( 'haveAccessrighttoanobject yes cause dev test user' );
 } else {
 // accessrights was load from isAuthenticated.js middleware to make it available in req.session.header to be used into route for specific access if needed mainly to filter data in the get request depending of profil and data accessright.
 if( Object.keys( req.session.header.accessrights.data )
@@ -74,11 +74,11 @@ const haveAccessrighttoanobject = ( req, res, next ) => {
 ( req.route.methods.put && droit.includes( 'U' ) ) ||
 ( req.route.methods.delete && droit.includes( 'D' ) ) ||
 ownby.includes( req.params.id ) ) {
-console.log( 'haveAccessrighttoanobject yes' )
+logger.info( 'haveAccessrighttoanobject yes' )
 } else if( req.route.methods.post && droit.includes( 'C' ) ) {
-console.log( 'haveAccessrighttoanobject yes create' );
+logger.info( 'haveAccessrighttoanobject yes create' );
 } else {
-console.log( 'haveAccessrighttoanobject no' )
+logger.info( 'haveAccessrighttoanobject no' )
 res.status( 403 )
 .send( {
 payload: {
@@ -9,7 +9,7 @@ Check que le header contient des éléments necessaire pour les
 routes utilisant tribeid / language / token / uuid
 */
 const checkHeaders = ( req, res, next ) => {
-//console.log( 'checkHeaders()' );
+//logger.info( 'checkHeaders()' );
 // These headers must be passed in the request
 // X-Auth and X-Uuid could have any true value
 // header is stored in req.app.locals.header to be pass to route
@@ -26,12 +26,12 @@ const checkHeaders = ( req, res, next ) => {
 req.session = {};
 const header = {};
 let missingheader = "";
-//console.log( 'avant validation headers', req.headers );
+//logger.info( 'avant validation headers', req.headers );
 //attention changement 7/11/2021 phil des exposedheader cf config.js
 //If in httprequest url header are send then they are used inpriority
 //Use case : send an email with a unique link that works without password and request to change password
 for( const h of config.exposedHeaders ) {
-//console.log( h, req.header( h ) )
+//logger.info( h, req.header( h ) )
 if( req.params[ h ] ) {
 header[ h ] = req.params[ h ]
 } else if( req.header( h ) ) {
@@ -41,7 +41,7 @@ const checkHeaders = ( req, res, next ) => {
 missingheader += " " + h
 }
 };
-//console.log( 'header', header )
+//logger.info( 'header', header )
 if( req.params.xauth && req.params.xuuid ) {
 // If this exist => it is a timeout limited token
 req.app.locals.tokens[ req.params.xpaganid ] = req.params.xauth;
@@ -56,7 +56,7 @@ const checkHeaders = ( req, res, next ) => {
 moreinfo: 'checkHeader headerIsMissing:' + missingheader
 } );
 };
-//console.log( req.app.locals.tribeids )
+//logger.info( req.app.locals.tribeids )
 if( !req.app.locals.tribeids.includes( header.xtribe ) ) {
 return res.status( 404 )
 .send( {
@@ -81,8 +81,8 @@ const checkHeaders = ( req, res, next ) => {
 moreinfo: `xlang unknown: ${header.xlang}`
 } );
 }
-//console.log( 'After middleare checkHeaders.js req.session.header', req.session.header )
-//console.log( 'checkheaders next' )
+//logger.info( 'After middleare checkHeaders.js req.session.header', req.session.header )
+//logger.info( 'checkheaders next' )
 next();
 };
 module.exports = checkHeaders;
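checkHeaders builds req.session.header from the names listed in config.exposedHeaders, taking each value from req.params first and then from the request headers; it rejects the call when one is missing and returns a 404 when header.xtribe is not in req.app.locals.tribeids. A hypothetical client call that would satisfy it (the exact list lives in config.exposedHeaders, which is not shown here; these names are inferred from how the middlewares read them, and all values are placeholders):

fetch( 'https://example.apixtribe.org/users/uuid-1234', {
  headers: {
    xtribe: 'mytribeid',   // must be present in req.app.locals.tribeids
    xworkon: 'mytribeid',
    xlang: 'fr',
    xpaganid: 'uuid-1234',
    xauth: '<jwt token>',
    xuuid: 'uuid-1234'
  }
} )
  .then( res => res.json() )
  .then( data => { /* data already filtered by the accessrights middlewares */ } );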
@@ -14,8 +14,8 @@ const hasAccessrighton = ( object, action, ownby ) => {
 Careffull if you have many action CRO let O at the end this will force req.right at true if the owner try an action on this object
 */
 return ( req, res, next ) => {
-//console.log( 'err.stack hasAccessrights', err.statck )
-//console.log( `test accessright on object:${object} for ${req.session.header.xworkon}:`, req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ] )
+//logger.info( 'err.stack hasAccessrights', err.statck )
+//logger.info( `test accessright on object:${object} for ${req.session.header.xworkon}:`, req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ] )
 req.right = false;
 if( req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ] && req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS.data[ req.session.header.xworkon ][ object ] ) {
 req.right = true;
@@ -27,7 +27,7 @@ const hasAccessrighton = ( object, action, ownby ) => {
 }
 } )
 }
-//console.log( 'Access data autorise? ', req.right )
+//logger.info( 'Access data autorise? ', req.right )
 if( !req.right ) {
 return res.status( 403 )
 .send( {
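hasAccessrighton( object, action, ownby ) returns an Express middleware that checks the caller's ACCESSRIGHTS for the given object and action letters (C, R, U, D, with an O at the end for owner-only rights, e.g. 'CRO'), and it relies on checkHeaders and isAuthenticated having run first. A hypothetical route file showing that wiring; the require paths and the handler are assumptions, not taken from the repository:

const express = require( 'express' );
const checkHeaders = require( '../middlewares/checkHeaders' );
const isAuthenticated = require( '../middlewares/isAuthenticated' );
const hasAccessrighton = require( '../middlewares/hasAccessrighton' );

const router = express.Router();

// 'R' = Read; the chain rejects with 403 when the accessright letter is missing
router.get( '/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), ( req, res ) => {
  res.send( { status: 200, payload: { data: {} } } );
} );

module.exports = router;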
@ -22,31 +22,31 @@ const isAuthenticated = ( req, res, next ) => {
|
||||
a xuuid can read any objet if R
|
||||
if O wner means that it can only read write its object create by himself
|
||||
*/
|
||||
console.log( 'isAuthenticated()?' );
|
||||
//console.log( 'req.app.locals.tokens', req.app.locals.tokens )
|
||||
//console.log( 'req.session.header', req.session.header );
|
||||
logger.info( 'isAuthenticated()?' );
|
||||
//logger.info( 'req.app.locals.tokens', req.app.locals.tokens )
|
||||
//logger.info( 'req.session.header', req.session.header );
|
||||
// Check if token exist or not
|
||||
req.session.header.accessrights = { app: "", data: {} }
|
||||
if( req.session.header.xpaganid == config.devnoauthxuuid && req.session.header.xauth == config.devnoauthxauth ) {
|
||||
console.log( 'isAuthenticated yes: carrefull using a bypass password give you accessrights={}' );
|
||||
logger.info( 'isAuthenticated yes: carrefull using a bypass password give you accessrights={}' );
|
||||
} else if( req.session.header.xpaganid == "1" || !req.app.locals.tokens[ req.session.header.xpaganid ] ) {
|
||||
console.log( `isAuthenticated no : uuid=1 (value=${req.session.header.xpaganid}) or locals.tokens[uuid] empty ` );
|
||||
console.log( 'req.app.locals.tokens de xpaganid', req.app.locals.tokens[ req.session.header.xpaganid ] );
|
||||
console.log( 'list key uuid de req.app.locals.tokens', Object.keys( req.app.locals.tokens ) )
|
||||
logger.info( `isAuthenticated no : uuid=1 (value=${req.session.header.xpaganid}) or locals.tokens[uuid] empty ` );
|
||||
logger.info( 'req.app.locals.tokens de xpaganid', req.app.locals.tokens[ req.session.header.xpaganid ] );
|
||||
logger.info( 'list key uuid de req.app.locals.tokens', Object.keys( req.app.locals.tokens ) )
|
||||
req.session.header.xauth = "1"
|
||||
} else if( req.app.locals.tokens[ req.session.header.xpaganid ].TOKEN !== req.session.header.xauth ) {
|
||||
// console.log(req.session.header.xuuid);
|
||||
// console.log(req.session.header.xauth);
|
||||
// logger.info(req.session.header.xuuid);
|
||||
// logger.info(req.session.header.xauth);
|
||||
// update tokens from file in case recently logged
|
||||
try {
|
||||
console.log( 'token not in list of token (req.app.locals.tokens) try to refresh from file' );
|
||||
logger.info( 'token not in list of token (req.app.locals.tokens) try to refresh from file' );
|
||||
req.app.locals.tokens = jsonfile.readFileSync( `${config.tmp}/tokens.json` );
|
||||
} catch ( err ) {
|
||||
console.log( `check isAuthenticated issue in reading ${config.tmp}/tokens.json` );
|
||||
logger.info( `check isAuthenticated issue in reading ${config.tmp}/tokens.json` );
|
||||
}
|
||||
if( req.app.locals.tokens[ req.session.header.xpaganid ].TOKEN !== req.session.header.xauth ) {
|
||||
// if still does not exist then out
|
||||
console.log( 'isAuthenticated no, token outdated' );
|
||||
logger.info( 'isAuthenticated no, token outdated' );
|
||||
req.session.header.xauth = "1"
|
||||
req.session.header.xpaganid = "1"
|
||||
}
|
||||
@ -60,9 +60,9 @@ const isAuthenticated = ( req, res, next ) => {
|
||||
moreinfo: 'isAuthenticated faill'
|
||||
} )
|
||||
} else {
|
||||
console.log( 'isAuthenticated yes' );
|
||||
logger.info( 'isAuthenticated yes' );
|
||||
if( req.app.locals.tokens[ req.session.header.xpaganid ] ) {
|
||||
//console.log( `accessright pour ${req.session.header.xpaganid}`, req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS );
|
||||
//logger.info( `accessright pour ${req.session.header.xpaganid}`, req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS );
|
||||
//set header.accessrights from tokens.json
|
||||
req.session.header.accessrights = req.app.locals.tokens[ req.session.header.xpaganid ].ACCESSRIGHTS
|
||||
} else {
|
||||
@ -72,13 +72,13 @@ const isAuthenticated = ( req, res, next ) => {
|
||||
// Once per day, clean old token
|
||||
const currentday = moment()
|
||||
.date();
|
||||
console.log( 'test si menagedone' + currentday, !fs.existsSync( `${config.tmp}/menagedone${currentday}` ) )
|
||||
logger.info( 'test si menagedone' + currentday, !fs.existsSync( `${config.tmp}/menagedone${currentday}` ) )
|
||||
if( !fs.existsSync( `${config.tmp}/menagedone${currentday}` ) ) {
|
||||
glob.sync( `${config.tmp}/menagedone*` )
|
||||
.forEach( f => {
|
||||
fs.remove( f, ( err ) => {
|
||||
if( err ) {
|
||||
console.log( 'err remove menagedone', err )
|
||||
logger.info( 'err remove menagedone', err )
|
||||
}
|
||||
} )
|
||||
} );
|
||||
@ -86,7 +86,7 @@ const isAuthenticated = ( req, res, next ) => {
|
||||
.forEach( f => {
|
||||
fs.remove( f, ( err ) => {
|
||||
if( err ) {
|
||||
console.log( 'err remove mdcreator log', err )
|
||||
logger.info( 'err remove mdcreator log', err )
|
||||
}
|
||||
} )
|
||||
} );
|
||||
@ -94,13 +94,13 @@ const isAuthenticated = ( req, res, next ) => {
|
||||
for( const k of Object.keys( req.app.locals.tokens ) ) {
|
||||
try {
|
||||
const decodedToken = jwt.decode( req.app.locals.tokens[ k ].TOKEN, config.jwtSecret );
|
||||
//console.log( moment( decodedToken.expiration ), moment() )
|
||||
//console.log( moment( decodedToken.expiration ) >= moment() )
|
||||
//logger.info( moment( decodedToken.expiration ), moment() )
|
||||
//logger.info( moment( decodedToken.expiration ) >= moment() )
|
||||
if( moment( decodedToken.expiration ) >= moment() ) {
|
||||
newtokens[ k ] = req.app.locals.tokens[ k ];
|
||||
}
|
||||
} catch ( err ) {
|
||||
console.log( "Check isAuthenticated cleaning token ", err );
|
||||
logger.info( "Check isAuthenticated cleaning token ", err );
|
||||
}
|
||||
};
|
||||
req.app.locals.tokens = newtokens;
|
||||
|
@ -31,7 +31,7 @@ Contracts.sendcampain = async ( param, envoicampain ) => {
|
||||
headers: param.headers
|
||||
} );
|
||||
if( retcampain.status !== 200 ) {
|
||||
console.log( "err", retcampain.payload.moreinfo );
|
||||
logger.info( "err", retcampain.payload.moreinfo );
|
||||
fs.appendFileSync( `${config.tribes}/log_erreurglobal.txt`, moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - IMPOSSIBLE TO SEND CAMPAIN TODO for :' + param.tribeid + ' -- ' + retcampain.payload.moreinfo + '\n', 'utf-8' );
|
||||
};
|
||||
@ -64,21 +64,21 @@ Contracts.initActiontodo = async ( envoie ) => {
|
||||
};
|
||||
const listclient = jsonfile.readFileSync( `${config.tribes}/tribeids.json` );
|
||||
for( let clid in listclient ) {
|
||||
console.log( listclient[ clid ] );
|
||||
logger.info( listclient[ clid ] );
|
||||
let listaction = glob.sync( `${config.tribes}/${listclient[clid]}/actions/todo/*.json` );
|
||||
for( let action in listaction ) {
|
||||
console.log( listaction[ action ] )
|
||||
logger.info( listaction[ action ] )
|
||||
log.nbaction++;
|
||||
todo = jsonfile.readFileSync( listaction[ action ] );
|
||||
let passdate = true;
|
||||
// currentdate doit etre après la startDate si existe et avant valideuntilDate si existe
|
||||
// console.log('test now est avant date start ', moment() < moment(todo.startDate, 'YYYYMMDD HH:mm:ss').toDate());
|
||||
// logger.info('test now est avant date start ', moment() < moment(todo.startDate, 'YYYYMMDD HH:mm:ss').toDate());
|
||||
if( todo.startDate && ( moment() < moment( todo.startDate, 'YYYYMMDD HH:mm:ss' )
|
||||
.toDate() ) ) {
|
||||
passdate = false;
|
||||
};
|
||||
// currentdate ne doit pas depasser la date de validité de la tache
|
||||
// console.log('test now est après la date de validite ', moment() > moment(todo.validuntilDate, 'YYYYMMDD HH:mm:ss').toDate());
|
||||
// logger.info('test now est après la date de validite ', moment() > moment(todo.validuntilDate, 'YYYYMMDD HH:mm:ss').toDate());
|
||||
if( todo.valideuntilDate && ( moment() > moment( todo.validuntilDate, 'YYYYMMDD HH:mm:ss' )
|
||||
.toDate() ) ) {
|
||||
passdate = false;
|
||||
@ -89,7 +89,7 @@ Contracts.initActiontodo = async ( envoie ) => {
|
||||
const actiondone = await Contracts[ todo.action ]( todo, envoie );
|
||||
todo.datesRun.push( moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) );
|
||||
//console.log("actiondone", actio jsonfile.writeFileSyncndone);
|
||||
//logger.info("actiondone", actio jsonfile.writeFileSyncndone);
|
||||
log.actionlist += "STATUS:" + actiondone.status + " -- " + listaction[ action ] + "\n";
|
||||
if( actiondone.status == 200 ) {
|
||||
todo.error = "";
|
||||
|
@ -42,7 +42,7 @@ Messages.byEmailwithmailjet = ( tribeid, msg ) => {
|
||||
tribeidsender/messages/logs/sent/timestamp.json
|
||||
@todo GUI to manage statistics and notification alert limit sender email
|
||||
*/
|
||||
console.log( 'Envoie mailjet' )
|
||||
logger.info( 'Envoie mailjet' )
|
||||
const confclient = fs.readJsonSync( `${config.tribes}/${tribeid}/clientconf.json` );
|
||||
let tribeidsender = tribeid;
|
||||
if( confclient.smtp && confclient.smtp.mailjet ) {
|
||||
@ -67,13 +67,13 @@ Messages.byEmailwithmailjet = ( tribeid, msg ) => {
|
||||
const t = Date.now();
|
||||
MSG.result = result.body;
|
||||
fs.outputJson( `${config.tribes}/${tribeidsender}/messages/logs/sent/${t}.json`, MSG )
|
||||
console.log( result.body )
|
||||
logger.info( result.body )
|
||||
} )
|
||||
.catch( err => {
|
||||
const t = Date.now();
|
||||
MSG.result = err;
|
||||
fs.outputJson( `${config.tribes}/${tribeidsender}/messages/logs/error/${t}.json`, MSG )
|
||||
console.log( err.statusCode, err )
|
||||
logger.info( err.statusCode, err )
|
||||
} )
|
||||
|
||||
};
|
||||
@ -110,7 +110,7 @@ Messages.buildemail = ( tribeid, tplmessage, data ) => {
|
||||
msg.To.push( { Email: uuidconf.EMAIL, Name: uuidconf.LOGIN } )
|
||||
}
|
||||
}
|
||||
//console.log( data )
|
||||
//logger.info( data )
|
||||
// email content
|
||||
msg.Subject = `Message from ${tribeid}`;
|
||||
if( data.subject ) msg.Subject = data.subject;
|
||||
@ -135,7 +135,7 @@ Messages.postinfo = ( data ) => {
|
||||
[ 'contactemail', 'contactphone', 'contactuuid', 'contactlogin' ].forEach( c => {
|
||||
if( data[ c ] ) contact += c + "##" + data[ c ] + "###";
|
||||
} )
|
||||
console.log( contact )
|
||||
logger.info( contact )
|
||||
if( contact == "" ) {
|
||||
return { status: 404, data: { model: "Messages", info: [ "contactundefine" ], moreinfo: "no contact field found in this form" } }
|
||||
}
|
||||
@ -177,13 +177,13 @@ Messages.aggregate = () => {
|
||||
try {
|
||||
glob.sync( `${ config.tribes }/**/notif_*.json` )
|
||||
.forEach( f => {
|
||||
//console.log( 'find ', f )
|
||||
//logger.info( 'find ', f )
|
||||
const repglob = `${path.dirname(f)}/global.json`;
|
||||
if( !dest[ repglob ] ) { dest[ repglob ] = [] }
|
||||
dest[ repglob ].push( fs.readJsonSync( f, 'utf-8' ) );
|
||||
fs.removeSync( f );
|
||||
} )
|
||||
//console.log( dest )
|
||||
//logger.info( dest )
|
||||
Object.keys( dest )
|
||||
.forEach( g => {
|
||||
let notif = [];
|
||||
@ -202,8 +202,8 @@ Messages.object = ( data, header ) => {
|
||||
data.descttribeid tribeid to send at least to admin
|
||||
data.tplmessage = folder of emailtemplate
|
||||
*/
|
||||
console.log( 'data', data )
|
||||
console.log( `${config.tribes}/${header.xworkon}/${data.object}` )
|
||||
logger.info( 'data', data )
|
||||
logger.info( `${config.tribes}/${header.xworkon}/${data.object}` )
|
||||
if( !fs.existsSync( `${config.tribes}/${header.xworkon}/${data.object}` ) ) {
|
||||
return {
|
||||
status: 404,
|
||||
@ -233,7 +233,7 @@ Messages.object = ( data, header ) => {
|
||||
fs.existsSync( `${config.tribes}/${data.tplmessage}` ) ) {
|
||||
const msg = Messages.buildemail( data.desttribeid, data.tplmessage, data )
|
||||
if( msg.status == 200 ) {
|
||||
console.log( 'WARN EMAIL DESACTIVATED CHANGE TO ACTIVATE in Messages.js' )
|
||||
logger.info( 'WARN EMAIL DESACTIVATED CHANGE TO ACTIVATE in Messages.js' )
|
||||
//Messages.byEmailwithmailjet( data.desttribeid, [ msg.data.msg ] );
|
||||
}
|
||||
// we get error message eventualy but email feedback sent is not in real time see notification alert in case of email not sent.
|
||||
@ -377,9 +377,9 @@ Messages.request = ( tribeid, uuid, ACCESSRIGHTS, apprequest ) => {
|
||||
const cliobjnot = `${config.tribes}/${c}/${o}/Messages/global.json`
|
||||
//check for each tribeid / Object per accessright user
|
||||
if( fs.existsSync( cliobjnot ) ) {
|
||||
console.log( `droit sur client ${c} objet ${o} : ${ACCESSRIGHTS.data[ c ][ o ]}` );
|
||||
logger.info( `droit sur client ${c} objet ${o} : ${ACCESSRIGHTS.data[ c ][ o ]}` );
|
||||
//check if intersection between user accessrigth for this object and the notification accessright is not empty @Todo replace true by intersec
|
||||
console.log( 'WARN no actif filter all notif are shared with any authenticated user' )
|
||||
logger.info( 'WARN no actif filter all notif are shared with any authenticated user' )
|
||||
const newnotif = fs.readJsonSync( cliobjnot )
|
||||
.filter( n => { return true } );
|
||||
notif.notifs = notif.notifs.concat( newnotif );
|
||||
|
@@ -49,7 +49,7 @@ Nationchains.synchronize = () => {
 try {
 currentinstance = fs.readFileSync( `${config.tribes}/${config.mayorId}/nationchains/nodes/${config.rootURL}`, 'utf-8' )
 } catch ( err ) {
-console.log( 'first init' )
+logger.info( 'first init' )
 }
 const loginsglob = fs.readJsonSync( `${config.tmp}/loginsglob.json`, 'utf-8' );
 currentinstance.logins = Object.keys( loginsglob );
@ -37,15 +37,15 @@ Outputs.ggsheet2json = async ( req, header ) => {
|
||||
const confgg = fs.readJsonSync( `${config.tribes}/${header.xworkon}/${req.ggsource}`, 'utf-8' )
|
||||
//const ggconnect = clientconf.ggsheet[ req.ggsource ]
|
||||
//googleDriveCredentials;
|
||||
//console.log( ggconnect )
|
||||
//logger.info( ggconnect )
|
||||
doc = new GoogleSpreadsheet( confgg.productIdSpreadsheet );
|
||||
await doc.useServiceAccountAuth( confgg.googleDriveCredentials );
|
||||
await doc.loadInfo();
|
||||
let result = [];
|
||||
let invalidfor = "";
|
||||
//console.log( "sheets", req.sheets );
|
||||
//logger.info( "sheets", req.sheets );
|
||||
for( const sheetName of req.sheets ) {
|
||||
console.log( 'loading: ', sheetName );
|
||||
logger.info( 'loading: ', sheetName );
|
||||
if( !doc.sheetsByTitle[ sheetName ] ) {
|
||||
invalidfor += " " + sheetName;
|
||||
} else {
|
||||
@ -98,16 +98,16 @@ csv => export json file to csv data
|
||||
pdf => generate a customized document
|
||||
*/
|
||||
Outputs.envoiemail = async ( msg, nowait, nbfois ) => {
|
||||
// console.log('{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}');
|
||||
// console.log('msg to send', msg);
|
||||
console.log( 'nbfois', nbfois );
|
||||
// logger.info('{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}');
|
||||
// logger.info('msg to send', msg);
|
||||
logger.info( 'nbfois', nbfois );
|
||||
let transporter = nodemailer.createTransport( msg.smtp );
|
||||
if( !nowait ) {
|
||||
console.log( 'attente 1er msg avant d envoyer les autres' );
|
||||
logger.info( 'attente 1er msg avant d envoyer les autres' );
|
||||
const transport = await transporter.verify();
|
||||
console.log( 'transport', transport );
|
||||
logger.info( 'transport', transport );
|
||||
if( transport.error ) {
|
||||
console.log( 'Probleme de smtp', error );
|
||||
logger.info( 'Probleme de smtp', error );
|
||||
return {
|
||||
status: 500,
|
||||
payload: {
|
||||
@ -116,7 +116,7 @@ Outputs.envoiemail = async ( msg, nowait, nbfois ) => {
|
||||
};
|
||||
} else {
|
||||
let rep = await transporter.sendMail( msg );
|
||||
console.log( 'rep sendMail', rep );
|
||||
logger.info( 'rep sendMail', rep );
|
||||
if( rep.accepted && rep.accepted.length > 0 && rep.rejected.length == 0 ) {
|
||||
fs.appendFileSync( `${config.tribes}/${msg.headers['x-client-nd-id']}/logs/${msg.headers['x-campaign-id']}_success.txt`, moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - Success after waiting 1st email to send ' + msg.to + '\n', 'utf-8' );
|
||||
@ -144,19 +144,19 @@ Outputs.envoiemail = async ( msg, nowait, nbfois ) => {
|
||||
transporter.sendMail( msg, async ( err, info ) => {
|
||||
if( err ) {
|
||||
if( nbfois < 4 ) {
|
||||
console.log( 'nouvelle tentative ', nbfois );
|
||||
logger.info( 'nouvelle tentative ', nbfois );
|
||||
await sleep( 600000 ); // attends 60sec pour faire une niéme tentative
|
||||
Outputs.envoiemail( msg, true, nbfois + 1 );
|
||||
} else {
|
||||
// logerror in file
|
||||
console.log( 'err', err )
|
||||
logger.info( 'err', err )
|
||||
fs.appendFileSync( `${config.tribes}/${msg.headers['x-client-nd-id']}/logs/${msg.headers['x-campaign-id']}_error.txt`, moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - err after 4 tries to ' + info.rejected.join( ',' ) + '\n', 'utf-8' );
|
||||
// console.log('msg.to not well sent', msg.to);
|
||||
// logger.info('msg.to not well sent', msg.to);
|
||||
}
|
||||
} else {
|
||||
console.log( 'info', info )
|
||||
// console.log('msg.to well sent', msg.to);
|
||||
logger.info( 'info', info )
|
||||
// logger.info('msg.to well sent', msg.to);
|
||||
fs.appendFileSync( `${config.tribes}/${msg.headers['x-client-nd-id']}/logs/${msg.headers['x-campaign-id']}_success.txt`, moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - Success after ' + nbfois + ' tries to ' + info.accepted.join( ',' ) + '\n', 'utf-8' );
|
||||
}
|
||||
@ -176,16 +176,16 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
wait msg sent and return result sent
|
||||
*/
|
||||
// Recupere les parametre smtp du domainName à utiliser
|
||||
console.log( 'pass Outputs.generemsg' )
|
||||
logger.info( 'pass Outputs.generemsg' )
|
||||
try {
|
||||
const confclientexpediteur = jsonfile.readFileSync( `${config.tribes}/${msg.tribeidperso.tribeidexpediteur}/clientconf.json` );
|
||||
//console.log('expediteur', confclientexpediteur);
|
||||
//logger.info('expediteur', confclientexpediteur);
|
||||
msg.smtp = confclientexpediteur.smtp;
|
||||
/* const confclient = jsonfile.readFileSync(
|
||||
`${config.tribes}/${msg.tribeidperso.tribeid}/clientconf.json`
|
||||
);*/
|
||||
} catch ( err ) {
|
||||
console.log( 'la conf smtp du client n\'est pas definie' );
|
||||
logger.info( 'la conf smtp du client n\'est pas definie' );
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -194,14 +194,14 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
}
|
||||
};
|
||||
}
|
||||
console.log( msg );
|
||||
logger.info( msg );
|
||||
if( !msg.template.sendascontent && msg.template.htmlfile ) {
|
||||
try {
|
||||
msg.template.html = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contentinline.mustache', 'utf-8' );
|
||||
msg.template.text = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contenttxt.mustache', 'utf-8' );
|
||||
} catch ( err ) {
|
||||
console.log( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
console.log( err );
|
||||
logger.info( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
logger.info( err );
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -213,7 +213,7 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
}
|
||||
}
|
||||
if( msg.template.html.length == 0 ) {
|
||||
console.log( 'template.html est vide' )
|
||||
logger.info( 'template.html est vide' )
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -224,7 +224,7 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
};
|
||||
}
|
||||
// mustache any data into
|
||||
// console.log(msg);
|
||||
// logger.info(msg);
|
||||
const msg2send = {};
|
||||
msg2send.smtp = msg.smtp;
|
||||
msg2send.from = msg.tribeidperso.from;
|
||||
@ -237,9 +237,9 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
'x-template-nd-id': msg.tribeidperso.templateId
|
||||
};
|
||||
// we get in datacust= {tribeidperso: with clientconf,destperso: personnalise data to send for email}
|
||||
// console.log(msg);
|
||||
console.log( 'nb de message to send:', msg.destperso.length );
|
||||
//console.log(msg.destperso);
|
||||
// logger.info(msg);
|
||||
logger.info( 'nb de message to send:', msg.destperso.length );
|
||||
//logger.info(msg.destperso);
|
||||
//msg.destperso.forEach(async (perso, pos) => {
|
||||
let pos;
|
||||
let pass1ermsg = false;
|
||||
@ -267,15 +267,15 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
}
|
||||
} )
|
||||
msg2send.to = msg.destperso[ pos ].email;
|
||||
console.log( 'msg2send.to ' + msg2send.to + ' pos:' + pos );
|
||||
// console.log('avec datacusteval ', datacusteval)
|
||||
logger.info( 'msg2send.to ' + msg2send.to + ' pos:' + pos );
|
||||
// logger.info('avec datacusteval ', datacusteval)
|
||||
msg2send.subject = mustache.render( msg.template.subject, datacusteval );
|
||||
msg2send.text = mustache.render( msg.template.text, datacusteval );
|
||||
msg2send.html = mustache.render( msg.template.html, datacusteval );
|
||||
let nowait = true;
|
||||
if( config.emailerurl == 'http://devapia.maildigit.fr:3015' ) {
|
||||
fs.writeFileSync( 'devdata/tmp/test.html', msg2send.html, 'utf-8' );
|
||||
console.log( 'lancement email sur dev, pour controler le mail générer voir ds ./config.js il faut changer config.emailerurl avec https://mail.maildigit.fr pour envoyer le mail ' )
|
||||
logger.info( 'lancement email sur dev, pour controler le mail générer voir ds ./config.js il faut changer config.emailerurl avec https://mail.maildigit.fr pour envoyer le mail ' )
|
||||
return {
|
||||
status: 200,
|
||||
payload: {
|
||||
@ -290,22 +290,22 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
/* we are waiting the first email was sent ok then we send all other
|
||||
check NEEDDATA/OVH/workspace/emailerovh to send emailer with nodemailer and nodemailer-smtp-transport
|
||||
*/
|
||||
// console.log('envoie msg', msg);
|
||||
//console.log(msg2send);
|
||||
// logger.info('envoie msg', msg);
|
||||
//logger.info(msg2send);
|
||||
const ret = await Outputs.envoiemail( msg2send, nowait, 0 );
|
||||
console.log( 'ret 1er msg', ret );
|
||||
logger.info( 'ret 1er msg', ret );
|
||||
if( ret.status == 200 ) {
|
||||
pass1ermsg = true;
|
||||
};
|
||||
} else if( pass1ermsg ) {
|
||||
console.log( '###############################################' )
|
||||
console.log( "envoie msg numero: " + pos + " email: " + msg2send.to )
|
||||
//console.log(msg2send)
|
||||
logger.info( '###############################################' )
|
||||
logger.info( "envoie msg numero: " + pos + " email: " + msg2send.to )
|
||||
//logger.info(msg2send)
|
||||
Outputs.envoiemail( msg2send, nowait, 0 );
|
||||
/*Outputs.envoiemail(msg2send, nowait, 0).then(rep => {
|
||||
console.log("envoie email" + pos)
|
||||
logger.info("envoie email" + pos)
|
||||
}).catch(err => {
|
||||
console.log(err);
|
||||
logger.info(err);
|
||||
});*/
|
||||
};
|
||||
};
|
||||
@ -339,16 +339,16 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
Pour un exemple de msg voir u exemple type de message envoyé dans un tribeid/domain/clientconf.json
|
||||
avec l'envoi d'email
|
||||
*/
|
||||
//console.log(msg)
|
||||
//logger.info(msg)
|
||||
// On ajoute le contenu du template directement dans la demande
|
||||
if( msg.template.sendascontent && msg.template.htmlfile ) {
|
||||
try {
|
||||
console.log( 'test', msg.template.sendascontent )
|
||||
logger.info( 'test', msg.template.sendascontent )
|
||||
msg.template.html = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contentinline.mustache', 'utf-8' );
|
||||
msg.template.text = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contenttxt.mustache', 'utf-8' );
|
||||
} catch ( err ) {
|
||||
console.log( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
//console.log(err);
|
||||
logger.info( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
//logger.info(err);
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -370,8 +370,8 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
};
|
||||
}
|
||||
}
|
||||
console.log( 'envoie sur', `${config.emailerurl}/outputs/msg` )
|
||||
//console.log(msg)
|
||||
logger.info( 'envoie sur', `${config.emailerurl}/outputs/msg` )
|
||||
//logger.info(msg)
|
||||
// on check si les key de headermsg sont des key traduite via exposedHeaders
|
||||
// (cas ou c'est l'application qui envoie un email)
|
||||
if( headersmsg.xtribeid ) {
|
||||
@ -383,13 +383,13 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
}
|
||||
// on ajoute le code pour la signature
|
||||
headersmsg.hashbody = msg.code;
|
||||
console.log( 'header after traduction: ', headersmsg )
|
||||
logger.info( 'header after traduction: ', headersmsg )
|
||||
try {
|
||||
const resCamp = await axios.post( `${config.emailerurl}/outputs/msg`, msg, {
|
||||
headers: headersmsg
|
||||
} );
|
||||
//console.log('Etat:', resCamp);
|
||||
console.log( 'Tried to send 1st email of the campain ' + msg.destperso[ 0 ].email );
|
||||
//logger.info('Etat:', resCamp);
|
||||
logger.info( 'Tried to send 1st email of the campain ' + msg.destperso[ 0 ].email );
|
||||
// it just check the 1st email in destperso to return an answer if 1st is ok then all other are send in queue
|
||||
if( resCamp ) {
|
||||
return resCamp;
|
||||
@ -404,9 +404,9 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
Outputs.get = function ( filename, header ) {
|
||||
// check file exist
|
||||
const file = `${config.tribes}/${header.xworkon}/${filename}`;
|
||||
// console.log('fichier demande ', file);
|
||||
// logger.info('fichier demande ', file);
|
||||
if( !fs.existsSync( file ) ) {
|
||||
// console.log('le fichier demande n existe pas ', file);
|
||||
// logger.info('le fichier demande n existe pas ', file);
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -415,7 +415,7 @@ Outputs.get = function ( filename, header ) {
|
||||
}
|
||||
};
|
||||
} else {
|
||||
console.log( 'envoie le fichier ', file );
|
||||
logger.info( 'envoie le fichier ', file );
|
||||
return {
|
||||
status: 200,
|
||||
payload: {
|
||||
@ -431,7 +431,7 @@ Outputs.addjson = function ( data, header ) {
|
||||
Le header = {X-WorkOn:"",destinationfile:"", filename:""}
|
||||
Le body = {jsonp:{},callback:function to launch after download,'code':'mot cle pour verifier que le fichier est à garder'}
|
||||
*/
|
||||
// console.log(req.body.jsonp);
|
||||
// logger.info(req.body.jsonp);
|
||||
try {
|
||||
jsonfile.writeFileSync( header.destinationfile + '/' + header.filename, data.jsonp );
|
||||
if( data.callback ) {
|
||||
@ -451,7 +451,7 @@ Outputs.addjson = function ( data, header ) {
|
||||
}
|
||||
};
|
||||
} catch ( err ) {
|
||||
console.log( 'Impossible de sauvegarder le fichier, A COMPRENDRE', err );
|
||||
logger.info( 'Impossible de sauvegarder le fichier, A COMPRENDRE', err );
|
||||
return {
|
||||
status: 503,
|
||||
payload: {
|
||||
@ -463,22 +463,22 @@ Outputs.addjson = function ( data, header ) {
|
||||
};
|
||||
Outputs.add = function ( req, header ) {
|
||||
const form = new formidable.IncomingForm();
|
||||
console.log( 'req.headers', req.headers );
|
||||
console.log( 'req.params', req.params );
|
||||
console.log( 'req.query', req.query );
|
||||
console.log( 'req.body', req.body );
|
||||
logger.info( 'req.headers', req.headers );
|
||||
logger.info( 'req.params', req.params );
|
||||
logger.info( 'req.query', req.query );
|
||||
logger.info( 'req.body', req.body );
|
||||
let destinationfile = `${config.tribes}/${header.xworkon}/${header.destinationfile
|
||||
}`;
|
||||
form.parse( req, function ( err, fields, files ) {
|
||||
console.log( 'files', files.file.path );
|
||||
console.log( 'fields', fields );
|
||||
logger.info( 'files', files.file.path );
|
||||
logger.info( 'fields', fields );
|
||||
const oldpath = files.file.path;
|
||||
destinationfile += '/' + files.file.name;
|
||||
console.log( 'oldpath', oldpath );
|
||||
console.log( 'destinationfile', destinationfile );
|
||||
logger.info( 'oldpath', oldpath );
|
||||
logger.info( 'destinationfile', destinationfile );
|
||||
fs.copyFile( oldpath, destinationfile, function ( err ) {
|
||||
if( err ) {
|
||||
console.log( err );
|
||||
logger.info( err );
|
||||
return {
|
||||
status: 500,
|
||||
payload: {
|
||||
@ -487,7 +487,7 @@ Outputs.add = function ( req, header ) {
|
||||
}
|
||||
};
|
||||
} else {
|
||||
console.log( 'passe' );
|
||||
logger.info( 'passe' );
|
||||
fs.unlink( oldpath );
|
||||
return {
|
||||
status: 200,
|
||||
|
@ -28,7 +28,7 @@ Outputs.envoiemail = ( msg, next ) => {
|
||||
if( err ) {
|
||||
next( err );
|
||||
} else {
|
||||
console.log( 'info', info )
|
||||
logger.info( 'info', info )
|
||||
fs.appendFileSync( `${config.tribes}/${msg.headers['x-client-nd-id']}/logs/${msg.headers['x-campaign-id']}_success.txt`, moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - Success after ' + '0' + ' tries to ' + info.accepted.join( ',' ) + '\n', 'utf-8' );
|
||||
next( null );
|
||||
@ -59,14 +59,14 @@ Outputs.setupmail = ( msg, msg2send, index ) => {
|
||||
}
|
||||
} )
|
||||
msg2send.to = index.email;
|
||||
console.log( 'msg2send.to ' + msg2send.to );
|
||||
logger.info( 'msg2send.to ' + msg2send.to );
|
||||
msg2send.subject = mustache.render( msg.template.subject, datacusteval );
|
||||
msg2send.text = mustache.render( msg.template.text, datacusteval );
|
||||
msg2send.html = mustache.render( msg.template.html, datacusteval );
|
||||
// TODO need to move that in generemsg
|
||||
// if (config.emailerurl == 'http://devapia.maildigit.fr:3015') {
|
||||
// fs.writeFileSync('devdata/tmp/test.html', msg2send.html, 'utf-8');
|
||||
// console.log('lancement email sur dev, pour controler le mail générer voir ds ./config.js il faut changer config.emailerurl avec https://mail.maildigit.fr pour envoyer le mail ')
|
||||
// logger.info('lancement email sur dev, pour controler le mail générer voir ds ./config.js il faut changer config.emailerurl avec https://mail.maildigit.fr pour envoyer le mail ')
|
||||
// return {
|
||||
// status: 200,
|
||||
// payload: {
|
||||
@ -79,14 +79,14 @@ Outputs.setupmail = ( msg, msg2send, index ) => {
|
||||
return msg2send;
|
||||
}
|
||||
Outputs.envoiefirstmail = async ( msg ) => {
|
||||
console.log( '###############################################' )
|
||||
console.log( "envoie first msg email: " + msg.to )
|
||||
logger.info( '###############################################' )
|
||||
logger.info( "envoie first msg email: " + msg.to )
|
||||
let transporter = nodemailer.createTransport( msg.smtp );
|
||||
console.log( 'attente 1er msg avant d envoyer les autres' );
|
||||
logger.info( 'attente 1er msg avant d envoyer les autres' );
|
||||
const transport = await transporter.verify();
|
||||
console.log( 'transport', transport );
|
||||
logger.info( 'transport', transport );
|
||||
if( transport.error ) {
|
||||
console.log( 'Probleme de smtp', error );
|
||||
logger.info( 'Probleme de smtp', error );
|
||||
return {
|
||||
status: 500,
|
||||
payload: {
|
||||
@ -95,7 +95,7 @@ Outputs.envoiefirstmail = async ( msg ) => {
|
||||
};
|
||||
} else {
|
||||
let rep = await transporter.sendMail( msg );
|
||||
console.log( 'rep sendMail', rep );
|
||||
logger.info( 'rep sendMail', rep );
|
||||
if( rep.accepted && rep.accepted.length > 0 && rep.rejected.length == 0 ) {
|
||||
fs.appendFileSync( `${config.tribes}/${msg.headers['x-client-nd-id']}/logs/${msg.headers['x-campaign-id']}_success.txt`, moment( new Date() )
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - Success after waiting 1st email to send ' + msg.to + '\n', 'utf-8' );
|
||||
@ -124,8 +124,8 @@ Outputs.envoiemails = ( msg, msg2send, targets, iteration, resolve, reject ) =>
|
||||
let newtargets = [];
|
||||
async.each( targets, ( index, callback ) => { // iterate asynchronously in msg.destperso (targets)
|
||||
let finalmsg = Outputs.setupmail( msg, msg2send, index );
|
||||
console.log( '###############################################' )
|
||||
console.log( "envoie msg email: " + finalmsg.to )
|
||||
logger.info( '###############################################' )
|
||||
logger.info( "envoie msg email: " + finalmsg.to )
|
||||
Outputs.envoiemail( finalmsg, ( err ) => {
|
||||
if( err ) { // intentionally don't pass this error in callback, we dont want to break loop
|
||||
newtargets.push( index ); // stock all errored mails for next try
|
||||
@ -145,12 +145,12 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
wait msg sent and return result sent
|
||||
*/
|
||||
// Recupere les parametre smtp du domainName à utiliser
|
||||
console.log( 'pass Outputs.generemsg' )
|
||||
logger.info( 'pass Outputs.generemsg' )
|
||||
try {
|
||||
const confclientexpediteur = jsonfile.readFileSync( `${config.tribes}/${msg.tribeidperso.tribeidexpediteur}/clientconf.json` );
|
||||
msg.smtp = confclientexpediteur.smtp;
|
||||
} catch ( err ) {
|
||||
console.log( 'la conf smtp du client n\'est pas definie' );
|
||||
logger.info( 'la conf smtp du client n\'est pas definie' );
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -159,14 +159,14 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
}
|
||||
};
|
||||
}
|
||||
console.log( msg );
|
||||
logger.info( msg );
|
||||
if( !msg.template.sendascontent && msg.template.htmlfile ) {
|
||||
try {
|
||||
msg.template.html = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contentinline.mustache', 'utf-8' );
|
||||
msg.template.text = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contenttxt.mustache', 'utf-8' );
|
||||
} catch ( err ) {
|
||||
console.log( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
console.log( err );
|
||||
logger.info( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
logger.info( err );
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -178,7 +178,7 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
}
|
||||
}
|
||||
if( msg.template.html.length == 0 ) {
|
||||
console.log( 'template.html est vide' )
|
||||
logger.info( 'template.html est vide' )
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -200,15 +200,15 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
'x-client-nd-id': msg.tribeidperso.tribeid,
|
||||
'x-template-nd-id': msg.tribeidperso.templateId
|
||||
};
|
||||
console.log( 'nb de message to send:', msg.destperso.length );
|
||||
logger.info( 'nb de message to send:', msg.destperso.length );
|
||||
// send first mail
|
||||
const ret = await Outputs.envoiefirstmail( Outputs.setupmail( msg, msg2send, msg.destperso[ 0 ] ) );
|
||||
console.log( 'ret 1er msg', ret );
|
||||
logger.info( 'ret 1er msg', ret );
|
||||
if( ret.status == 200 ) {
|
||||
pass1ermsg = true;
|
||||
msg.destperso.shift();
|
||||
};
|
||||
console.log( 'attente 1er msg avant d envoyer les autres' );
|
||||
logger.info( 'attente 1er msg avant d envoyer les autres' );
|
||||
// send other mails
|
||||
new Promise( ( resolve, reject ) => { // useless promise used for recursive calls in Outputs.envoiemails
|
||||
Outputs.envoiemails( msg, msg2send, msg.destperso, 0, resolve, reject );
|
||||
@ -219,9 +219,9 @@ Outputs.generemsg = async ( msg, header ) => {
|
||||
.format( 'YYYYMMDD HH:mm:ss' ) + ' - err after 4 tries to ' + info.rejected.join( ',' ) + '\n', 'utf-8', ( err ) => {
|
||||
callback( err );
|
||||
}, ( err ) => {
|
||||
if( err ) console.log( err );
|
||||
if( err ) logger.info( err );
|
||||
} );
|
||||
console.log( 'msg.to not well sent', msg.to );
|
||||
logger.info( 'msg.to not well sent', msg.to );
|
||||
} );
|
||||
} )
|
||||
if( pass1ermsg ) {
|
||||
@ -254,16 +254,16 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
Pour un exemple de msg voir u exemple type de message envoyé dans un tribeid/domain/clientconf.json
|
||||
avec l'envoi d'email
|
||||
*/
|
||||
//console.log(msg)
|
||||
//logger.info(msg)
|
||||
// On ajoute le contenu du template directement dans la demande
|
||||
if( msg.template.sendascontent && msg.template.htmlfile ) {
|
||||
try {
|
||||
console.log( 'test', msg.template.sendascontent )
|
||||
logger.info( 'test', msg.template.sendascontent )
|
||||
msg.template.html = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contentinline.mustache', 'utf-8' );
|
||||
msg.template.text = fs.readFileSync( config.sharedData + '/' + msg.template.htmlfile + '/contenttxt.mustache', 'utf-8' );
|
||||
} catch ( err ) {
|
||||
console.log( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
//console.log(err);
|
||||
logger.info( 'WARNING, html file template missing ' + config.sharedData + '/' + msg.template.htmlfile );
|
||||
//logger.info(err);
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -285,8 +285,8 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
};
|
||||
}
|
||||
}
|
||||
console.log( 'envoie sur', `${config.emailerurl}/outputs/msg` )
|
||||
//console.log(msg)
|
||||
logger.info( 'envoie sur', `${config.emailerurl}/outputs/msg` )
|
||||
//logger.info(msg)
|
||||
// on check si les key de headermsg sont des key traduite via exposedHeaders
|
||||
// (cas ou c'est l'application qui envoie un email)
|
||||
if( headersmsg.xtribeid ) {
|
||||
@ -298,13 +298,13 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
}
|
||||
// on ajoute le code pour la signature
|
||||
headersmsg.hashbody = msg.code;
|
||||
console.log( 'header after traduction: ', headersmsg )
|
||||
logger.info( 'header after traduction: ', headersmsg )
|
||||
try {
|
||||
const resCamp = await axios.post( `${config.emailerurl}/outputs/msg`, msg, {
|
||||
headers: headersmsg
|
||||
} );
|
||||
//console.log('Etat:', resCamp);
|
||||
console.log( 'Tried to send 1st email of the campain ' + msg.destperso[ 0 ].email );
|
||||
//logger.info('Etat:', resCamp);
|
||||
logger.info( 'Tried to send 1st email of the campain ' + msg.destperso[ 0 ].email );
|
||||
// it just check the 1st email in destperso to return an answer if 1st is ok then all other are send in queue
|
||||
if( resCamp ) {
|
||||
return resCamp;
|
||||
@ -319,9 +319,9 @@ Outputs.sendMailcampain = async ( msg, headersmsg ) => {
|
||||
Outputs.get = function ( filename, header ) {
|
||||
// check file exist
|
||||
const file = `${config.tribes}/${header.xworkon}/${filename}`;
|
||||
// console.log('fichier demande ', file);
|
||||
// logger.info('fichier demande ', file);
|
||||
if( !fs.existsSync( file ) ) {
|
||||
// console.log('le fichier demande n existe pas ', file);
|
||||
// logger.info('le fichier demande n existe pas ', file);
|
||||
return {
|
||||
status: 404,
|
||||
payload: {
|
||||
@ -330,7 +330,7 @@ Outputs.get = function ( filename, header ) {
|
||||
}
|
||||
};
|
||||
} else {
|
||||
console.log( 'envoie le fichier ', file );
|
||||
logger.info( 'envoie le fichier ', file );
|
||||
return {
|
||||
status: 200,
|
||||
payload: {
|
||||
@ -346,7 +346,7 @@ Outputs.addjson = function ( data, header ) {
|
||||
Le header = {X-WorkOn:"",destinationfile:"", filename:""}
|
||||
Le body = {jsonp:{},callback:function to launch after download,'code':'mot cle pour verifier que le fichier est à garder'}
|
||||
*/
|
||||
// console.log(req.body.jsonp);
|
||||
// logger.info(req.body.jsonp);
|
||||
try {
|
||||
jsonfile.writeFileSync( header.destinationfile + '/' + header.filename, data.jsonp );
|
||||
if( data.callback ) {
|
||||
@ -366,7 +366,7 @@ Outputs.addjson = function ( data, header ) {
|
||||
}
|
||||
};
|
||||
} catch ( err ) {
|
||||
console.log( 'Impossible de sauvegarder le fichier, A COMPRENDRE', err );
|
||||
logger.info( 'Impossible de sauvegarder le fichier, A COMPRENDRE', err );
|
||||
return {
|
||||
status: 503,
|
||||
payload: {
|
||||
@ -378,22 +378,22 @@ Outputs.addjson = function ( data, header ) {
|
||||
};
|
||||
Outputs.add = function ( req, header ) {
|
||||
const form = new formidable.IncomingForm();
|
||||
console.log( 'req.headers', req.headers );
|
||||
console.log( 'req.params', req.params );
|
||||
console.log( 'req.query', req.query );
|
||||
console.log( 'req.body', req.body );
|
||||
logger.info( 'req.headers', req.headers );
|
||||
logger.info( 'req.params', req.params );
|
||||
logger.info( 'req.query', req.query );
|
||||
logger.info( 'req.body', req.body );
|
||||
let destinationfile = `${config.tribes}/${header.xworkon}/${header.destinationfile
|
||||
}`;
|
||||
form.parse( req, function ( err, fields, files ) {
|
||||
console.log( 'files', files.file.path );
|
||||
console.log( 'fields', fields );
|
||||
logger.info( 'files', files.file.path );
|
||||
logger.info( 'fields', fields );
|
||||
const oldpath = files.file.path;
|
||||
destinationfile += '/' + files.file.name;
|
||||
console.log( 'oldpath', oldpath );
|
||||
console.log( 'destinationfile', destinationfile );
|
||||
logger.info( 'oldpath', oldpath );
|
||||
logger.info( 'destinationfile', destinationfile );
|
||||
fs.copyFile( oldpath, destinationfile, function ( err ) {
|
||||
if( err ) {
|
||||
console.log( err );
|
||||
logger.info( err );
|
||||
return {
|
||||
status: 500,
|
||||
payload: {
|
||||
@ -402,7 +402,7 @@ Outputs.add = function ( req, header ) {
|
||||
}
|
||||
};
|
||||
} else {
|
||||
console.log( 'passe' );
|
||||
logger.info( 'passe' );
|
||||
fs.unlink( oldpath );
|
||||
return {
|
||||
status: 200,
|
||||
@ -424,7 +424,7 @@ Outputs.sheettojson = async ( req, header ) => {
|
||||
await doc.loadInfo();
|
||||
let result = [];
|
||||
for( const sheetName of req.sheets ) {
|
||||
console.log( 'loading: ', sheetName );
|
||||
logger.info( 'loading: ', sheetName );
|
||||
sheet = doc.sheetsByTitle[ sheetName ]
|
||||
await sheet.loadHeaderRow();
|
||||
const records = await sheet.getRows( { offset: 1 } )
|
||||
|
@ -47,7 +47,7 @@ Pagans.init = tribeids => {
const emailsglob = {};
const loginsglob = {};
// For each tribeid create series of indexes
//console.log(tribeids);
//logger.info(tribeids);
tribeids.forEach( tribeid => {
// Reset for each domain
const uids = {};
@ -62,12 +62,12 @@ Pagans.init = tribeids => {
}*/
glob.sync( `${config.tribes}/${tribeid}/users/*.json` )
.forEach( file => {
//console.log( file );
//logger.info( file );
const u = fs.readJsonSync( file, 'utf-8' );
if( !u.TOKEN ) {
u.TOKEN = '';
}
//console.log( u )
//logger.info( u )
uids[ u.UUID ] = [ u.LOGIN, u.EMAIL, u.PASSWORD, u.ACCESSRIGHTS, u.TOKEN ];
logins[ u.LOGIN ] = u.UUID;
loginsglob[ u.LOGIN ] = tribeid;
@ -76,13 +76,13 @@ Pagans.init = tribeids => {
if( u.TOKEN != '' ) {
try {
decodedTOKEN = jwt.decode( u.TOKEN, config.jwtSecret );
//console.log( 'decodeTOKEN', decodedTOKEN )
//logger.info( 'decodeTOKEN', decodedTOKEN )
if( moment( decodedTOKEN.expiration ) > moment() ) {
tokens[ u.UUID ] = { TOKEN: u.TOKEN, ACCESSRIGHTS: u.ACCESSRIGHTS };
//console.log( `add token valid for ${u.UUID}:`, tokens[ u.UUID ] )
//logger.info( `add token valid for ${u.UUID}:`, tokens[ u.UUID ] )
}
} catch ( err ) {
console.log( 'pb de TOKEN impossible a decoder' + u.TOKEN, err );
logger.info( 'pb de TOKEN impossible a decoder' + u.TOKEN, err );
}
}
if( u.EMAIL ) {
@ -152,10 +152,10 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => {
jsonfile.writeFile( loginsIndex, logins, {
spaces: 2
}, err => {
if( err ) console.log( err );
if( err ) logger.info( err );
} );
} catch ( err ) {
console.log( 'Gros pb de mise à jour Pagans.updateDatabase conflit des logins' );
logger.info( 'Gros pb de mise à jour Pagans.updateDatabase conflit des logins' );
}
} );
const uidsIndex = `${config.tribes}/${tribeid}/users/searchindex/uids.json`;
@ -175,10 +175,10 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => {
jsonfile.writeFile( uidsIndex, uids, {
spaces: 2
}, err => {
if( err ) console.log( err );
if( err ) logger.info( err );
} );
} catch ( err ) {
console.log( 'Gros pb de mise à jour Pagans.updateDatabase conflit des uids si ce reproduit passer en mode sync bloquant' );
logger.info( 'Gros pb de mise à jour Pagans.updateDatabase conflit des uids si ce reproduit passer en mode sync bloquant' );
}
} );
const emailsIndex = `${config.tribes}/${tribeid}/users/searchindex/emails.json`;
@ -193,10 +193,10 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => {
jsonfile.writeFile( emailsIndex, emails, {
spaces: 2
}, err => {
if( err ) console.log( err );
if( err ) logger.info( err );
} );
} catch ( err ) {
console.log( 'Gros pb de mise à jour Pagans.updateDatabase conflit des emails' );
logger.info( 'Gros pb de mise à jour Pagans.updateDatabase conflit des emails' );
}
} );
const tokensIndex = `${config.tmp}/tokens.json`;
@ -204,7 +204,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => {
try {
tokens = jsonfile.readFileSync( tokensIndex );
} catch ( err ) {
console.log( 'tokens.json not available' )
logger.info( 'tokens.json not available' )
}
// tokens[user.UUID] = user.TOKEN;
tokens[ user.UUID ] = { TOKEN: user.TOKEN, ACCESSRIGHTS: user.ACCESSRIGHTS };
@ -215,7 +215,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => {
jsonfile.readFile(tokensIndex, function(err, tokens) {
tokens[user.UUID] = user.TOKEN;
jsonfile.writeFile(tokensIndex, tokens, { spaces: 2 }, err => {
if (err) console.log(err);
if (err) logger.info(err);
});
});*/
console.groupEnd();
@ -234,7 +234,7 @@ Pagans.getUserlist = ( header, filter, field ) => {
if (getuser.status != 200)
return { status: getuser.status, data: getuser.payload };
const user = getuser.payload.data;
// console.log(user);
// logger.info(user);
// check if update accessright allowed
// choose the level depending of ownby xuuid
let accessright = user.objectRights[header.xtribeid].users[0];
@ -242,8 +242,8 @@ Pagans.getUserlist = ( header, filter, field ) => {
accessright = user.objectRights[header.xtribeid].users[1];
}
// Check update is possible at least user itself ownby itself
console.log(accessright);
console.log(accessright & 4);
logger.info(accessright);
logger.info(accessright & 4);
if ((accessright & 4) != 4) {
return {
status: 403,
@ -266,7 +266,7 @@ Pagans.getUserlist = ( header, filter, field ) => {
} );
Userlist.push( info );
} );
// console.log('userlist', Userlist);
// logger.info('userlist', Userlist);
console.groupEnd();
return {
status: 200,
@ -286,7 +286,7 @@ Pagans.getinfoPagans = ( tribeid, accessrights, listindex ) => {
info[ index ] = jsonfile.readFileSync( `${config.tribes}/${tribeid}/${object}/searchindex/${index}.json` )
}
} )
console.log( info )
logger.info( info )
return { status: 200, data: { info: info } }
}
Pagans.getUser = ( UUID, tribeid, accessrights ) => {
@ -306,7 +306,7 @@ Pagans.getUser = ( UUID, tribeid, accessrights ) => {
}
const user = jsonfile.readFileSync( `${config.tribes}/${tribeid}/users/${UUID}.json` );
let access = true;
//console.log("test accessrights.data['users'].includes('R')", accessrights.data['users'].includes('R'))
//logger.info("test accessrights.data['users'].includes('R')", accessrights.data['users'].includes('R'))
console.assert( config.loglevel == "quiet", 'accessrights', accessrights )
access = accessrights.users && ( accessrights.users.includes( 'R' ) || ( accessrights.users.includes( 'O' ) && user.OWNEDBY.includes( UUID ) ) );
if( access ) {
@ -357,7 +357,7 @@ Pagans.updateUserpassword = ( UUID, header, data ) => {
const getUser = Pagans.getUser( UUID, header.xtribeid, { users: 'W' } );
if( getUser.status == 200 ) {
const user = getUser.data.user;
// console.log('user exist', user);
// logger.info('user exist', user);
const match = bcrypt.compareSync( data.password, user.PASSWORD );
if( !match ) {
return {
@ -368,7 +368,7 @@ Pagans.updateUserpassword = ( UUID, header, data ) => {
}
};
}
// console.log('Credentials are matching!');
// logger.info('Credentials are matching!');
if( checkdata.test.password( {}, data.pswnew ) ) {
user.PASSWORD = bcrypt.hashSync( data.pswnew, config.saltRounds );
jsonfile.writeFileSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, {
@ -397,7 +397,7 @@ Pagans.createUser = ( header, data ) => {
/*
@input data={PUBKEY,EMAIL,LOGIN,UUID} check and create for header xworkon a user with generic password
*/
console.log( 'createUser on header.xworkon:' + header.xworkon + ' by user:' + header.xpaganid );
logger.info( 'createUser on header.xworkon:' + header.xworkon + ' by user:' + header.xpaganid );
console.assert( config.loglevel == "quiet", 'with data:', data );
const ref = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/referentials/${header.xlang}/object/users.json` );
const logins = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/logins.json` );
@ -455,8 +455,8 @@ Pagans.createUser = ( header, data ) => {
};
};
Pagans.updateUser = ( UUID, header, data ) => {
console.log( 'updateUser UUID:' + UUID + ' on header.xworkon:' + header.xworkon + ' by user' + header.xpaganid );
// console.log('header', header);
logger.info( 'updateUser UUID:' + UUID + ' on header.xworkon:' + header.xworkon + ' by user' + header.xpaganid );
// logger.info('header', header);
console.assert( config.loglevel == "quiet", 'with data', data );
const getuser = Pagans.getUser( UUID, header.xworkon, { users: 'R' } );
if( getuser.status != 200 ) return {
@ -471,8 +471,8 @@ Pagans.updateUser = ( UUID, header, data ) => {
const getuserconnected = Pagans.getUser(header.xuuid, header.xtribeid);
userconnected = getuserconnected.payload.data;
}
console.log('user to update', user);
console.log('user connected that request update', userconnected);
logger.info('user to update', user);
logger.info('user connected that request update', userconnected);
|
||||
// check if update accessright allowed
|
||||
// choose the level depending of ownby xuuid
|
||||
@ -481,8 +481,8 @@ Pagans.updateUser = ( UUID, header, data ) => {
|
||||
accessright = userconnected.objectRights[header.xworkon].users[1];
|
||||
}
|
||||
// Check update is possible at least user itself ownby itself
|
||||
console.log(accessright);
|
||||
console.log(accessright & 2);
|
||||
logger.info(accessright);
|
||||
logger.info(accessright & 2);
|
||||
if ((accessright & 2) != 2) {
|
||||
return {
|
||||
status: 403,
|
||||
@ -496,12 +496,12 @@ Pagans.updateUser = ( UUID, header, data ) => {
|
||||
const logins = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/logins.json` );
|
||||
const LOGIN = Object.keys( logins )
|
||||
.filter( l => logins[ l ] != user.UUID );
|
||||
// console.log( 'LOGIN list', LOGIN );
|
||||
// logger.info( 'LOGIN list', LOGIN );
|
||||
const emails = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/emails.json` );
|
||||
// console.log( 'emails', emails );
|
||||
// logger.info( 'emails', emails );
|
||||
const EMAIL = Object.keys( emails )
|
||||
.filter( e => emails[ e ] != user.UUID );
|
||||
// console.log( 'EMAIL list', EMAIL );
|
||||
// logger.info( 'EMAIL list', EMAIL );
|
||||
// list.UUID est forcement unique car on est en update et pas en create
|
||||
// pour la logique de checkdata il faut passer le parametre
|
||||
const check = checkdata.evaluate( {
|
||||
@ -526,9 +526,9 @@ Pagans.updateUser = ( UUID, header, data ) => {
|
||||
let updateDatabase = false;
|
||||
Object.keys( data )
|
||||
.forEach( k => {
|
||||
//console.log( user[ k ] )
|
||||
//console.log( data[ k ] )
|
||||
//console.log( '---' )
|
||||
//logger.info( user[ k ] )
|
||||
//logger.info( data[ k ] )
|
||||
//logger.info( '---' )
|
||||
if( user[ k ] != data[ k ] ) {
|
||||
user[ k ] = data[ k ];
|
||||
saveuser = true;
|
||||
@ -537,7 +537,7 @@ Pagans.updateUser = ( UUID, header, data ) => {
|
||||
}
|
||||
} );
|
||||
if( saveuser ) {
|
||||
//console.log( 'mise à jour user profile.json' );
|
||||
//logger.info( 'mise à jour user profile.json' );
|
||||
if( data.TOKEN ) {
|
||||
user.date_lastLOGIN = new Date()
|
||||
.toISOString();
|
||||
@ -549,15 +549,15 @@ Pagans.updateUser = ( UUID, header, data ) => {
|
||||
jsonfile.writeFileSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, {
|
||||
spaces: 2
|
||||
} );
|
||||
//console.log( 'declenche updatabase', updateDatabase )
|
||||
//logger.info( 'declenche updatabase', updateDatabase )
|
||||
if( updateDatabase ) {
|
||||
// mean index have to be updated
|
||||
Pagans.updateDatabase( user, header.xworkon, false );
|
||||
console.assert( config.loglevel == "quiet", 'MISE A JOUR DU TOKEN ou de l\'EMAIL ou du LOGIN' );
|
||||
}
|
||||
} catch ( err ) {
|
||||
console.log( 'ERRRRR need to understand update impossible of user: ' + UUID + ' in domain:' + header.xworkon + ' from user ' + header.xpaganid + ' of domain:' + header.xtribe );
|
||||
console.log( 'with data :', data );
|
||||
logger.info( 'ERRRRR need to understand update impossible of user: ' + UUID + ' in domain:' + header.xworkon + ' from user ' + header.xpaganid + ' of domain:' + header.xtribe );
|
||||
logger.info( 'with data :', data );
|
||||
return {
|
||||
status: 400,
|
||||
data: {
|
||||
@ -636,12 +636,12 @@ Pagans.loginUser = ( header, body, checkpsw ) => {
|
||||
// Load user
|
||||
const uid = logins[ body.LOGIN ];
|
||||
const getUser = Pagans.getUser( uid, LOGINdom[ body.LOGIN ], { users: 'R' } );
|
||||
console.log( 'getPagans', getUser )
|
||||
logger.info( 'getPagans', getUser )
|
||||
if( getUser.status != 200 ) {
|
||||
return { status: 200, data: { model: 'Pagans', user: getUser.data.user } };
|
||||
}
|
||||
const user = getUser.data.user;
|
||||
console.log( 'user', user )
|
||||
logger.info( 'user', user )
|
||||
if( checkpsw ) {
|
||||
const match = bcrypt.compareSync( body.PASSWORD, user.PASSWORD );
|
||||
if( !match ) {
|
||||
@ -719,7 +719,7 @@ Pagans.getlinkwithoutpsw = async ( EMAIL, header ) => {
|
||||
checkinfomail += ' Erreur de clientconfig il manque un objet appemailinfo pour poursuivre';
|
||||
}
|
||||
if( checkinfomail != "" ) {
|
||||
console.log( `Pb de config pour ${header.xtribe} ${checkinfomail} ` )
|
||||
logger.info( `Pb de config pour ${header.xtribe} ${checkinfomail} ` )
|
||||
return {
|
||||
status: 500,
|
||||
info: {
|
||||
@ -735,7 +735,7 @@ Pagans.getlinkwithoutpsw = async ( EMAIL, header ) => {
|
||||
LOGIN: infoforuuid[ uuidforemail[ EMAIL ] ][ 0 ],
|
||||
PASSWORD: ""
|
||||
}, false );
|
||||
console.log( 'info simulelogin', simulelogin )
|
||||
logger.info( 'info simulelogin', simulelogin )
|
||||
} catch ( err ) {
|
||||
return {
|
||||
status: 501,
|
||||
@ -747,7 +747,7 @@ Pagans.getlinkwithoutpsw = async ( EMAIL, header ) => {
|
||||
};
|
||||
}
|
||||
const url = `${config.rootURL}?xauth=${simulelogin.data.TOKEN}&xuuid=${simulelogin.data.UUID}&xtribeid=${simulelogin.data.tribeid}&xworkOn=${header.xworkon}&xlang=${header.xlang}`
|
||||
//console.log('envoi email avec' + url)
|
||||
//logger.info('envoi email avec' + url)
|
||||
confdom.appemailinfo.msg.destperso = [ {} ];
|
||||
confdom.appemailinfo.msg.destperso[ 0 ].email = EMAIL;
|
||||
confdom.appemailinfo.msg.destperso[ 0 ].subject = "Lien de réinitialisation valable 1h"
|
||||
@ -759,9 +759,9 @@ Pagans.getlinkwithoutpsw = async ( EMAIL, header ) => {
|
||||
<a href="${url}">Clicker ICI</a>.<br>
|
||||
Nous vous conseillons de changer votre mot de passe.</p>
|
||||
`
|
||||
//console.log('envoi header :', header);
|
||||
//logger.info('envoi header :', header);
|
||||
Outputs.sendMailcampain( confdom.appemailinfo.msg, header );
|
||||
console.log( confdom.appemailinfo );
|
||||
logger.info( confdom.appemailinfo );
|
||||
return {
|
||||
status: 200,
|
||||
info: {
|
||||
|
@ -22,7 +22,7 @@ Referentials.clientconf = ( xworkOn, listkey ) => {
|
||||
*/
|
||||
let conf = {};
|
||||
let dataconf = {};
|
||||
//console.log( `${config.tribes}/${xworkOn}/clientconf.json` )
|
||||
//logger.info( `${config.tribes}/${xworkOn}/clientconf.json` )
|
||||
try {
|
||||
conf = fs.readJsonSync( `${config.tribes}/${xworkOn}/clientconf.json` );
|
||||
// remove information notrelevant for
|
||||
@ -30,9 +30,9 @@ Referentials.clientconf = ( xworkOn, listkey ) => {
|
||||
delete conf[ c ];
|
||||
} );
|
||||
listkey.forEach( k => dataconf[ k ] = conf[ k ] )
|
||||
//console.log( 'dataconf', dataconf )
|
||||
//logger.info( 'dataconf', dataconf )
|
||||
} catch ( err ) {
|
||||
console.log( 'Attention demande sur clienId inconnu ' + xworkOn );
|
||||
logger.info( 'Attention demande sur clienId inconnu ' + xworkOn );
|
||||
}
|
||||
return {
|
||||
status: 200,
|
||||
@ -57,11 +57,11 @@ Referentials.getref = ( origin, source, ref, xworkOn, xlang ) => {
|
||||
} else {
|
||||
src = `${config.tribes}/${xworkOn}/referentials/${source}/${ref}_${xlang}.json`;
|
||||
}
|
||||
//console.log( src )
|
||||
//logger.info( src )
|
||||
try {
|
||||
referent = fs.readJsonSync( src );
|
||||
} catch ( err ) {
|
||||
console.log( `Request ${src} does not exist ` );
|
||||
logger.info( `Request ${src} does not exist ` );
|
||||
}
|
||||
return {
|
||||
status: 200,
|
||||
@ -84,7 +84,7 @@ Referentials.putref = ( source, name, xworkOn, data ) => {
|
||||
|
||||
name for source=json must end by _lg
|
||||
*/
|
||||
//console.log( data )
|
||||
//logger.info( data )
|
||||
const pat = /.*_..\.json$/;
|
||||
const file = `${config.tribes}/${xworkOn}/referentials/${source}/${name}.json`
|
||||
if( [ 'object', 'data' ].includes( source ) ) {
|
||||
@ -127,7 +127,7 @@ Referentials.updatefull = ( tribeid ) => {
|
||||
};
|
||||
|
||||
Referentials.inittribeid = () => {
|
||||
console.log( "Clientconf list for this server", `${config.tribes}/**/clientconf.json` );
|
||||
logger.info( "Clientconf list for this server", `${config.tribes}/**/clientconf.json` );
|
||||
const TribesGlobalConfig = glob.sync( `${config.tribes}/**/clientconf.json` )
|
||||
.map( f => fs.readJsonSync( f ) );
|
||||
// store global conf for sharing to other api
|
||||
@ -143,7 +143,7 @@ Referentials.generetribeids = () => {
|
||||
if( !tribeids.includes( c.tribeid ) ) tribeids.push( c.tribeid );
|
||||
} );
|
||||
fs.outputJsonSync( `${config.tmp}/tribeids.json`, tribeids );
|
||||
console.log( `update ${config.tribes}/tribeids` );
|
||||
logger.info( `update ${config.tribes}/tribeids` );
|
||||
return tribeids;
|
||||
}
|
||||
Referentials.genereallowedDOM = () => {
|
||||
@ -173,7 +173,7 @@ Referentials.genereallowedDOM = () => {
|
||||
}
|
||||
});
|
||||
} else {
|
||||
console.log('erreur de fichier config d\'un site pour ', c);
|
||||
logger.info('erreur de fichier config d\'un site pour ', c);
|
||||
}
|
||||
// GLOBAL Tribes IDS INDEX
|
||||
maketribeidsIndex();
|
||||
@ -218,8 +218,8 @@ Referentials.update = ( tribeid, source, name ) => {
|
||||
refnew.push( d )
|
||||
} )
|
||||
//save new ref in language
|
||||
//console.log( "New ref", refnew )
|
||||
console.log( `Update referentials per lg ${config.tribes}/${tribeid}/referentials/${source}/${name}_${lg}.json` )
|
||||
//logger.info( "New ref", refnew )
|
||||
logger.info( `Update referentials per lg ${config.tribes}/${tribeid}/referentials/${source}/${name}_${lg}.json` )
|
||||
fs.outputJsonSync( `${config.tribes}/${tribeid}/referentials/${source}/${name}_${lg}.json`, refnew, {
|
||||
spaces: 2
|
||||
} );
|
||||
@ -239,7 +239,7 @@ Referentials.update = ( tribeid, source, name ) => {
|
||||
}
|
||||
}
|
||||
};
|
||||
//console.log( Referentials.update( 'apixtribe', "object", "user" ) )
|
||||
//logger.info( Referentials.update( 'apixtribe', "object", "user" ) )
|
||||
|
||||
Referentials.genereobjet = ( tribeid, destination, tplmustache, objet, filtre ) => {
|
||||
/* @TODO
|
||||
@ -271,7 +271,7 @@ Le principe consistait à partager des referentiels dans shareddataLa gestion es
|
||||
fs.mkdirSync( `${config.tribes}/${c.tribeid}/${fol}` );
|
||||
}
|
||||
} )
|
||||
if( c.referentials && !c.langue ) { console.log( `ERREUR referentials mais pas de langue:[] pour ${c.tribeid}/clientconf.json` ) }
|
||||
if( c.referentials && !c.langue ) { logger.info( `ERREUR referentials mais pas de langue:[] pour ${c.tribeid}/clientconf.json` ) }
|
||||
if( c.referentials && c.langue ) {
|
||||
let majclientconf = false;
|
||||
// Create and check Object structure
|
||||
@ -312,7 +312,7 @@ Le principe consistait à partager des referentiels dans shareddataLa gestion es
|
||||
Object.keys( c.referentials.data )
|
||||
.forEach( d => {
|
||||
// if object exist in shared then it merge sharedObject and domain referential object
|
||||
// console.log(c.tribeid + '--' + d);
|
||||
// logger.info(c.tribeid + '--' + d);
|
||||
let datafull = [];
|
||||
const datashared = `${
|
||||
config.sharedData
|
||||
@ -332,7 +332,7 @@ Le principe consistait à partager des referentiels dans shareddataLa gestion es
|
||||
*/
|
||||
// for each Langues => generate fr.obj and compare it with existing file
|
||||
// if diff then => upgrade version number in clientconf
|
||||
// console.log(datafull);
|
||||
// logger.info(datafull);
|
||||
// this could be improved by usind d.meta wich is the object that DESCribe this data
|
||||
/* c.langue.forEach( lg => {
|
||||
let meta;
|
||||
@ -379,7 +379,7 @@ Le principe consistait à partager des referentiels dans shareddataLa gestion es
|
||||
Object.keys( c.referentials.json )
|
||||
.forEach( j => {
|
||||
// if object exist in shared then it merge sharedObject and domain referential object
|
||||
// console.log(c.tribeid + '--' + d);
|
||||
// logger.info(c.tribeid + '--' + d);
|
||||
let jsonfull = [];
|
||||
const jsondomain = `${config.tribes}/${c.tribeid}/referentials/dataManagement/json/${j}.json`;
|
||||
if( fs.existsSync( jsondomain ) ) {
|
||||
@ -389,7 +389,7 @@ Le principe consistait à partager des referentiels dans shareddataLa gestion es
|
||||
const jsondomlg = `${config.tribes}/${
|
||||
c.tribeid
|
||||
}/referentials/${lg}/json/${j}.json`;
|
||||
// console.log('jsondomlg', jsondomlg);
|
||||
// logger.info('jsondomlg', jsondomlg);
|
||||
let datalg = jsonfull;
|
||||
if( jsonfull[ lg ] ) {
|
||||
datalg = jsonfull[ lg ];
|
||||
|
@ -23,7 +23,7 @@ Referentials.clientconf = ( xworkOn, listkey ) => {
|
||||
*/
|
||||
let conf = {};
|
||||
let dataconf = {};
|
||||
console.log( `${config.tribes}/${xworkOn}/clientconf.json` )
|
||||
logger.info( `${config.tribes}/${xworkOn}/clientconf.json` )
|
||||
try {
|
||||
conf = fs.readJsonSync( `${config.tribes}/${xworkOn}/clientconf.json` );
|
||||
// remove information notrelevant for
|
||||
@ -31,9 +31,9 @@ Referentials.clientconf = ( xworkOn, listkey ) => {
|
||||
delete conf[ c ];
|
||||
} );
|
||||
listkey.forEach( k => dataconf[ k ] = conf[ k ] )
|
||||
console.log( 'dataconf', dataconf )
|
||||
logger.info( 'dataconf', dataconf )
|
||||
} catch ( err ) {
|
||||
console.log( 'Attention demande sur clienId inconnu ' + xworkOn );
|
||||
logger.info( 'Attention demande sur clienId inconnu ' + xworkOn );
|
||||
}
|
||||
return {
|
||||
status: 200,
|
||||
@ -50,7 +50,7 @@ Referentials.clientconfglob = () => ( {
|
||||
} );
|
||||
|
||||
Referentials.inittribeid = () => {
|
||||
console.log( "Clientconf list for this server", `${config.tribes}/**/clientconf.json` );
|
||||
logger.info( "Clientconf list for this server", `${config.tribes}/**/clientconf.json` );
|
||||
const TribesGlobalConfig = glob.sync( `${config.tribes}/**/clientconf.json` )
|
||||
.map( f => fs.readJsonSync( f ) );
|
||||
// store global conf for sharing to other api
|
||||
@ -68,7 +68,7 @@ Referentials.generetribeids = () => {
|
||||
if( !tribeids.includes( c.tribeid ) ) tribeids.push( c.tribeid );
|
||||
} );
|
||||
fs.outputJsonSync( `${config.tmp}/tribeids.json`, tribeids );
|
||||
console.log( `update ${config.tribes}/tribeids` );
|
||||
logger.info( `update ${config.tribes}/tribeids` );
|
||||
return tribeids;
|
||||
}
|
||||
Referentials.genereallowedDOM = () => {
|
||||
@ -90,11 +90,11 @@ Referentials.getref = ( source, ref, xworkOn, xlang, singlelang = true ) => {
|
||||
//request full referential to manage
|
||||
src = `${config.tribes}/${xworkOn}/referentials/dataManagement/${source}/${ref}.json`
|
||||
}
|
||||
console.log( src )
|
||||
logger.info( src )
|
||||
try {
|
||||
referent = fs.readJsonSync( src );
|
||||
} catch ( err ) {
|
||||
console.log( `Attention demande de referentiel inexistant pour ${src} ` );
|
||||
logger.info( `Attention demande de referentiel inexistant pour ${src} ` );
|
||||
}
|
||||
return {
|
||||
status: 200,
|
||||
@ -113,7 +113,7 @@ Referentials.putref = ( source, name, xworkOn, data ) => {
|
||||
Difference between data and object is that object defines rule to manage an object, and how to create a forms to get data each data is saved in one folder object/uuid.json and have to respect the corresponding object referentials definition.
|
||||
|
||||
*/
|
||||
console.log( data )
|
||||
logger.info( data )
|
||||
// Create a backup of the day hour if exist
|
||||
const file = `${config.tribes}/${xworkOn}/referentials/dataManagement/${source}/${name}.json`
|
||||
|
||||
@ -122,9 +122,9 @@ Referentials.putref = ( source, name, xworkOn, data ) => {
|
||||
const origin = fs.readJsonSync( file, 'utf-8' )
|
||||
fs.outputJsonSync( `${config.tribes}/${xworkOn}/referentials/dataManagementBackup/${source}/${name}${moment().format('YYYYMMDDHHmm')}.json`, origin, { spaces: 2 } )
|
||||
} else {
|
||||
console.log( `Referential ${name}.json does not exist this created it` )
|
||||
logger.info( `Referential ${name}.json does not exist this created it` )
|
||||
}
|
||||
console.log( 'ref backup before update', name );
|
||||
logger.info( 'ref backup before update', name );
|
||||
fs.outputJsonSync( file, data, { spaces: 2 } );
|
||||
// update/create new referential and new version
|
||||
return Referentials.update( xworkOn, source, name );
|
||||
@ -186,8 +186,8 @@ Referentials.update = ( tribeid, source, name ) => {
|
||||
} )
|
||||
}
|
||||
//save new ref in language
|
||||
console.log( "testtttt", refnew )
|
||||
console.log( `${config.tribes}/${tribeid}/referentials/${source}/${name}_${lg}.json` )
|
||||
logger.info( "testtttt", refnew )
|
||||
logger.info( `${config.tribes}/${tribeid}/referentials/${source}/${name}_${lg}.json` )
|
||||
fs.outputJsonSync( `${config.tribes}/${tribeid}/referentials/${source}/${name}_${lg}.json`, refnew, {
|
||||
spaces: 2
|
||||
} );
|
||||
@ -204,7 +204,7 @@ Referentials.update = ( tribeid, source, name ) => {
|
||||
}
|
||||
}
|
||||
};
|
||||
//console.log( Referentials.update( 'apixtribe', "object", "user" ) )
|
||||
//logger.info( Referentials.update( 'apixtribe', "object", "user" ) )
|
||||
|
||||
Referentials.genereobjet = ( tribeid, destination, tplmustache, objet, filtre ) => {
|
||||
/* @TODO
|
||||
|
@ -6,14 +6,14 @@ const Mustache = require( 'mustache' );
const Setup = {};

if( !fs.existsSync( '/etc/nginx/nginx.conf' ) ) {
console.log( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available' );
logger.info( '\x1b[31m Check documentation, nginx have to be installed on this server first, no /etc/nginx/nginx.conf available' );
process.exit();
}
if( !fs.existsSync( '../config.js' ) ) {
console.log( `\x1b[42m####################################\nWellcome into apixtribe, this is a first install.\nWe need to make this server accessible from internet subdomain.domain to current IP. This setup will create your unique tribeid, with an admin login user to let you connect to the parameter interface.\nCheck README's project to learn more. more.\n#####################################\x1b[0m` );
logger.info( `\x1b[42m####################################\nWellcome into apixtribe, this is a first install.\nWe need to make this server accessible from internet subdomain.domain to current IP. This setup will create your unique tribeid, with an admin login user to let you connect to the parameter interface.\nCheck README's project to learn more. more.\n#####################################\x1b[0m` );
const confdata = fs.readJsonSync( path.normalize( `${__dirname}/../setup/configsetup.json` ) );
console.log( 'Current setup conf from :\n' + path.normalize( `${__dirname}/../setup/configsetup.json` + '\nChange with relevant setup data and rerun yarn setup' ) );
console.log( confdata )
logger.info( 'Current setup conf from :\n' + path.normalize( `${__dirname}/../setup/configsetup.json` + '\nChange with relevant setup data and rerun yarn setup' ) );
logger.info( confdata )
const readline = require( 'readline' );
const rl = readline.createInterface( {
input: process.stdin,
@ -32,20 +32,20 @@ if( !fs.existsSync( '../config.js' ) ) {
if( check == "" ) {
Setup.config( confdata );
} else {
console.log( check );
logger.info( check );
}
} else {
console.log( 'Nothing done please, check setup/configsetup.json and answer twice Yes' )
logger.info( 'Nothing done please, check setup/configsetup.json and answer twice Yes' )
}
rl.close();
} );
} );
rl.on( 'close', function () {
console.log( '\n Setup process ended' );
logger.info( '\n Setup process ended' );
process.exit( 0 );
} );
} else {
console.log( 'Carefull you have already a config.js that is running. If you want to change remove config.js file and run again yarn setup' );
logger.info( 'Carefull you have already a config.js that is running. If you want to change remove config.js file and run again yarn setup' );
}

Setup.checkdata = conf => {
@ -88,10 +88,10 @@ Setup.configjs = ( confdata ) => {
|
||||
let confapixtribe = fs.readFileSync( './setup/config.mustache', 'utf-8' );
|
||||
fs.writeFileSync( './config.js', Mustache.render( confapixtribe, confdata ), 'utf-8' );
|
||||
if( fs.existsSync( './config.js' ) ) {
|
||||
console.log( 'config.js successfully created.' );
|
||||
logger.info( 'config.js successfully created.' );
|
||||
} else {
|
||||
console.log( "config.js not created, check what's wrong in tpl:", confapixtribe );
|
||||
console.log( "for data :", confdata );
|
||||
logger.info( "config.js not created, check what's wrong in tpl:", confapixtribe );
|
||||
logger.info( "for data :", confdata );
|
||||
process.exit();
|
||||
}
|
||||
};
|
||||
@ -99,7 +99,7 @@ Setup.druidid = ( confdata ) => {
|
||||
// create a tribeid with a user that will admin this instance into /tribes/tribeid /users
|
||||
const config = require( '../config.js' );
|
||||
// Need to do it on setup this is also done again in models/Tribes.js
|
||||
console.log( `${config.tribes}/${confdata.druidid}` )
|
||||
logger.info( `${config.tribes}/${confdata.druidid}` )
|
||||
fs.ensureDirSync( `${config.tribes}/${confdata.druidid}` );
|
||||
[ 'users', 'www', 'referentials', 'nationchains' ].forEach( r => {
|
||||
fs.copySync( `${config.mainDir}/setup/tribes/apixtribe/${r}`, `${config.tribes}/${confdata.druidid}/${r}` );
|
||||
@ -124,7 +124,7 @@ Setup.druidid = ( confdata ) => {
|
||||
}
|
||||
} );
|
||||
if( createclient.status == 200 ) {
|
||||
console.log( `Your tribeid domain was created with login : ${confdata.login} and password: ${confdata.genericpsw}, change it after the 1st login on https://${confdata.subdomain}.${confdata.domain}` );
|
||||
logger.info( `Your tribeid domain was created with login : ${confdata.login} and password: ${confdata.genericpsw}, change it after the 1st login on https://${confdata.subdomain}.${confdata.domain}` );
|
||||
// Create nginx conf for a first install
|
||||
const confnginx = fs.readFileSync( './setup/nginx/nginx.conf.mustache', 'utf8' );
|
||||
fs.outputFileSync( '/etc/nginx/nginx.conf', Mustache.render( confnginx, confdata ), 'utf-8' );
|
||||
@ -139,10 +139,10 @@ Setup.druidid = ( confdata ) => {
|
||||
pageindex: "app_index_fr.html"
|
||||
} );
|
||||
if( addspaceweb.status == 200 ) {
|
||||
console.log( `WELL DONE run yarn dev to test then yarn startpm2 ` )
|
||||
logger.info( `WELL DONE run yarn dev to test then yarn startpm2 ` )
|
||||
}
|
||||
} else {
|
||||
console.log( 'Issue ', createclient )
|
||||
logger.info( 'Issue ', createclient )
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -27,13 +27,13 @@ Manage tag data collection
|
||||
Popup survey manager
|
||||
*/
|
||||
Tags.info = ( data, req ) => {
|
||||
//console.log('headers:', req.headers)
|
||||
/*console.log('hostname', req.hostname)
|
||||
console.log('req.ip', req.ip)
|
||||
console.log('req.ips', req.ips)
|
||||
console.log('req key', Object.keys(req))
|
||||
//logger.info('headers:', req.headers)
|
||||
/*logger.info('hostname', req.hostname)
|
||||
logger.info('req.ip', req.ip)
|
||||
logger.info('req.ips', req.ips)
|
||||
logger.info('req key', Object.keys(req))
|
||||
*/
|
||||
//console.log('req.rawHeaders', req.body)
|
||||
//logger.info('req.rawHeaders', req.body)
|
||||
data.useragent = `${req.headers['user-agent']}__${req.headers['accept-language']}__${req.headers['accept-encoding']}__${req.headers['connection']}`;
|
||||
data.ips = req.ips;
|
||||
data.ip = req.ip;
|
||||
@ -59,7 +59,7 @@ Tags.getfile = ( filename, req ) => {
|
||||
if( infotg[ 0 ] == "imgtg" ) {
|
||||
jsonfile.writeFile( `${config.tribes}/${infotg[1]}/tags/imgtg/${Date.now()}.json`, Tags.info( { filename: filename, messageId: infotg[ 2 ], operationId: infotg[ 3 ], identifiant: infotg[ 4 ] }, req ), function ( err ) {
|
||||
if( err ) {
|
||||
console.log( `Erreur de sauvegarde de tag:${filename}` )
|
||||
logger.info( `Erreur de sauvegarde de tag:${filename}` )
|
||||
}
|
||||
} );
|
||||
return {
|
||||
@ -71,13 +71,13 @@ Tags.getfile = ( filename, req ) => {
|
||||
}
|
||||
Tags.savehits = ( req ) => {
|
||||
if( !fs.existsSync( `${config.tribes}/${req.params.tribeid}` ) ) {
|
||||
console.log( `Erreur d'envoi de tag sur ${req.params.tribeid} pour ${req.params.r}` );
|
||||
logger.info( `Erreur d'envoi de tag sur ${req.params.tribeid} pour ${req.params.r}` );
|
||||
return false;
|
||||
} else {
|
||||
const info = JSON.parse( JSON.stringify( req.body ) );
|
||||
jsonfile.writeFile( `${config.tribes}/${req.params.tribeid}/tags/hits/${Date.now()}.json`, Tags.info( info, req ), function ( err ) {
|
||||
if( err ) {
|
||||
console.log( `Erreur de sauvegarde de tag pour ${req.params.tribeid} check si /tags/hits et /tags/imgtg exist bien ` )
|
||||
logger.info( `Erreur de sauvegarde de tag pour ${req.params.tribeid} check si /tags/hits et /tags/imgtg exist bien ` )
|
||||
}
|
||||
} );
|
||||
}
|
||||
@ -134,7 +134,7 @@ Tags.dataloadstat = ( tribeid ) => {
|
||||
agrege.data = jsonfile.readfileSync( `${config.tribes}/${tribeid}/tags/stats/data.json`, "utf-8" );
|
||||
agrege.graph = jsonfile.readfileSync( `${config.tribes}/${tribeid}/tags/stats/graph.json`, "utf-8" );
|
||||
} catch ( err ) {
|
||||
console.log( "ATTENTION tag reinitialisé en data.json et graph.json, s'il s'agit de 1ere connexion pas de pb. Le risque est de perdre les tag historiques" )
|
||||
logger.info( "ATTENTION tag reinitialisé en data.json et graph.json, s'il s'agit de 1ere connexion pas de pb. Le risque est de perdre les tag historiques" )
|
||||
//return { status: 503, payload: { info: ['Errconfig'], model: 'Tags', moreinfo: `Il manque un ${config.tribes}/${tribeid}/tags/stats/data.json ou stats/graph.json` } }
|
||||
}
|
||||
glob.sync( `${config.tribes}/${tribeid}/tags/hits/*` )
|
||||
@ -142,7 +142,7 @@ Tags.dataloadstat = ( tribeid ) => {
|
||||
const hit = jsonfile.readFileSync( f );
|
||||
const ts = parseInt( path.basename( f )
|
||||
.split( ".json" )[ 0 ] );
|
||||
//console.log(moment(ts).format('DD-MM-YYYY h:mm:ss'));
|
||||
//logger.info(moment(ts).format('DD-MM-YYYY h:mm:ss'));
|
||||
const tsm = moment( ts )
|
||||
const year = tsm.format( 'YYYY' );
|
||||
const month = tsm.format( 'MMM' );
|
||||
@ -150,7 +150,7 @@ Tags.dataloadstat = ( tribeid ) => {
|
||||
const hourod = tsm.format( 'HH' ) + "h";
|
||||
let newvisitor = false;
|
||||
let alreadydone = false;
|
||||
//console.log(hit.r, ts)
|
||||
//logger.info(hit.r, ts)
|
||||
// Agrege data pour # visiteur vs # de visiteur
|
||||
if( agrege.data[ hit.r ] ) {
|
||||
if( !agrege.data[ hit.r ].data.some( el => el[ 0 ] == ts ) ) {
|
||||
@ -218,12 +218,12 @@ Tags.dataloadstat = ( tribeid ) => {
|
||||
jsonfile.writeFileSync( `${config.tribes}/${tribeid}/tags/stats/graph.json`, agrege.graph, 'utf-8' );
|
||||
return { status: 200, payload: { info: [ 'Statsupdated' ], model: 'Tags' } }
|
||||
}
|
||||
//console.log(Tags.dataloadstat('yes'));
|
||||
//logger.info(Tags.dataloadstat('yes'));
|
||||
/*const ar = [
|
||||
[1, 1],
|
||||
[1, 2]
|
||||
]
|
||||
console.log(ar.some(el => el[0] == 1 && el[1] == 1))
|
||||
console.log(ar.some(el => el == [1, 3]))
|
||||
logger.info(ar.some(el => el[0] == 1 && el[1] == 1))
|
||||
logger.info(ar.some(el => el == [1, 3]))
|
||||
*/
|
||||
module.exports = Tags;
|
||||
|
@ -119,7 +119,7 @@ Tribes.create = ( data ) => {
|
||||
//return in prod all instance apxinfo={tribeids:[],logins:[]}
|
||||
// in dev return only local
|
||||
//check tribeid name is unique
|
||||
console.log( 'liste des tribeid', dataclient.tribeids )
|
||||
logger.info( 'liste des tribeid', dataclient.tribeids )
|
||||
if( dataclient.tribeids.includes( data.tribeid ) ) {
|
||||
return { status: 403, payload: { model: "client", info: [ 'tribeidalreadyexist' ] } }
|
||||
}
|
||||
@ -155,7 +155,7 @@ Tribes.archive = ( tribeid ) => {
|
||||
Tribes.init();
|
||||
return { status: 200, payload: { info: [ 'deletetribeidsuccessfull' ], models: 'Tribes', moreinfo: "TODO see in Tribes.archive" } }
|
||||
} catch ( err ) {
|
||||
console.log( "Erreur d'archivage", err )
|
||||
logger.info( "Erreur d'archivage", err )
|
||||
return { status: 403, payload: { info: [ 'archiveerror' ], models: 'Tribes', moreinfo: err } }
|
||||
}
|
||||
}
|
||||
@ -183,7 +183,7 @@ Tribes.checkaccessfiles = ( listfile, typeaccessrequested, useraccessrights, use
|
||||
if( !fs.existsSync( `${config.tribes}/${f}` ) ) {
|
||||
done = true;
|
||||
checkauthlistfile.ko.push( f )
|
||||
console.log( `${f} file does not exist` )
|
||||
logger.info( `${f} file does not exist` )
|
||||
} else {
|
||||
structf = f.split( '/' );
|
||||
}
|
||||
@ -196,11 +196,11 @@ Tribes.checkaccessfiles = ( listfile, typeaccessrequested, useraccessrights, use
|
||||
checkauthlistfile.ok.push( f );
|
||||
} else {
|
||||
// check if in folder we have a.info.json .file[f].shared{useruuid:'CRUDO'}
|
||||
console.log( 'structf', structf )
|
||||
logger.info( 'structf', structf )
|
||||
if( fs.existsSync( `${config.tribes}/${structf.slice(0,-1).join('/')}/.info.json` ) ) {
|
||||
inforep = fs.readJsonSync( `${config.tribes}/${structf.slice(0,-1).join('/')}/.info.json`, 'utf8' )
|
||||
}
|
||||
console.log( `no accessrights for ${f} for ${useruuid} ` )
|
||||
logger.info( `no accessrights for ${f} for ${useruuid} ` )
|
||||
}
|
||||
if( !done && inforep.file[ f ] && inforep.file[ f ] && inforep.file[ f ].shared && inforep.file[ f ].shared[ useruuid ] && inforep.file[ f ].shared[ useruuid ].includes( typeaccessrequested ) ) {
|
||||
done = true;
|
||||
@ -211,7 +211,7 @@ Tribes.checkaccessfiles = ( listfile, typeaccessrequested, useraccessrights, use
|
||||
checkauthlistfile.ko.push( f )
|
||||
}
|
||||
} // end loop for
|
||||
//console.log( 'checkauthlistfile', checkauthlistfile )
|
||||
//logger.info( 'checkauthlistfile', checkauthlistfile )
|
||||
return checkauthlistfile;
|
||||
}
|
||||
|
||||
@ -227,9 +227,9 @@ Tribes.dirls = ( tribeid, dir ) => {
|
||||
const listdir = []
|
||||
glob.sync( `${config.tribes}/${tribeid}/${dir}/*` )
|
||||
.forEach( f => {
|
||||
//console.log( f )
|
||||
//logger.info( f )
|
||||
const stats = fs.statSync( f );
|
||||
// console.log( stats )
|
||||
// logger.info( stats )
|
||||
if( stats.isFile() ) {
|
||||
listfile.push( path.basename( f ) )
|
||||
if( !comment.file[ path.basename( f ) ] ) {
|
||||
@ -248,7 +248,7 @@ Tribes.dirls = ( tribeid, dir ) => {
|
||||
.length;
|
||||
comment.dir[ path.basename( f ) ].mtime = stats.mtime;
|
||||
comment.dir[ path.basename( f ) ].ctime = stats.mtime;
|
||||
console.log( 'comment.dir', comment.dir )
|
||||
logger.info( 'comment.dir', comment.dir )
|
||||
}
|
||||
} );
|
||||
// on remove les file or dir that was deleted
|
||||
@ -260,7 +260,7 @@ Tribes.dirls = ( tribeid, dir ) => {
|
||||
.forEach( d => {
|
||||
if( !listdir.includes( d ) ) delete comment.dir[ d ]
|
||||
} )
|
||||
//console.log( comment )
|
||||
//logger.info( comment )
|
||||
fs.outputJson( `${config.tribes}/${tribeid}/${dir}/.info.json`, comment, 'utf-8' );
|
||||
return { status: 200, payload: { info: [ 'succestogetls' ], models: 'Tribes', moreinfo: comment } }
|
||||
};
|
||||
@ -326,12 +326,12 @@ Tribes.addspaceweb = ( data ) => {
|
||||
}
|
||||
const nginxrestart = execSync( `sudo systemctl restart nginx` )
|
||||
.toString();
|
||||
console.log( 'Restart nginx', nginxrestart )
|
||||
logger.info( 'Restart nginx', nginxrestart )
|
||||
if( data.mode == "prod" ) {
|
||||
// get ssl certificate ATTENTION il faut ajouter -d devant chaque domain qui redirige vers l'espace web.
|
||||
const certbot = execSync( `sudo certbot --nginx -d ${data.dnsname.join(' -d ')}` )
|
||||
.toString();
|
||||
console.log( 'certbot is running A CHECKER POUR UNE VRAIE PROD ????', certbot )
|
||||
logger.info( 'certbot is running A CHECKER POUR UNE VRAIE PROD ????', certbot )
|
||||
}
|
||||
//sh execution to update change requested
|
||||
return {
|
||||
@ -344,7 +344,7 @@ Tribes.addspaceweb = ( data ) => {
|
||||
};
|
||||
}
|
||||
Tribes.restartapixtribe = ( tribeid ) => {
|
||||
console.log( 'A restarting was requested 5mn ago from a new spacedev for ' + tribeid )
|
||||
logger.info( 'A restarting was requested 5mn ago from a new spacedev for ' + tribeid )
|
||||
execSync( 'yarn restartpm2' );
|
||||
}
|
||||
|
||||
|
@ -14,15 +14,15 @@ const UploadFiles = {};
|
||||
UploadFiles.get = function ( filename, header ) {
|
||||
// check file exist
|
||||
const file = `${config.tribes}/${header.xworkon}/${filename}`;
|
||||
// console.log('fichier demande ', file);
|
||||
// logger.info('fichier demande ', file);
|
||||
if( !fs.existsSync( file ) ) {
|
||||
// console.log('le fichier demande n existe pas ', file);
|
||||
// logger.info('le fichier demande n existe pas ', file);
|
||||
return {
|
||||
status: 404,
|
||||
payload: { info: [ 'fileUnknown' ], model: 'UploadFiles' }
|
||||
};
|
||||
} else {
|
||||
console.log( 'envoie le fichier ', file );
|
||||
logger.info( 'envoie le fichier ', file );
|
||||
return {
|
||||
status: 200,
|
||||
payload: { info: [ 'fileknown' ], model: 'UploadFiles', file: file }
|
||||
@ -34,7 +34,7 @@ UploadFiles.addjson = function ( data, header ) {
|
||||
Le header = {X-WorkOn:"",destinationfile:"", filename:""}
|
||||
Le body = {jsonp:{},callback:function to launch after download,'code':'mot cle pour verifier que le fichier est à garder'}
|
||||
*/
|
||||
// console.log(req.body.jsonp);
|
||||
// logger.info(req.body.jsonp);
|
||||
try {
|
||||
jsonfile.writeFileSync( header.destinationfile + '/' + header.filename, data.jsonp );
|
||||
if( data.callback ) {
|
||||
@ -55,7 +55,7 @@ UploadFiles.addjson = function ( data, header ) {
|
||||
}
|
||||
};
|
||||
} catch ( err ) {
|
||||
console.log( 'Impossible de sauvegarder le fichier, A COMPRENDRE', err );
|
||||
logger.info( 'Impossible de sauvegarder le fichier, A COMPRENDRE', err );
|
||||
return {
|
||||
status: 503,
|
||||
payload: { info: [ 'savingError' ], model: 'UploadFiles' }
|
||||
@ -64,29 +64,29 @@ UploadFiles.addjson = function ( data, header ) {
|
||||
};
|
||||
UploadFiles.add = function ( req, header ) {
|
||||
const form = new formidable.IncomingForm();
|
||||
console.log( 'req.headers', req.headers );
|
||||
console.log( 'req.params', req.params );
|
||||
console.log( 'req.query', req.query );
|
||||
console.log( 'req.body', req.body );
|
||||
logger.info( 'req.headers', req.headers );
|
||||
logger.info( 'req.params', req.params );
|
||||
logger.info( 'req.query', req.query );
|
||||
logger.info( 'req.body', req.body );
|
||||
let destinationfile = `${config.tribes}/${header.xworkon}/${
|
||||
header.destinationfile
|
||||
}`;
|
||||
form.parse( req, function ( err, fields, files ) {
|
||||
console.log( 'files', files.file.path );
|
||||
console.log( 'fields', fields );
|
||||
logger.info( 'files', files.file.path );
|
||||
logger.info( 'fields', fields );
|
||||
const oldpath = files.file.path;
|
||||
destinationfile += '/' + files.file.name;
|
||||
console.log( 'oldpath', oldpath );
|
||||
console.log( 'destinationfile', destinationfile );
|
||||
logger.info( 'oldpath', oldpath );
|
||||
logger.info( 'destinationfile', destinationfile );
|
||||
fs.copyFile( oldpath, destinationfile, function ( err ) {
|
||||
if( err ) {
|
||||
console.log( err );
|
||||
logger.info( err );
|
||||
return {
|
||||
status: 500,
|
||||
payload: { info: [ 'savingError' ], model: 'UploadFiles' }
|
||||
};
|
||||
} else {
|
||||
console.log( 'passe' );
|
||||
logger.info( 'passe' );
|
||||
fs.unlink( oldpath );
|
||||
return {
|
||||
status: 200,
|
||||
|
@ -23,11 +23,11 @@ checkdata.test.emailadress = ( ctx, email ) => {
|
||||
* it check if each eamil separate by , are correct
|
||||
*/
|
||||
checkdata.test.emailadresslist = ( ctx, emaillist ) => {
|
||||
//console.log(emaillist.split(','))
|
||||
//logger.info(emaillist.split(','))
|
||||
if( emaillist.length > 0 ) {
|
||||
const emails = emaillist.split( ',' );
|
||||
for( var i in emails ) {
|
||||
//console.log(emails[i])
|
||||
//logger.info(emails[i])
|
||||
if( !checkdata.test.emailadress( "", emails[ i ].trim() ) ) {
|
||||
return false
|
||||
}
|
||||
@ -53,7 +53,7 @@ checkdata.test.unique = ( ctx, val ) => {
|
||||
if( ctx.list[ ctx.currentfield ] ) {
|
||||
return !ctx.list[ ctx.currentfield ].includes( val );
|
||||
} else {
|
||||
console.log( 'ERR no list for field:' + ctx.currentfield );
|
||||
logger.info( 'ERR no list for field:' + ctx.currentfield );
|
||||
return false;
|
||||
}
|
||||
};
|
||||
@ -85,11 +85,11 @@ checkdata.test.phoneNumber = ( ctx, phoneNumber ) => {
|
||||
* it check if each phone separate by , are correct
|
||||
*/
|
||||
checkdata.test.phoneNumberlist = ( ctx, phonelist ) => {
|
||||
//console.log(emaillist.split(','))
|
||||
//logger.info(emaillist.split(','))
|
||||
if( phonelist.length > 0 ) {
|
||||
const phones = phonelist.split( ',' );
|
||||
for( var i in phones ) {
|
||||
//console.log(emails[i])
|
||||
//logger.info(emails[i])
|
||||
if( !checkdata.test.phoneNumber( "", phones[ i ].trim() ) ) {
|
||||
return false
|
||||
}
|
||||
@ -117,8 +117,8 @@ checkdata.normalize.zfill10 = ( ctx, num ) => {
|
||||
return s;
|
||||
};
|
||||
/*let tt = "+33 1 02.03 04 05";
|
||||
console.log(checkdata.test.phoneNumber('', tt))
|
||||
console.log(checkdata.normalize.phoneNumber('', tt))
|
||||
logger.info(checkdata.test.phoneNumber('', tt))
|
||||
logger.info(checkdata.normalize.phoneNumber('', tt))
|
||||
*/
|
||||
checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
/*
|
||||
@ -129,9 +129,9 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
clean data eventually reformated
|
||||
updateDatabase}
|
||||
*/
|
||||
console.log( 'contexte', contexte );
|
||||
console.log( 'referentiel', referential );
|
||||
console.log( 'data', data );
|
||||
logger.info( 'contexte', contexte );
|
||||
logger.info( 'referentiel', referential );
|
||||
logger.info( 'data', data );
|
||||
const invalidefor = [];
|
||||
const objectdef = {};
|
||||
const listfield = referential.map( ch => {
|
||||
@ -150,7 +150,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
if( objectdef[ field ].check ) {
|
||||
// check data with rule list in check
|
||||
objectdef[ field ].check.forEach( ctrl => {
|
||||
console.log( 'ctrl', ctrl );
|
||||
logger.info( 'ctrl', ctrl );
|
||||
contexte.currentfield = field;
|
||||
if( !checkdata.test[ ctrl ] ) {
|
||||
invalidefor.push( 'ERR check function does not exist :' + ctrl + '___' + field )
|
||||
@ -163,7 +163,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
|
||||
if( objectdef[ field ].nouserupdate ) {
|
||||
// check if user can modify this information
|
||||
console.log(
|
||||
logger.info(
|
||||
'evaluation :' + field + ' -- ' + objectdef[ field ].nouserupdate,
|
||||
eval( objectdef[ field ].nouserupdate )
|
||||
);
|
||||
@ -172,7 +172,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
}
|
||||
}
|
||||
} );
|
||||
console.log( {
|
||||
logger.info( {
|
||||
invalidefor,
|
||||
data
|
||||
} );
|
||||
|
@ -6,7 +6,7 @@ const moment = require( 'moment' );
|
||||
const config = require( '../config' );
|
||||
const utils = {};
|
||||
|
||||
console.log( "Check in /utils/index.js to find usefull function for your dev.\n Feel free to send suggestion, code to maintainer of apixtribe project (see /package.json to get email).\n We'll add to the roadmap to add it." );
|
||||
logger.info( "Check in /utils/index.js to find usefull function for your dev.\n Feel free to send suggestion, code to maintainer of apixtribe project (see /package.json to get email).\n We'll add to the roadmap to add it." );
|
||||
|
||||
/**
|
||||
* EMAIL
|
||||
@ -99,7 +99,7 @@ utils.generecompteur = ( filecpt, typeincrement ) => {
|
||||
try {
|
||||
num = parseInt( fs.readFileSync( file, 'utf8' ) ) + 1;
|
||||
} catch ( err ) {
|
||||
console.log( "Nouveau compteur incrementale ", file )
|
||||
logger.info( "Nouveau compteur incrementale ", file )
|
||||
}
|
||||
fs.writeFileSync( file, num, 'utf8' );
|
||||
return prefix + num
|
||||
@ -109,9 +109,9 @@ utils.generecompteur = ( filecpt, typeincrement ) => {
|
||||
*/
|
||||
utils.json2csv = ( jsondata, options, callback ) => {
|
||||
// uniquement json = [{niv1:val,niv1:[liste of val]}]
|
||||
// console.log('_________________________');
|
||||
// console.log(jsondata)
|
||||
// console.log('_________________________');
|
||||
// logger.info('_________________________');
|
||||
// logger.info(jsondata)
|
||||
// logger.info('_________________________');
|
||||
if( jsondata.length == 0 ) {
|
||||
return callback( "Empty json", null );
|
||||
}
|
||||
@ -131,7 +131,7 @@ utils.json2csv = ( jsondata, options, callback ) => {
|
||||
let entete = '';
|
||||
let prem = true;
|
||||
for( const j in jsondata ) {
|
||||
// console.log(jsondata[j])
|
||||
// logger.info(jsondata[j])
|
||||
for( const c in options.champs ) {
|
||||
if( prem ) {
|
||||
entete += options.champs[ c ] + options.sep;
|
||||
@ -143,7 +143,7 @@ utils.json2csv = ( jsondata, options, callback ) => {
|
||||
let currentValue = "";
|
||||
if( jsondata[ j ][ options.champs[ c ] ] ) currentValue += jsondata[ j ][ options.champs[ c ] ];
|
||||
options.replacespecialcarJson2Csv.forEach( re => {
|
||||
//console.log(currentValue)
|
||||
//logger.info(currentValue)
|
||||
currentValue = currentValue.replace( re[ 1 ], re[ 0 ] )
|
||||
} )
|
||||
csv += currentValue + options.sep;
|
||||
@ -156,7 +156,7 @@ utils.json2csv = ( jsondata, options, callback ) => {
|
||||
if( prem ) {
|
||||
prem = false;
|
||||
entete = entete.substring( 0, entete.length - 1 ) + options.retln;
|
||||
// console.log(entete)
|
||||
// logger.info(entete)
|
||||
}
|
||||
}
|
||||
// return entete + csv;
|
||||
@ -188,7 +188,7 @@ utils.getHeaders = ( lines, sep ) => lines[ 0 ].split( sep )
|
||||
Usage example:
|
||||
fiche.csv2article = (err, fiche) => {
|
||||
if (!err) {
|
||||
console.log(fiche)
|
||||
logger.info(fiche)
|
||||
}
|
||||
}
|
||||
utils.csv2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
|
||||
@ -254,9 +254,9 @@ utils.analysestring = ( string ) => {
|
||||
bufcode += "-" + string[ p ].charCodeAt();
|
||||
if( i == 20 ) {
|
||||
if( avecRL ) {
|
||||
console.log( `${buftxt} - ${bufcode}` )
|
||||
logger.info( `${buftxt} - ${bufcode}` )
|
||||
} else {
|
||||
console.log( `${buftxt} ---- ${bufcode}` )
|
||||
logger.info( `${buftxt} ---- ${bufcode}` )
|
||||
}
|
||||
i = 0;
|
||||
buftxt = ""
|
||||
@ -290,12 +290,12 @@ const txtstring = `32932,BK_F2F_B_COM_10x1H-09,"My Communication Workshop ""Sess
|
||||
32934,BK_F2F_B_JOB_10x1H-01,"My Job Search Workshop Session N°1 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32934,1108,,,0,
|
||||
32935,BK_F2F_B_JOB_10x1H-02,"My Job Search Workshop Session N°2 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32935,1108,,,0,`
|
||||
//utils.analysestring(txtstring)
|
||||
//console.log(utils.replacecarbtweendblquote(txtstring, ",", 'CARSEPARATOR')
|
||||
//logger.info(utils.replacecarbtweendblquote(txtstring, ",", 'CARSEPARATOR')
|
||||
// .split("\n")[0].split(","))
|
||||
utils.csv2json = ( csv, options, callback ) => {
|
||||
// EN CAS DE PB AVEC UN FICHIER EXCEL RECALCITRANT
|
||||
// l'ouvrir dans calc linux et sauvegarder csv utf8, ; , " enregistrer le contenu de la cellule comme affiché
|
||||
console.log( '\n--------------- CSV2JSON ---------------\n' );
|
||||
logger.info( '\n--------------- CSV2JSON ---------------\n' );
|
||||
// Default CSV options
|
||||
if( !options.retln ) options.retln = '\n';
|
||||
if( csv.indexOf( '\n\r' ) > -1 ) options.retln = '\n\r';
|
||||
@ -317,8 +317,8 @@ utils.csv2json = ( csv, options, callback ) => {
|
||||
const lines = csv.split( options.retln );
|
||||
const headers = utils.getHeaders( lines, options.sep );
|
||||
let unknownHeaders = '';
|
||||
//console.log('headers', headers)
|
||||
//console.log('options.champs', options.champs)
|
||||
//logger.info('headers', headers)
|
||||
//logger.info('options.champs', options.champs)
|
||||
headers.forEach( header => {
|
||||
// Si un header n'est pas présent dans la liste des champs prédéfinis
|
||||
// on l'ajoute aux champs inconnus
|
||||
@ -421,7 +421,7 @@ return data = {xxxx:value}
|
||||
Usage example:
|
||||
fiche.csvparam2article = (err, fiche) => {
|
||||
if (!err) {
|
||||
console.log(fiche)
|
||||
logger.info(fiche)
|
||||
}
|
||||
}
|
||||
utils.csvparam2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
|
||||
@ -433,7 +433,7 @@ utils.csvparam2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/
|
||||
|
||||
*/
|
||||
utils.csvparam2json = ( csv, options, callback ) => {
|
||||
console.log( '\n--------------- CSVPARAM2JSON ---------------\n' );
|
||||
logger.info( '\n--------------- CSVPARAM2JSON ---------------\n' );
|
||||
let etat = "";
|
||||
const param = {};
|
||||
if( !options.retln ) {
|
||||
@ -459,7 +459,7 @@ utils.csvparam2json = ( csv, options, callback ) => {
|
||||
const lines = csv.split( options.retln );
|
||||
for( let i = 0; i < lines.length; i++ ) {
|
||||
const infol = lines[ i ].split( options.sep )
|
||||
//console.log(infol)
|
||||
//logger.info(infol)
|
||||
if( infol[ 0 ].length > 4 && infol.length < 2 ) {
|
||||
// si le 1er element à plus de 4 caractere et s'il y a moins de 3 colonnes c'est qu'il y a un pb
|
||||
etat += `Erreur sur ${lines[i]} moins de 3 column separé par ${options.sep}`;
|
||||
@ -469,19 +469,19 @@ utils.csvparam2json = ( csv, options, callback ) => {
|
||||
// reservées pour le csv ; ' etc..'
|
||||
if( infol[ 1 ] && infol[ 1 ] + "" == infol[ 1 ] ) {
|
||||
options.replacespecialcarCsv2Json.forEach( re => {
|
||||
//console.log("gggggggggggggggggggg", infol[1])
|
||||
//logger.info("gggggggggggggggggggg", infol[1])
|
||||
infol[ 1 ] = infol[ 1 ].replace( re[ 0 ], re[ 1 ] );
|
||||
} )
|
||||
// console.log(infol[1])
|
||||
// logger.info(infol[1])
|
||||
infol[ 1 ] = infol[ 1 ].replace( /'|’/g, "\"" );
|
||||
//console.log(infol[1])
|
||||
//logger.info(infol[1])
|
||||
if( infol[ 1 ].toLowerCase() === 'true' ) {
|
||||
infol[ 1 ] = true;
|
||||
} else if( infol[ 1 ].toLowerCase() === 'false' ) {
|
||||
infol[ 1 ] = false;
|
||||
}
|
||||
}
|
||||
console.log( infol[ 1 ] )
|
||||
logger.info( infol[ 1 ] )
|
||||
//supprime des lignes vides
|
||||
if( infol[ 0 ] == '' ) continue;
|
||||
if( infol[ 0 ].indexOf( options.seplevel ) == -1 ) {
|
||||
@ -499,7 +499,7 @@ utils.csvparam2json = ( csv, options, callback ) => {
|
||||
param[ arbre[ 0 ] ][ arbre[ 1 ] ] = infol[ 1 ];
|
||||
} else {
|
||||
if( !param[ arbre[ 0 ] ] ) param[ arbre[ 0 ] ] = [];
|
||||
//console.log('aff', infol[1].substring(1, infol[1].length - 1).replace(/""/g, '"'))
|
||||
//logger.info('aff', infol[1].substring(1, infol[1].length - 1).replace(/""/g, '"'))
|
||||
eval( "result=" + infol[ 1 ] )
|
||||
//.substring(1, infol[1].length - 1).replace(/""/g, '"'))
|
||||
param[ arbre[ 0 ] ].push( result )
|
||||
@ -514,7 +514,7 @@ utils.csvparam2json = ( csv, options, callback ) => {
|
||||
if( !param[ arbre[ 0 ] ] ) param[ arbre[ 0 ] ] = {};
|
||||
if( !param[ arbre[ 0 ] ][ arbre[ 1 ] ] ) param[ arbre[ 0 ] ][ arbre[ 1 ] ] = [];
|
||||
//eval("result = \"test\"");
|
||||
//console.log(result);
|
||||
//logger.info(result);
|
||||
eval( "result=" + infol[ 1 ] );
|
||||
//.substring(1, infol[1].length - 1).replace(/""/g, '"'))
|
||||
param[ arbre[ 0 ] ][ arbre[ 1 ] ].push( result )
|
||||
@ -541,7 +541,7 @@ utils.csvparam2json = ( csv, options, callback ) => {
|
||||
}
|
||||
}
|
||||
// JSON.parse(JSON.stringify(param))
|
||||
console.log( 'kkkkkkkkkkkkkkkkkk', param[ 'catalogue' ][ 'filtrecatalog' ][ 'searchengine' ] )
|
||||
logger.info( 'kkkkkkkkkkkkkkkkkk', param[ 'catalogue' ][ 'filtrecatalog' ][ 'searchengine' ] )
|
||||
if( etat == "" ) {
|
||||
return callback( null, JSON.parse( JSON.stringify( param ) ) );
|
||||
} else {
|
||||
@ -585,9 +585,9 @@ utils.testinarray = ( array, arrayreferent ) => {
|
||||
// au moins un element de array existe dans arryreferent
|
||||
let exist = false;
|
||||
if( arrayreferent ) {
|
||||
//console.log('arrrrrrrrrrrrrrr', arrayreferent)
|
||||
//logger.info('arrrrrrrrrrrrrrr', arrayreferent)
|
||||
array.forEach( e => {
|
||||
//console.log(e)
|
||||
//logger.info(e)
|
||||
if( arrayreferent.includes( e ) ) exist = true
|
||||
} )
|
||||
}
|
||||
|
@ -17,8 +17,8 @@ router.post( '/', checkHeaders, ( req, res ) => {
|
||||
// check if a receiver is well identify if not then it send message to all user tribeid to inform **
|
||||
if( !req.body.desttribeid ) req.body.desttribeid = req.session.header.xworkon;
|
||||
if( !req.body.lang ) req.body.lang = req.session.header.xlang;
|
||||
console.log( '/messages t send for ', req.session.header.xworkon );
|
||||
//console.log(' Content: ',req.body);
|
||||
logger.info( '/messages t send for ', req.session.header.xworkon );
|
||||
//logger.info(' Content: ',req.body);
|
||||
const result = Messages.postinfo( req.body );
|
||||
res.status( result.status )
|
||||
.send( result.data )
|
||||
@ -32,7 +32,7 @@ router.put( '/:objectname/:uuid', checkHeaders, isAuthenticated, ( req, res ) =>
// No data management is done here; if needed you can add a plugin to create a workflow based on the object
// if a specific data check is needed => req.body.callback={tribeidplugin,pluginname,function} will run pluginname.function(data) to add data or run specific stuff before the message object is saved in /objectname/data.uuid_lg/json
let result;
console.log( "object", req.params.objectname )
logger.info( "object", req.params.objectname )
if( req.params.objectname == 'notifications' ) {
//uuid is a timestamp
req.body.time = req.params.uuid;
@ -42,7 +42,7 @@ router.put( '/:objectname/:uuid', checkHeaders, isAuthenticated, ( req, res ) =>
req.body.object = req.params.objectname;
result = Messages.object( req.body, req.session.header );
}
//console.log( 'result', result );
//logger.info( 'result', result );
res.status( result.status )
.json( result.data )
} );
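A sketch of the optional pre-save callback payload the comment above describes; the key names are copied from that comment and the values are placeholders, so treat the whole shape as an assumption:

const body = {
  object: 'notifications',          // set by the route from req.params.objectname
  time: '1651245000000',            // the :uuid param, used as a timestamp for notifications
  callback: {
    tribeidplugin: 'mytribe',       // tribe owning the plugin
    pluginname: 'myworkflow',       // hypothetical plugin name
    function: 'enrich'              // run as pluginname.function(data) before the object is saved
  }
};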
@ -50,7 +50,7 @@ router.put( '/:objectname/:uuid', checkHeaders, isAuthenticated, ( req, res ) =>
|
||||
router.get( '/user', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
// run agregate for tribeid concerned
|
||||
//
|
||||
console.log( "request notification for user", req.session.header.xpaganid );
logger.info( "request notification for user", req.session.header.xpaganid );
const app = {
|
||||
tribeid: req.session.header.xapp.split( ':' )[ 0 ],
|
||||
website: req.session.header.xapp.split( ':' )[ 1 ],
|
||||
|
@ -10,7 +10,7 @@ const hasAccessrighton = require( '../middlewares/hasAccessrighton' );
|
||||
const router = express.Router();
|
||||
|
||||
router.get('/searchauth/:objectname/:question',checkHeaders,isAuthenticated,( req, res ) => {
|
||||
console.log( 'route referentials get all language' + req.params.objectname + '-' + req.params.question );
|
||||
logger.info( 'route referentials get all language' + req.params.objectname + '-' + req.params.question );
|
||||
const getref = Referentials.getref( true, req.params.source, req.params.idref, req.session.header.xworkon, req.session.header.xlang );
|
||||
// Return any status the data if any erreur return empty object
|
||||
res.jsonp( getref.payload.data );
|
||||
|
@ -12,7 +12,7 @@ const router = express.Router();
|
||||
|
||||
|
||||
router.post( '/ggsheet2json', checkHeaders, async ( req, res ) => {
|
||||
console.log( 'route outputs sheet to json' );
|
||||
logger.info( 'route outputs sheet to json' );
|
||||
let result = await Outputs.ggsheet2json( req.body, req.session.header );
|
||||
res.send( result );
|
||||
} );
|
||||
@ -22,7 +22,7 @@ router.post( '/ggsheet2json', checkHeaders, async ( req, res ) => {
|
||||
|
||||
// checkHeaders, isuploadFileValid
|
||||
router.post( '/msg', checkHeaders, async ( req, res ) => {
|
||||
console.log( 'route outputs msg post ' );
|
||||
logger.info( 'route outputs msg post ' );
|
||||
const envoi = await Outputs.generemsg( req.body, req.session.header );
|
||||
res.status( envoi.status )
|
||||
.send( {
|
||||
@ -31,7 +31,7 @@ router.post( '/msg', checkHeaders, async ( req, res ) => {
|
||||
} );
|
||||
/*test functionnalité
|
||||
router.post('/msgtest', checkHeaders, isemailValid, async (req, res) => {
|
||||
console.log('route outputs msg post en test');
|
||||
logger.info('route outputs msg post en test');
|
||||
const envoi = await Outputstest.generemsg(req.body, req.session.header);
|
||||
res.status(envoi.status).send({
|
||||
payload: envoi.payload
|
||||
@ -39,23 +39,23 @@ router.post('/msgtest', checkHeaders, isemailValid, async (req, res) => {
|
||||
});
|
||||
*/
|
||||
router.post( '/template', checkHeaders, ( req, res ) => {
|
||||
console.log( 'route outputs post de fichier template ' );
|
||||
logger.info( 'route outputs post de fichier template ' );
|
||||
// a callback can be pass to req.body to run a specific process after upload
|
||||
const saveFile = UploadFile.addjson( req.body, req.session.header );
|
||||
console.log( saveFile );
|
||||
logger.info( saveFile );
|
||||
res.send( saveFile );
|
||||
// res.send({ status: 200, payload: { info: 'fine' } });
|
||||
} );
|
||||
|
||||
router.post( '/pdf', checkHeaders, ( req, res ) => {
console.log( 'route outputs pdf post' );
logger.info( 'route outputs pdf post' );
Outputs.generepdf( req.body, req.session.header )
.then( ( doc ) => {
res.status( doc.status )
.download( doc.payload.data.path, doc.payload.data.name );
} )
.catch( ( err ) => {
console.log( err );
logger.info( err );
res.status( err.status )
.send( { payload: err.payload } );
} );
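A hedged caller sketch for this route: res.download streams the generated file, so a Node client would ask for a binary response; the mount path /outputs and the request body are placeholders:

const fs = require( 'fs-extra' );
const axios = require( 'axios' );
axios.post( 'https://backoffice.example/outputs/pdf', { /* data expected by Outputs.generepdf */ },
  { headers: myheaders, responseType: 'arraybuffer' } ) // myheaders: the same x* headers used elsewhere in this diff
  .then( rep => fs.writeFile( '/tmp/out.pdf', rep.data ) )
  .catch( err => { /* err.response.data.payload, per the catch branch above */ } );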
@ -54,7 +54,7 @@ router.get( '/isauth', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
.send( { info: "well authenticated" } )
|
||||
} )
|
||||
router.post( '/login', checkHeaders, async ( req, res ) => {
// console.log('POST /users/login with: ', req.app.locals.header);
// logger.info('POST /users/login with: ', req.app.locals.header);
/*
Check a password for a login to obtain an authentication token
valid for 1 hour or 1 day
@ -66,13 +66,13 @@ router.post( '/login', checkHeaders, async ( req, res ) => {
useful while the user resets their password.
@return
*/
console.log( 'login for ', req.body, "in", req.session.header )
logger.info( 'login for ', req.body, "in", req.session.header )
const log = await Pagans.loginUser( req.session.header, req.body, true );
console.log( "log user login", log );
logger.info( "log user login", log );
if( log.status == 200 ) {
// update req.app.locals.tokens for this uuid just after login success so that the next isAuth check will be valid
req.app.locals.tokens[ log.data.user.UUID ] = { TOKEN: log.data.user.TOKEN, ACCESSRIGHTS: log.data.user.ACCESSRIGHTS }
console.log( req.app.locals )
logger.info( req.app.locals )
}
return res.status( log.status )
.send( log.data );
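A hedged sketch of the client flow this route supports, mirroring pwa.auth.authentification further down in this diff: the returned UUID and TOKEN are reused as the xpaganid and xauth headers of later calls:

const auth = await axios.post( `https://${urlbackoffice}/users/login`,
  { LOGIN: 'adminapixtribe', PASSWORD: '...' },   // field names as in the curl example later in this diff
  { headers } );
headers.xpaganid = auth.data.payload.data.UUID;   // shape as read by pwa.auth.authentification below
headers.xauth = auth.data.payload.data.TOKEN;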
@ -96,7 +96,7 @@ router.get( '/getlinkwithoutpsw/:email', checkHeaders, async ( req, res ) => {
|
||||
}
|
||||
}
|
||||
*/
|
||||
console.log( `GET /users/getlinkwithoutpsw for email: ${req.params.email} tribeid :${req.header('X-Client-Id')}` );
|
||||
logger.info( `GET /users/getlinkwithoutpsw for email: ${req.params.email} tribeid :${req.header('X-Client-Id')}` );
|
||||
if( !req.params.email ) {
|
||||
return res.status( 404 )
|
||||
.send( {
|
||||
@ -106,47 +106,47 @@ router.get( '/getlinkwithoutpsw/:email', checkHeaders, async ( req, res ) => {
|
||||
} else {
|
||||
try {
|
||||
const getlink = await Pagans.getlinkwithoutpsw( req.params.email, req.session.header );
|
||||
console.log( 'getlink', getlink )
|
||||
logger.info( 'getlink', getlink )
|
||||
//met à jour le token créer pour le uuid
|
||||
req.app.locals.tokens[ getlink.data.info.xuuid ] = getlink.data.info.token;
|
||||
// attention si on relance le serveur le token temporaire est perdu
|
||||
return res.status( getlink.status )
|
||||
.send( getlink.data );
|
||||
} catch ( err ) {
|
||||
console.log( err )
|
||||
logger.info( err )
|
||||
return res.status( 500 )
|
||||
.send( { info: [ 'errServer' ], model: 'Pagans' } );
|
||||
}
|
||||
}
|
||||
} );
|
||||
router.post( '/register', checkHeaders, async ( req, res ) => {
|
||||
console.log( `POST /users for ${req.session.header.xtribe}` );
|
||||
logger.info( `POST /users for ${req.session.header.xtribe}` );
|
||||
if( req.session.header.xauth == '123123' ) {
|
||||
// Creation d'un utilisateur avec information de base aucun droit
|
||||
// On modifie le contenu du form pour n egarder que login/email et psw
|
||||
// pour le client_id permet de traiter un user en attente de validation
|
||||
console.log( 'req du post', req );
|
||||
logger.info( 'req du post', req );
|
||||
}
|
||||
} );
|
||||
router.get( '/info/:listindex', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), async ( req, res ) => {
|
||||
console.log( `get users info on tribeid ${req.session.header.xworkon} for ${req.params.listindex} with accessright`, req.session.header.accessrights.data );
|
||||
logger.info( `get users info on tribeid ${req.session.header.xworkon} for ${req.params.listindex} with accessright`, req.session.header.accessrights.data );
|
||||
const result = await Pagans.getinfoPagans( req.session.header.xpresworkon, req.session.header.accessrights, req.params.listindex );
|
||||
res.status( result.status )
|
||||
.send( result.data );
|
||||
} );
|
||||
router.get( '/list/:filter/:field', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), async ( req, res ) => {
|
||||
console.log( 'GET /users/list/filtre/champs list for ' + req.session.header.xworkon );
|
||||
logger.info( 'GET /users/list/filtre/champs list for ' + req.session.header.xworkon );
|
||||
if(
|
||||
[ 'admin', 'manager' ].includes( req.session.header.decodetoken[ 'apps' + req.session.header.xworkon + 'profil' ] ) ) {
|
||||
try {
|
||||
const userslist = await Pagans.getUserlist( req.session.header, req.params.filter, req.params.field );
|
||||
console.log( 'userslist', userslist );
|
||||
logger.info( 'userslist', userslist );
|
||||
if( userslist.status == 200 ) {
|
||||
return res.status( userslist.status )
|
||||
.send( userslist.data );
|
||||
}
|
||||
} catch ( err ) {
|
||||
console.log( err );
|
||||
logger.info( err );
|
||||
return res.status( 400 )
|
||||
.send( { info: 'erreur' } );
|
||||
}
|
||||
@ -159,15 +159,15 @@ router.get( '/list/:filter/:field', checkHeaders, isAuthenticated, hasAccessrigh
|
||||
}
|
||||
} );
|
||||
router.get( '/uuid/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'R' ), async ( req, res ) => {
|
||||
console.log( `GET /users/uuid/${req.params.id}` );
|
||||
//console.log('req.app.locals: ', req.app.locals);
|
||||
//console.log('req.session', req.session);
|
||||
logger.info( `GET /users/uuid/${req.params.id}` );
|
||||
//logger.info('req.app.locals: ', req.app.locals);
|
||||
//logger.info('req.session', req.session);
|
||||
const result = await Pagans.getUser( req.params.id, req.session.header.xworkon, req.session.header.accessrights );
|
||||
res.status( result.status )
|
||||
.send( result.data );
|
||||
} );
|
||||
router.put( '/chgpsw/:id', checkHeaders, isAuthenticated, async ( req, res ) => {
|
||||
console.log( `PUT update /users/chgpsw/${req.params.id}` );
|
||||
logger.info( `PUT update /users/chgpsw/${req.params.id}` );
|
||||
try {
|
||||
const majpsw = await Pagans.updateUserpassword( req.params.id, req.session.header, req.body );
|
||||
res.status( majpsw.status )
|
||||
@ -181,15 +181,15 @@ router.put( '/chgpsw/:id', checkHeaders, isAuthenticated, async ( req, res ) =>
|
||||
}
|
||||
} );
|
||||
router.post( '/uuid', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'C' ), async ( req, res ) => {
|
||||
console.log( 'POST /users create for ' + req.session.header.xworkon, req.body );
|
||||
logger.info( 'POST /users create for ' + req.session.header.xworkon, req.body );
|
||||
const usercreate = await Pagans.createUser( req.session.header, req.body );
|
||||
return res.status( usercreate.status )
|
||||
.send( usercreate.data );
|
||||
} );
|
||||
router.put( '/uuid/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'U' ), async ( req, res ) => {
|
||||
console.log( `PUT update /users/${req.params.id}` );
|
||||
// console.log('req.app.locals: ', req.app.locals);
|
||||
// console.log('req.session', req.session);
|
||||
logger.info( `PUT update /users/${req.params.id}` );
|
||||
// logger.info('req.app.locals: ', req.app.locals);
|
||||
// logger.info('req.session', req.session);
|
||||
try {
|
||||
const majUser = await Pagans.updateUser( req.params.id, req.session.header, req.body );
|
||||
res.status( majUser.status )
|
||||
@ -203,7 +203,7 @@ router.put( '/uuid/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users
|
||||
}
|
||||
} );
|
||||
router.delete( '/uuid/:id', checkHeaders, isAuthenticated, hasAccessrighton( 'users', 'D' ), ( req, res ) => {
|
||||
console.log( `DELETE /users/uuid/${req.params.id}` );
|
||||
logger.info( `DELETE /users/uuid/${req.params.id}` );
|
||||
const result = Pagans.deleteUser( req.params.id, req.session.header );
|
||||
res.status( result.status )
|
||||
.send( result.data );
|
||||
|
@ -19,7 +19,7 @@ router.get( '/clientconf/:keylist', checkHeaders, isAuthenticated, ( req, res )
/*
if (req.session.header.accessrights.data[ "Alltribeid" ] && req.session.header.accessrights.data[ "Alltribeid" ].referentials.includes('R') ;
*/
console.log( `get clientconf for ${req.session.header.xworkon} on ${req.params.keylist}` )
logger.info( `get clientconf for ${req.session.header.xworkon} on ${req.params.keylist}` )
let dataref = {}
if( req.params.keylist.split( '_' )
.length > 0 ) {
@ -27,10 +27,10 @@ router.get( '/clientconf/:keylist', checkHeaders, isAuthenticated, ( req, res )
if( ref.status == 200 ) {
dataref = ref.payload.data;
} else {
console.log( "erreur ", ref )
logger.info( "erreur ", ref )
}
}
console.log( 'envoie en jsonp: dataref' )
logger.info( 'envoie en jsonp: dataref' )
res.jsonp( dataref )
} );
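A hedged usage sketch: :keylist is split on '_' above, so several configuration keys can be fetched in one call; the mount path /referentials and the key names are placeholders:

// GET /referentials/clientconf/website_catalogue -> dataref assembled from the keys [ 'website', 'catalogue' ]
axios.get( `https://${urlbackoffice}/referentials/clientconf/website_catalogue`, { headers } )
  .then( rep => { /* rep.data is the dataref object sent with res.jsonp above */ } );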
router.get( '/clientconfglob', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
@ -40,7 +40,7 @@ router.get( '/clientconfglob', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
router.get( '/contentlist/:source', checkHeaders, isAuthenticated,
|
||||
( req, res ) => {
|
||||
const payload = [];
|
||||
console.log( req.params.source, `${config.tribes}/${req.session.header.xworkon}/referentials/dataManagement/${req.params.source}/*.json` )
|
||||
logger.info( req.params.source, `${config.tribes}/${req.session.header.xworkon}/referentials/dataManagement/${req.params.source}/*.json` )
|
||||
glob.sync( `${config.tribes}/${req.session.header.xworkon}/referentials/dataManagement/${req.params.source}/*.json` )
|
||||
.forEach( f => {
|
||||
payload.push( path.basename( f, '.json' ) );
|
||||
@ -50,27 +50,27 @@ router.get( '/contentlist/:source', checkHeaders, isAuthenticated,
|
||||
router.get( '/contentfull/:source/:idref', checkHeaders, isAuthenticated,
|
||||
( req, res ) => {
|
||||
//only for data and object
|
||||
console.log( 'route referentials get all language' + req.params.source + '-' + req.params.idref );
|
||||
logger.info( 'route referentials get all language' + req.params.source + '-' + req.params.idref );
|
||||
const getref = Referentials.getref( true, req.params.source, req.params.idref, req.session.header.xworkon, req.session.header.xlang );
|
||||
// Return any status the data if any erreur return empty object
|
||||
res.jsonp( getref.payload.data );
|
||||
} );
|
||||
router.get( '/content/:source/:idref', checkHeaders, isAuthenticated,
|
||||
( req, res ) => {
|
||||
console.log( 'route referentials get ' + req.params.source + '-' + req.params.idref );
|
||||
logger.info( 'route referentials get ' + req.params.source + '-' + req.params.idref );
|
||||
const getref = Referentials.getref( false, req.params.source, req.params.idref, req.session.header.xworkon, req.session.header.xlang );
|
||||
res.jsonp( getref.payload.data );
|
||||
} );
|
||||
// get with no authentification
|
||||
router.get( '/contentnoauth/:source/:idref', checkHeaders,
|
||||
( req, res ) => {
|
||||
console.log( 'route referentials get ' + req.params.source + '-' + req.params.idref );
|
||||
logger.info( 'route referentials get ' + req.params.source + '-' + req.params.idref );
|
||||
// @TODO check access right in clientconf before sending back json file
|
||||
const getref = Referentials.getref( false, req.params.source, req.params.idref, req.session.header.xworkon, req.session.header.xlang );
|
||||
res.jsonp( getref.payload.data );
|
||||
} );
|
||||
router.get( '/lg', ( req, res ) => {
console.log( req.headers[ "accept-language" ] )
logger.info( req.headers[ "accept-language" ] )
let lg = '??';
if( req.headers[ "accept-language" ] && req.headers[ "accept-language" ].split( ',' )
.length > 0 ) {
@ -79,14 +79,14 @@ router.get( '/lg', ( req, res ) => {
res.json( { lg } )
} );
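A small worked example of this route; the exact parsing is partly hidden by the hunk above, so the first-tag assumption is hedged:

// Accept-Language: fr-FR,fr;q=0.9,en;q=0.8  =>  res.json( { lg: 'fr' } )   (assuming the first language tag wins)
// no Accept-Language header                 =>  res.json( { lg: '??' } )   (the default set above)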
router.put( '/content/:source/:idref', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
console.log( `route put content for ${req.params.idref} that is a ${req.params.source}` );
|
||||
logger.info( `route put content for ${req.params.idref} that is a ${req.params.source}` );
|
||||
const putref = Referentials.putref( req.params.source, req.params.idref, req.session.header.xworkon, req.body )
|
||||
return res.status( putref.status )
|
||||
.send( { payload: putref.payload } )
|
||||
} );
|
||||
//hasAccessrighton( 'referentials', 'U' )
|
||||
router.get( '/updatefull', checkHeaders, isAuthenticated, hasAccessrighton( 'referentials', 'U' ), ( req, res ) => {
|
||||
console.log( `route get to force update content updatefull is accessrighton` );
|
||||
logger.info( `route get to force update content updatefull is accessrighton` );
|
||||
const updtref = Referentials.updatefull( req.session.header.xworkon )
|
||||
return res.status( updtref.status )
|
||||
.send( { payload: updtref.payload } )
|
||||
|
@ -11,7 +11,7 @@ const Tags = require('../models/Tags');
|
||||
// Middlewares
|
||||
const router = express.Router();
|
||||
router.get('/:filename', (req, res) => {
|
||||
//console.log('route tags get ', req.params.filename);
|
||||
//logger.info('route tags get ', req.params.filename);
|
||||
const savetag = Tags.getfile(req.params.filename, req);
|
||||
if(savetag.status == 200) {
|
||||
res.sendFile(savetag.payload.filename);
|
||||
@ -21,7 +21,7 @@ router.get('/:filename', (req, res) => {
|
||||
}
|
||||
})
|
||||
router.post('/:tribeid', (req, res) => {
|
||||
//console.log('route tags post ', req.params.tribeid);
|
||||
//logger.info('route tags post ', req.params.tribeid);
|
||||
const savetag = Tags.savehits(req);
|
||||
res.status(200)
|
||||
.send('');
|
||||
|
@ -18,7 +18,7 @@ router.get( '/clientconf/:tribeid', checkHeaders, isAuthenticated, ( req, res )
if tribeid == all and user is admin of apixtribe => get /tmp/clientconfglob.json
req.session.header.accessrights, req.session.header.apixpaganid
*/
console.log( `Tribes/clientconf for tribeid:${req.params.tribeid}` )
logger.info( `Tribes/clientconf for tribeid:${req.params.tribeid}` )
if( req.params.tribeid == "all" && req.session.header.accessrights.data.apixtribe && req.session.header.accessrights.data.apixtribe.tribeid && req.session.header.accessrights.data.apixtribe.tribeid.includes( 'R' ) ) {
res.status( 200 )
.send( { moreinfo: fs.readJsonSync( `${config.tmp}/clientconfglob.json`, 'utf-8' ) } );
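A hedged sketch of the accessrights shape the condition above implies; it is inferred only from that condition, and the 'CRUD' value is illustrative:

req.session.header.accessrights = {
  data: {
    apixtribe: { tribeid: 'CRUD' } // must contain 'R' for the "all" branch above to return clientconfglob.json
  }
};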
@ -41,22 +41,22 @@ router.get( '/clientconf/:tribeid', checkHeaders, isAuthenticated, ( req, res )
|
||||
.end();
|
||||
} )
|
||||
router.put( '/', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
console.log( 'Create a new tribeid, with a useradmin' )
|
||||
console.log( ' send data = clientconf.json with all parameter.' )
|
||||
logger.info( 'Create a new tribeid, with a useradmin' )
|
||||
logger.info( ' send data = clientconf.json with all parameter.' )
|
||||
// !!!!! check for security any ; \n or so because data can be used into shell
|
||||
const add = Tribes.create( req.body );
|
||||
res.status( add.status )
|
||||
.send( add.payload )
|
||||
} )
|
||||
router.delete( '/archivetribeid/:tribeid', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
console.log( "request archive tribeid" )
|
||||
logger.info( "request archive tribeid" )
|
||||
const archive = Tribes.archive( req.params.tribeid );
|
||||
res.status( archive.status )
|
||||
.send( archive.payload )
|
||||
} );
|
||||
router.post( '/spaceweb', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
// !!!!! check for security any ; \n or so because data can be used into shell
|
||||
console.log( 'Create a new webapp for xworkon ' )
|
||||
logger.info( 'Create a new webapp for xworkon ' )
|
||||
req.body.tribeid = req.session.header.xworkon;
|
||||
const add = Tribes.addspaceweb( req.body )
|
||||
res.status( add.status )
|
||||
@ -66,7 +66,7 @@ router.get( '/spaceweb/components/:tribeid/:website/:key', checkHeaders, ( req,
|
||||
// check if key is valid before continue
|
||||
// exemple: get Tribes/spaceweb/components/ndda/mesa/123?rep=appmesatable/appsimpletable.mustache
|
||||
const file = `${config.tribes}/${req.params.tribeid}/spacedev/${req.params.website}/src/ctatic/components/${req.query.path}`
|
||||
console.log( `Request components file from ${file}` )
|
||||
logger.info( `Request components file from ${file}` )
|
||||
if( fs.existsSync( file ) ) {
|
||||
res.sendFile( file );
|
||||
} else {
|
||||
@ -79,7 +79,7 @@ router.get( '/plugins/:tribeid/:pluginname/:key/:filename', ( req, res ) => {
|
||||
// return a file into /:tribeid owner of plugin/plugins/:pluginname/components/:filename
|
||||
// if not exist or invalid key then return console.error
|
||||
const file = `${config.tribes}/${req.params.tribeid}/plugins/${req.params.pluginname}/components/${req.params.filename}`
|
||||
console.log( 'Tribes/plugins/ ', file )
|
||||
logger.info( 'Tribes/plugins/ ', file )
|
||||
if( fs.existsSync( file ) ) {
|
||||
res.sendFile( file );
|
||||
} else {
|
||||
@ -94,13 +94,13 @@ router.get( '/dirls', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
// {file:[{}],dir:[{}]}
|
||||
// @todo check if isAuthorized and exist
|
||||
|
||||
console.log( 'request dirls', `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` );
|
||||
logger.info( 'request dirls', `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` );
|
||||
if( !fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.rep}` ) ) {
|
||||
res.status( 404 )
|
||||
.send( { 'info': [ 'dirnotexist' ], model: 'Tribes' } );
|
||||
}
|
||||
const info = Tribes.dirls( req.session.header.xworkon, req.query.rep );
|
||||
console.log( info )
|
||||
logger.info( info )
|
||||
res.status( info.status )
|
||||
.send( info.payload );
|
||||
} )
|
||||
@ -115,9 +115,9 @@ router.delete( '/ls', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
} );
|
||||
router.put( '/sendjson', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
//req.body = {object:spacedev, path:website/src/data/tpldataname_lg.json, data:{...}}
|
||||
//console.log( req.body )
|
||||
//logger.info( req.body )
|
||||
const dest = `${config.tribes}/${req.session.header.xworkon}/${req.body.object}/${req.body.path}`;
|
||||
console.log( `Send json to saved to ${dest}` );
|
||||
logger.info( `Send json to saved to ${dest}` );
|
||||
if( !( req.body.object && fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${req.body.object}` ) ) ) {
|
||||
res.status( '404' )
|
||||
.send( { info: [ 'objectmissiong' ], models: 'Tribes', moreinfo: `object: ${req.body.object} does not exist req.body must {object, data, path} into data ${req.session.header.xworkon}/${req.body.object}` } )
|
||||
@ -163,22 +163,22 @@ router.post( '/downloadls', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
}
|
||||
} );
|
||||
router.post( '/upfilepond', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
console.log( 'post /Tribes/uploadfilepond' );
|
||||
logger.info( 'post /Tribes/uploadfilepond' );
|
||||
// Store file and return a unique id to save button
|
||||
// that provide folder where to store it
|
||||
const formidable = require( 'formidable' );
|
||||
const form = formidable( { multiples: false } );
|
||||
form.parse( req, ( err, fields, files ) => {
|
||||
if( err ) { next( err ); return; }
|
||||
//console.log( 'fields',fields);
|
||||
//logger.info( 'fields',fields);
|
||||
// fileMetadaObject send
|
||||
let context = JSON.parse( fields.filepond );
|
||||
let idfile = files.filepond.path;
|
||||
let name = files.filepond.name;
|
||||
let subfolder = context.subfolder;
|
||||
name = name.replace( /[ ,'"’]/g, "_" );
|
||||
//console.log( 'files.filepond:', files.filepond );
|
||||
console.log( idfile, `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` )
|
||||
//logger.info( 'files.filepond:', files.filepond );
|
||||
logger.info( idfile, `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` )
|
||||
// On le supprime s'il existe deja
|
||||
fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/www/${subfolder}/${name}` );
|
||||
// mv tmp
|
||||
@ -198,14 +198,14 @@ router.delete( '/file', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
return;
|
||||
};
|
||||
hasAccessrighton( req.query.src.split( '/' )[ 0 ], "D" );
|
||||
console.log( 'Remove file', `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` )
|
||||
console.log( req.body )
|
||||
logger.info( 'Remove file', `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` )
|
||||
logger.info( req.body )
|
||||
fs.removeSync( `${config.tribes}/${req.session.header.xworkon}/${req.query.src}` );
|
||||
res.status( 200 )
|
||||
.send( { info: [ 'Successfullremove' ], models: "Tribes" } )
|
||||
} );
|
||||
router.post( '/uploadfile', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
console.log( 'upload a file ' )
|
||||
logger.info( 'upload a file ' )
|
||||
/* Authentification is needed to get a TOKEN
|
||||
curl -X POST -H "xtribe: apixtribe" -H "xworkon: pvmsaveurs" -H "xlang: fr" -H "xpaganid: 1" -H "xauth: 1" -H "xapp: pvmsaveurs:pvmsaveurs" -H "Content-Type: application/json" -d '{"LOGIN":"adminapixtribe","PASSWORD":"Trze3aze!"}' http://pvmsaveurs.pvmsaveurs.fr/app/users/login
|
||||
if exist replace xpaganidTOKEN with payload.TOKEN value
|
||||
@ -215,15 +215,15 @@ router.post( '/uploadfile', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
const formidable = require( 'formidable' );
|
||||
const form = formidable( { multiples: false } );
|
||||
form.parse( req, function ( err, fields, files ) {
|
||||
//console.log( files.data )
|
||||
//logger.info( files.data )
|
||||
var oldPath = files.data.path;
|
||||
var newPath = `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].dest}`;
|
||||
console.log( 'oldPath', oldPath )
|
||||
console.log( 'newPath', newPath )
|
||||
logger.info( 'oldPath', oldPath )
|
||||
logger.info( 'newPath', newPath )
|
||||
var rawData = fs.readFileSync( oldPath )
|
||||
fs.outputFile( newPath, rawData, function ( err ) {
|
||||
if( err ) {
|
||||
console.log( err );
|
||||
logger.info( err );
|
||||
return res.status( 405 )
|
||||
.send( { info: [ 'savederror' ], models: "Tribes", moreinfo: "your file was not able to be saved into the server" } )
|
||||
} else {
|
||||
@ -237,7 +237,7 @@ router.post( '/uploadfile', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
} );
|
||||
} );
|
||||
router.post( '/uploadzip', checkHeaders, ( req, res ) => {
|
||||
console.log( 'uploadzip a file ' )
|
||||
logger.info( 'uploadzip a file ' )
|
||||
|
||||
/* no authentification to upload a zip filename into /tribes/${xworkon}/${clientconf.uploadzip[filename].dest}
|
||||
unzip it using the password ${clientconf.uploadzip[filename].psw
|
||||
@ -267,29 +267,29 @@ router.post( '/uploadzip', checkHeaders, ( req, res ) => {
|
||||
const formidable = require( 'formidable' );
|
||||
const form = formidable( { multiples: false } );
|
||||
form.parse( req, function ( err, fields, files ) {
|
||||
//console.log( files.data )
|
||||
//logger.info( files.data )
|
||||
var oldPath = files.data.path;
|
||||
if( !Object.keys( clientconf.uploadzip )
|
||||
.includes( files.data.name ) ) {
|
||||
return res.status( 403 )
|
||||
.send( { info: [ "notAllowed" ], models: "Tribes", moreinfo: `file ${files.data.name} not allowed to be upload` } )
|
||||
} else {
|
||||
console.log( "context:", clientconf.uploadzip[ files.data.name ] )
|
||||
logger.info( "context:", clientconf.uploadzip[ files.data.name ] )
|
||||
var newPath = `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].dest}`;
|
||||
//console.log( 'oldPath', oldPath )
|
||||
//console.log( 'newPath', `${newPath}/${files.data.name}` )
|
||||
//logger.info( 'oldPath', oldPath )
|
||||
//logger.info( 'newPath', `${newPath}/${files.data.name}` )
|
||||
fs.moveSync( oldPath, `${newPath}/${files.data.name}`, { overwrite: true } );
|
||||
const cp = require( 'child_process' );
|
||||
//console.log( `7z e -p${clientconf.uploadzip[ files.data.name ].psw} ${newPath}/${files.data.name}` );
|
||||
console.log( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
|
||||
//logger.info( `7z e -p${clientconf.uploadzip[ files.data.name ].psw} ${newPath}/${files.data.name}` );
|
||||
logger.info( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
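A hedged sketch of a clientconf.uploadzip entry, inferred from the fields used in this handler (dest, psw, unzipoption, callback); every value below is a placeholder:

clientconf.uploadzip = {
  'mydata.zip': {                       // only file names listed here are accepted
    dest: 'imports/mydata',             // destination under ${config.tribes}/${xworkon}/
    psw: 'zip-password',                // password passed to 7z as -p<psw>
    unzipoption: '-y',                  // extra 7z option appended to the spawnSync argument list (form assumed)
    callback: 'imports/integration.js'  // optional module whose .run() is called after a successful extraction
  }
};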
var newFiles = cp.spawnSync( '7z', [ 'e', `-p${clientconf.uploadzip[ files.data.name ].psw}`, `${newPath}/${files.data.name}`, `-o${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[ files.data.name ].dest}`, clientconf.uploadzip[ files.data.name ].unzipoption ] );
|
||||
console.log( newFiles.output.toString() )
|
||||
logger.info( newFiles.output.toString() )
|
||||
if( newFiles.output.toString()
|
||||
.includes( 'Everything is Ok' ) ) {
|
||||
if( clientconf.uploadzip[ files.data.name ].callback ) {
|
||||
const integ = require( `${config.tribes}/${req.session.header.xworkon}/${clientconf.uploadzip[files.data.name].callback}` )
|
||||
.run();
|
||||
console.log( 'integration', integ )
|
||||
logger.info( 'integration', integ )
|
||||
return res.status( integ.status )
|
||||
.send( integ.payload );
|
||||
} else {
|
||||
@ -326,9 +326,9 @@ router.post( '/upload', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
<input type="submit" value="upload"/>
|
||||
</form>
|
||||
*/
|
||||
console.log( 'Envoie image' )
|
||||
console.log( 'body', req.body );
|
||||
console.log( 'params', req.params );
|
||||
logger.info( 'Envoie image' )
|
||||
logger.info( 'body', req.body );
|
||||
logger.info( 'params', req.params );
|
||||
//const authfolder = Tribes.checkaccessfiles( req.params.rep, 'C', req.session.header.accessrights, req.session.header.xpaganid );
|
||||
// cheack autorisation to create or replace a file for this accessrights user
|
||||
const authfolder = { ok: "tt" }
|
||||
@ -345,7 +345,7 @@ router.post( '/upload', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
if( err ) { next( err ); return; }
|
||||
let thefile = files.filebond.path;
|
||||
fs.outputFileSync()
|
||||
console.log( 'thefile:' + thefile );
|
||||
logger.info( 'thefile:' + thefile );
|
||||
res.writeHead( 200, { 'Content-Type': 'text/plain' } );
|
||||
res.end( theFile );
|
||||
} )
|
||||
|
@ -12,23 +12,23 @@ const isAuthenticated = require( '../middlewares/isAuthenticated' );
|
||||
const router = express.Router();
|
||||
|
||||
router.post( '/', checkHeaders, ( req, res ) => {
|
||||
console.log( 'route uploadFile post ' );
|
||||
logger.info( 'route uploadFile post ' );
|
||||
const saveFile = UploadFile.add( req, req.session.header );
|
||||
res.send( saveFile );
|
||||
// res.send({ status: 200, payload: { info: 'fine' } });
|
||||
} );
|
||||
|
||||
router.post( '/json', checkHeaders, ( req, res ) => {
|
||||
console.log( 'route uploadFile post de fichier json ' );
|
||||
logger.info( 'route uploadFile post de fichier json ' );
|
||||
// a callback can be pass to req.body to run a specific process after upload
|
||||
const saveFile = UploadFile.addjson( req.body, req.session.header );
|
||||
console.log( saveFile );
|
||||
logger.info( saveFile );
|
||||
res.send( saveFile );
|
||||
// res.send({ status: 200, payload: { info: 'fine' } });
|
||||
} );
|
||||
|
||||
router.get( '/:filename', checkHeaders, isAuthenticated, ( req, res ) => {
|
||||
console.log( 'route uploadFile get ', req.params.filename );
|
||||
logger.info( 'route uploadFile get ', req.params.filename );
|
||||
const pushFile = UploadFile.get(
|
||||
req.params.filename.replace( /______/g, '/' ),
|
||||
req.session.header
|
||||
|
@ -3,7 +3,7 @@ const path = require( 'path' );
|
||||
const fs = require( 'fs' );
|
||||
const config = {};
|
||||
if( !process.env.NODE_ENV ) process.env.NODE_ENV = "dev"
|
||||
console.log( 'apixtribe process.env.NODE_ENV: ', process.env.NODE_ENV );
|
||||
logger.info( 'apixtribe process.env.NODE_ENV: ', process.env.NODE_ENV );
|
||||
// VOIR l'ancien fichier de cnfig au cas ou il manque des chemins dans config
|
||||
// voir la doc http://gitlab.ndda.fr/philc/apiamaildigitfr/wikis/InstallConf
|
||||
config.prod = {
|
||||
@ -89,7 +89,7 @@ config.dev = {
|
||||
}
|
||||
};
|
||||
if( !config[ process.env.NODE_ENV ] ) {
|
||||
console.log( 'config.js -> Exit setup due to node_ENV have to be set as prod or dev instead of ', process.env.NODE_ENV )
|
||||
logger.info( 'config.js -> Exit setup due to node_ENV have to be set as prod or dev instead of ', process.env.NODE_ENV )
|
||||
process.exit();
|
||||
}
|
||||
const confuse = config[ process.env.NODE_ENV ];
|
||||
|
@ -50,7 +50,7 @@ pwa.auth.check = () => {
|
||||
}
|
||||
};
|
||||
pwa.auth.route = ( destination ) => {
|
||||
console.log( 'auth.route to', destination );
|
||||
logger.info( 'auth.route to', destination );
|
||||
//if check Authenticated && exist #signin button[data-routeto] then redirect browser to button[data-routeto]
|
||||
//else manage component action auth
|
||||
if( pwa.state && pwa.state.data && pwa.state.data.login && pwa.state.data.login.isAuthenticated ) {
|
||||
@ -74,24 +74,24 @@ pwa.auth.isAuthenticate = async function () {
|
||||
// then try pwa.auth.isAuthenticate if rememberMe auto reconnect
|
||||
// if jwt is ok then return true in other case => false
|
||||
// this is the first test then depending of action see ACCESSRIGHTS of user
|
||||
console.log( 'lance isauth', {
|
||||
logger.info( 'lance isauth', {
|
||||
headers: pwa.state.data.headers.xpaganid
|
||||
} )
|
||||
//alert( 'uuid ' + pwa.state.data.headers.xpaganid )
|
||||
console.log( `https://${pwa.state.data.ctx.urlbackoffice}/users/isauth`, {
|
||||
logger.info( `https://${pwa.state.data.ctx.urlbackoffice}/users/isauth`, {
|
||||
headers: pwa.state.data.headers
|
||||
} )
|
||||
try {
|
||||
const repisauth = await axios.get( `https://${pwa.state.data.ctx.urlbackoffice}/users/isauth`, {
|
||||
headers: pwa.state.data.headers
|
||||
} )
|
||||
console.log( repisauth )
|
||||
console.log( 'isAauthenticate: yes' )
|
||||
logger.info( repisauth )
|
||||
logger.info( 'isAauthenticate: yes' )
|
||||
return true;
|
||||
} catch ( err ) {
|
||||
if( err.response ) { console.log( "response err ", err.response.data ) }
|
||||
if( err.request ) { console.log( "request err", err.request ) }
|
||||
console.log( 'isAuthenticate: no' )
|
||||
if( err.response ) { logger.info( "response err ", err.response.data ) }
|
||||
if( err.request ) { logger.info( "request err", err.request ) }
|
||||
logger.info( 'isAuthenticate: no' )
|
||||
pwa.state.data.headers.xpaganid = "1";
|
||||
if( pwa.state.data.login.rememberMe.login ) {
|
||||
if( await pwa.auth.authentification( pwa.state.data.login.rememberMe ) ) {
|
||||
@ -107,13 +107,13 @@ pwa.auth.authentification = async function ( data ) {
|
||||
// in case of sucess update paw.state.data.login
|
||||
console.groupCollapsed( "Post Authentification for standard on : https://" + pwa.state.data.ctx.urlbackoffice + "/users/login param data", data )
|
||||
|
||||
console.log( 'header de login', pwa.state.data.headers )
|
||||
logger.info( 'header de login', pwa.state.data.headers )
|
||||
let auth;
|
||||
try {
|
||||
auth = await axios.post( `https://${pwa.state.data.ctx.urlbackoffice }/users/login`, data, {
|
||||
headers: pwa.state.data.headers
|
||||
} );
|
||||
console.log( "retour de login successfull ", auth );
|
||||
logger.info( "retour de login successfull ", auth );
|
||||
//Maj variable globale authentifié
|
||||
pwa.state.data.headers.xpaganid = auth.data.payload.data.UUID;
|
||||
pwa.state.data.headers.xauth = auth.data.payload.data.TOKEN;
|
||||
@ -128,9 +128,9 @@ pwa.auth.authentification = async function ( data ) {
|
||||
console.groupEnd();
|
||||
return true;
|
||||
} catch ( err ) {
|
||||
if( err.response ) { console.log( "resp", err.response.data ) }
|
||||
if( err.request ) { console.log( "req", err.request.data ) }
|
||||
console.log( 'erreur de login reinit de rememberMe', err )
|
||||
if( err.response ) { logger.info( "resp", err.response.data ) }
|
||||
if( err.request ) { logger.info( "req", err.request.data ) }
|
||||
logger.info( 'erreur de login reinit de rememberMe', err )
|
||||
pwa.state.data.login.rememberMe = {};
|
||||
document.querySelector( "#signin p.msginfo" )
|
||||
.innerHTML = document.querySelector( "#signin [data-msgko]" )
|
||||
@ -140,7 +140,7 @@ pwa.auth.authentification = async function ( data ) {
|
||||
}
|
||||
};
|
||||
pwa.auth.logout = function () {
|
||||
console.log( "remove ", pwa.state.data.ctx.website );
|
||||
logger.info( "remove ", pwa.state.data.ctx.website );
|
||||
localStorage.removeItem( pwa.state.data.ctx.website );
|
||||
window.location.href = "/";
|
||||
}
|
||||
@ -161,7 +161,7 @@ pwa.auth.login = async function () {
|
||||
PASSWORD: document.querySelector( "#signin input[name='password']" )
|
||||
.value
|
||||
}
|
||||
console.log( 'check password', checkdata.test.password( "", data.PASSWORD ) )
|
||||
logger.info( 'check password', checkdata.test.password( "", data.PASSWORD ) )
|
||||
if( data.LOGIN.length < 4 || !checkdata.test.password( "", data.PASSWORD ) ) {
|
||||
/*$("#loginpart p.msginfo")
|
||||
.html("")
|
||||
@ -175,15 +175,15 @@ pwa.auth.login = async function () {
|
||||
pwa.state.data.login.rememberMe = data;
|
||||
}
|
||||
if( await pwa.auth.authentification( data ) ) {
|
||||
console.log( 'Authentification VALIDE' )
|
||||
logger.info( 'Authentification VALIDE' )
|
||||
document.querySelector( '#signin p.msginfo' )
|
||||
.innerHTML = document.querySelector( "#signin [data-msgok]" )
|
||||
.getAttribute( 'data-msgok' );
|
||||
//state l'état isAuthenticated et check la route
|
||||
pwa.state.data.login.isAuthenticated = true;
|
||||
pwa.state.save();
|
||||
console.log( pwa.state.data.login )
|
||||
console.log( 'Auth ok route to ', document.querySelector( '#signin button[data-routeto]' )
|
||||
logger.info( pwa.state.data.login )
|
||||
logger.info( 'Auth ok route to ', document.querySelector( '#signin button[data-routeto]' )
|
||||
.getAttribute( 'data-routeto' ) );
|
||||
pwa.auth.route( document.querySelector( '#signin button[data-routeto]' )
|
||||
.getAttribute( 'data-routeto' ) );
|
||||
@ -200,9 +200,9 @@ pwa.auth.forgetpsw = async function ( event ) {
|
||||
.getAttribute( "data-tribeid" );
|
||||
const email = $( '.forgetpsw .email' )
|
||||
.val();
|
||||
console.log( `Reinit email: ${email} for tribeid: ${tribeid}` )
|
||||
logger.info( `Reinit email: ${email} for tribeid: ${tribeid}` )
|
||||
try {
|
||||
console.log( `https://${pwa.state.data.ctx.urlbackoffice }/users/getlinkwithoutpsw/${email}` )
|
||||
logger.info( `https://${pwa.state.data.ctx.urlbackoffice }/users/getlinkwithoutpsw/${email}` )
|
||||
const reinit = await axios.get( `https://${pwa.state.data.ctx.urlbackoffice }/users/getlinkwithoutpsw/${email}`, {
|
||||
headers: pwa.state.data.headers
|
||||
} )
|
||||
@ -210,7 +210,7 @@ pwa.auth.forgetpsw = async function ( event ) {
|
||||
.html( "Regardez votre boite email" );
|
||||
return true;
|
||||
} catch ( er ) {
|
||||
console.log( "Pb d'accès au back check apiamaildigit" )
|
||||
logger.info( "Pb d'accès au back check apiamaildigit" )
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
@ -23,11 +23,11 @@ checkdata.test.emailadress = ( ctx, email ) => {
|
||||
* it check if each eamil separate by , are correct
|
||||
*/
|
||||
checkdata.test.emailadresslist = ( ctx, emaillist ) => {
|
||||
//console.log(emaillist.split(','))
|
||||
//logger.info(emaillist.split(','))
|
||||
if( emaillist.length > 0 ) {
|
||||
const emails = emaillist.split( ',' );
|
||||
for( var i in emails ) {
|
||||
//console.log(emails[i])
|
||||
//logger.info(emails[i])
|
||||
if( !checkdata.test.emailadress( "", emails[ i ].trim() ) ) {
|
||||
return false
|
||||
}
|
||||
@ -52,7 +52,7 @@ checkdata.test.unique = ( ctx, val ) => {
|
||||
if( ctx.list[ ctx.currentfield ] ) {
|
||||
return !ctx.list[ ctx.currentfield ].includes( val );
|
||||
} else {
|
||||
console.log( 'ERR no list for field:' + ctx.currentfield );
|
||||
logger.info( 'ERR no list for field:' + ctx.currentfield );
|
||||
return false;
|
||||
}
|
||||
};
|
||||
@ -84,11 +84,11 @@ checkdata.test.phoneNumber = ( ctx, phoneNumber ) => {
|
||||
* it check if each phone separate by , are correct
|
||||
*/
|
||||
checkdata.test.phoneNumberlist = ( ctx, phonelist ) => {
|
||||
//console.log(emaillist.split(','))
|
||||
//logger.info(emaillist.split(','))
|
||||
if( phonelist.length > 0 ) {
|
||||
const phones = phonelist.split( ',' );
|
||||
for( var i in phones ) {
|
||||
//console.log(emails[i])
|
||||
//logger.info(emails[i])
|
||||
if( !checkdata.test.phoneNumber( "", phones[ i ].trim() ) ) {
|
||||
return false
|
||||
}
|
||||
@ -116,8 +116,8 @@ checkdata.normalize.zfill10 = ( ctx, num ) => {
|
||||
return s;
|
||||
};
|
||||
/*let tt = "+33 1 02.03 04 05";
|
||||
console.log(checkdata.test.phoneNumber('', tt))
|
||||
console.log(checkdata.normalize.phoneNumber('', tt))
|
||||
logger.info(checkdata.test.phoneNumber('', tt))
|
||||
logger.info(checkdata.normalize.phoneNumber('', tt))
|
||||
*/
|
||||
checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
/*
|
||||
@ -128,9 +128,9 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
clean data eventually reformated
|
||||
updateDatabase}
|
||||
*/
|
||||
console.log( 'contexte', contexte );
|
||||
console.log( 'referentiel', referential );
|
||||
console.log( 'data', data );
|
||||
logger.info( 'contexte', contexte );
|
||||
logger.info( 'referentiel', referential );
|
||||
logger.info( 'data', data );
|
||||
const invalidefor = [];
|
||||
const objectdef = {};
|
||||
const listfield = referential.map( ch => {
|
||||
@ -149,7 +149,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
if( objectdef[ field ].check ) {
|
||||
// check data with rule list in check
|
||||
objectdef[ field ].check.forEach( ctrl => {
|
||||
console.log( 'ctrl', ctrl );
|
||||
logger.info( 'ctrl', ctrl );
|
||||
contexte.currentfield = field;
|
||||
if( !checkdata.test[ ctrl ] ) {
|
||||
invalidefor.push( 'ERR check function does not exist :' + ctrl + '___' + field )
|
||||
@ -162,7 +162,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
|
||||
if( objectdef[ field ].nouserupdate ) {
|
||||
// check if user can modify this information
|
||||
console.log(
|
||||
logger.info(
|
||||
'evaluation :' + field + ' -- ' + objectdef[ field ].nouserupdate,
|
||||
eval( objectdef[ field ].nouserupdate )
|
||||
);
|
||||
@ -171,7 +171,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
|
||||
}
|
||||
}
|
||||
} );
|
||||
console.log( {
|
||||
logger.info( {
|
||||
invalidefor,
|
||||
data
|
||||
} );
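A hedged sketch of one referential field definition as checkdata.evaluate consumes it; check and nouserupdate appear in the code above, while idfield and the values are assumptions:

const referential = [ {
  idfield: 'email',                    // hypothetical key for the field name
  check: [ 'emailadress', 'unique' ],  // each rule must exist in checkdata.test (both do, see the earlier hunks)
  nouserupdate: 'false'                // expression eval'd to decide whether the user may modify the field
} ];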
@ -47,16 +47,16 @@ pwa.main.init = () => {
|
||||
};
|
||||
|
||||
pwa.main.loadmenu = async () => {
|
||||
console.log( 'pwa.main.loadmenu running' );
|
||||
console.log( 'Status of pwa.state.data.login.isAuthenticated =', pwa.state.data.login.isAuthenticated );
|
||||
logger.info( 'pwa.main.loadmenu running' );
|
||||
logger.info( 'Status of pwa.state.data.login.isAuthenticated =', pwa.state.data.login.isAuthenticated );
|
||||
let datasidebar, datatopbar;
|
||||
/* Build datasidebar and datatopbar depending of list of module allowed by user in his ACCESSRIGHTS profil.
|
||||
app[`${pwa.state.data.ctx.tribeid}:${pwa.state.data.ctx.website}`].js;
|
||||
|
||||
*/
|
||||
//console.log( 'List of tpldata', pwa.main.tpldata )
|
||||
//console.log( 'List of tpl', pwa.main.tpl )
|
||||
console.log( `run pwa.state.loadfile with pwa.state.data.ctx.refreshstorage = ${pwa.state.data.ctx.refreshstorage} if true=> refresh anyway, if false refresh only if dest.name does not exist` );
|
||||
//logger.info( 'List of tpldata', pwa.main.tpldata )
|
||||
//logger.info( 'List of tpl', pwa.main.tpl )
|
||||
logger.info( `run pwa.state.loadfile with pwa.state.data.ctx.refreshstorage = ${pwa.state.data.ctx.refreshstorage} if true=> refresh anyway, if false refresh only if dest.name does not exist` );
|
||||
await pwa.state.loadfile( pwa.main.tpl, 'tpl' );
|
||||
await pwa.state.loadfile( pwa.main.tpldata, 'tpldata' );
|
||||
datasidebar = pwa.state.data.tpldata.sidebar;
|
||||
@ -123,12 +123,12 @@ pwa.main.clickactive = () => {
|
||||
const cleanactive = () => {
|
||||
const el = document.querySelectorAll( '.sidebar-item' )
|
||||
for( var i = 0; i < el.length; i++ ) {
|
||||
//console.log( 'clean', el[ i ].classList )
|
||||
//logger.info( 'clean', el[ i ].classList )
|
||||
el[ i ].classList.remove( 'active' );
|
||||
}
|
||||
}
|
||||
document.addEventListener( "click", ( e ) => {
|
||||
console.log( 'click', e );
|
||||
logger.info( 'click', e );
|
||||
if( e.target.classList.contains( 'sidebar-link' ) ) {
|
||||
cleanactive();
|
||||
e.target.closest( '.sidebar-item' )
|
||||
|
@ -13,16 +13,16 @@ ____________________
|
||||
pwa.notification = {};
|
||||
|
||||
pwa.notification.update = () => {
|
||||
console.log( 'get notification update for a user' );
|
||||
logger.info( 'get notification update for a user' );
|
||||
axios.get( `https://${pwa.state.data.ctx.urlbackoffice}/notifications/user`, { headers: pwa.state.data.headers } )
|
||||
.then( rep => {
|
||||
console.log( "list des notifs", rep.data.payload.data )
|
||||
logger.info( "list des notifs", rep.data.payload.data )
|
||||
rep.data.payload.data.number = rep.data.payload.data.notifs.length;
|
||||
document.getElementById( "topbarmenuright" )
|
||||
.innerHTML = Mustache.render( pwa.state.data.tpl.notiflist, rep.data.payload.data ) + document.getElementById( "topbarmenuright" )
|
||||
.innerHTML;
|
||||
} )
|
||||
.catch( err => {
|
||||
console.log( `Err pwa.notification.update data for user into header ${pwa.state.data.headers}`, err );
|
||||
logger.info( `Err pwa.notification.update data for user into header ${pwa.state.data.headers}`, err );
|
||||
} );
|
||||
};
|
||||
|
@ -29,23 +29,23 @@ pwa.state.refresh = () => {
|
||||
const currenthtml = location.pathname.split( '/' )
|
||||
.at( -1 )
|
||||
.replace( '.html', '.json' );
|
||||
//console.log( currenthtml )
|
||||
//logger.info( currenthtml )
|
||||
axios.get( ` ${pwa.state.data.ctx.urlbase}/static/lastchange/${currenthtml}` )
|
||||
.then(
|
||||
data => {
|
||||
//console.log( data.data.time, pwa.state.data.ctx.version )
|
||||
//logger.info( data.data.time, pwa.state.data.ctx.version )
|
||||
if( data.data.time > pwa.state.data.ctx.version ) {
|
||||
//console.log( "reload la page pour cause de lastchange detecté" );
|
||||
//logger.info( "reload la page pour cause de lastchange detecté" );
|
||||
pwa.state.data.ctx.version = data.data.time;
|
||||
pwa.state.data.ctx.refreshstorage = true;
|
||||
pwa.state.save();
|
||||
location.reload();
|
||||
} else {
|
||||
//console.log( 'nothing change' )
|
||||
//logger.info( 'nothing change' )
|
||||
}
|
||||
},
|
||||
error => {
|
||||
console.log( error );
|
||||
logger.info( error );
|
||||
}
|
||||
);
|
||||
};
|
|
||||
Then it hide all <div class="pagemd" and show the one with <div id="page"+name
|
||||
*/
|
||||
console.groupCollapsed( `pwa.state.route with window.location` );
|
||||
console.log( 'List of pwa available ', Object.keys( pwa ) );
|
||||
logger.info( 'List of pwa available ', Object.keys( pwa ) );
|
||||
if( !pwa.auth ) {
|
||||
console.log( 'Warning, no auth.js, not a pb if no authentification need, if not check js order to be sur auth.js load before state.js' )
|
||||
logger.info( 'Warning, no auth.js, not a pb if no authentification need, if not check js order to be sur auth.js load before state.js' )
|
||||
} else {
|
||||
// check if still authenticated
|
||||
if( pwa.state.data.login.isAuthenticated ) {
|
||||
@ -85,21 +85,21 @@ pwa.state.route = async () => {
|
||||
}
|
||||
//check if need authentification to show this page
|
||||
if( pwa.state.data.ctx.pageneedauthentification && !pwa.state.data.login.isAuthenticated ) {
|
||||
console.log( 'reload page cause not auth and page require an auth' )
|
||||
logger.info( 'reload page cause not auth and page require an auth' )
|
||||
window.location = `${pwa.state.data.ctx.pageredirectforauthentification}_${pwa.state.data.ctx.lang}.html`;
|
||||
}
|
||||
}
|
||||
const loc = window.location;
|
||||
if( loc.search ) {
|
||||
console.log( Object.keys( pwa ) )
|
||||
logger.info( Object.keys( pwa ) )
|
||||
const urlpar = new URLSearchParams( loc.search );
|
||||
if( urlpar.get( 'action' ) ) {
|
||||
const act = 'pwa.' + urlpar.get( 'action' ) + '("' + loc.search + '")';
|
||||
try {
|
||||
eval( act );
|
||||
console.log( 'Specific action request to pwa.' + act )
|
||||
logger.info( 'Specific action request to pwa.' + act )
|
||||
} catch ( err ) {
|
||||
console.log( err )
|
||||
logger.info( err )
|
||||
console.error( `You request ${act}, this action does not exist ` );
|
||||
alert( `Sorry but you have no access to ${act}, ask your admin` );
|
||||
}
|
@ -111,7 +111,7 @@ pwa.state.route = async () => {
|
||||
//route to page content
|
||||
Array.from( document.getElementsByClassName( "pagemd" ) )
|
||||
.forEach( e => {
|
||||
console.log( "detect pagemd", e.getAttribute( 'data-url' ) );
|
||||
logger.info( "detect pagemd", e.getAttribute( 'data-url' ) );
|
||||
e.classList.add( "d-none" );
|
||||
} );
|
||||
if( document.getElementById( pgid ) ) {
|
||||
@ -129,8 +129,8 @@ pwa.state.loadfile = async ( list, dest ) => {
|
||||
// if dest=='js' then it eval the js and store origin in pwa.state.data.js={name:url}
|
||||
//For at true refreshstorage if destination pwa.state.dest does not exist
|
||||
|
||||
//console.log( 'list', list )
|
||||
//console.log( 'pwa.state.data.ctx.refreshstorage', pwa.state.data.ctx.refreshstorage )
|
||||
//logger.info( 'list', list )
|
||||
//logger.info( 'pwa.state.data.ctx.refreshstorage', pwa.state.data.ctx.refreshstorage )
|
||||
if( pwa.state.data.ctx.refreshstorage || !pwa.state.data[ dest ] || Object.keys( pwa.state.data[ dest ] )
|
||||
.length == 0 ) {
|
||||
if( !pwa.state.data[ dest ] ) pwa.state.data[ dest ] = {};
|
||||
@ -145,7 +145,7 @@ pwa.state.loadfile = async ( list, dest ) => {
|
||||
reqload.push( v );
|
||||
}
|
||||
};
|
||||
//console.log( pwa.state.data.ctx.urlbase, reqload )
|
||||
//logger.info( pwa.state.data.ctx.urlbase, reqload )
|
||||
let resload = await Promise.all( reqload.map( r => {
|
||||
if( dest == 'tpldata' ) r = `${r}_${pwa.state.data.ctx.lang}.json`;
|
||||
return axios.get( `${pwa.state.data.ctx.urlbase}/${r}`, { headers: pwa.state.data.headers } )
|
||||
@ -185,13 +185,13 @@ pwa.state.update = async function () {
|
||||
ctx.pageforauthentification = domhtml.getAttribute( 'data-pageforauthentification' );
|
||||
}
|
||||
console.groupCollapsed( `update pwa.state with html attribut or from localstorage into ${ctx.website}` );
|
||||
console.log( 'html context:', ctx );
|
||||
logger.info( 'html context:', ctx );
|
||||
if( localStorage.getItem( ctx.website ) ) {
|
||||
pwa.state.data = JSON.parse( localStorage.getItem( ctx.website ) );
|
||||
//alert( 'recupere pwa.state.data xpaganid:' + pwa.state.data.headers.xpaganid )
|
||||
}
|
||||
if( !( pwa.state.data && pwa.state.data.ctx.tribeid == ctx.tribeid && pwa.state.data.ctx.website == ctx.website ) ) {
|
||||
console.log( " reinitialise localstorage cause work on a different project or first access" );
|
||||
logger.info( " reinitialise localstorage cause work on a different project or first access" );
|
||||
delete pwa.state.data;
|
||||
localStorage.removeItem( ctx.website )
|
||||
}
|
||||
@ -221,7 +221,7 @@ pwa.state.update = async function () {
|
||||
'xapp': `${ctx.tribeid}:${ctx.website}`
|
||||
}
|
||||
}
|
||||
console.log( 'load new state.data', pwa.state.data )
|
||||
logger.info( 'load new state.data', pwa.state.data )
|
||||
}
|
||||
// Check if external component need to be load
|
||||
const app = `${pwa.state.data.ctx.tribeid}:${pwa.state.data.ctx.website}`;
|
||||
@ -229,9 +229,9 @@ pwa.state.update = async function () {
|
||||
pwa.state.data.login.user.ACCESSRIGHTS.app[ app ] &&
|
||||
pwa.state.data.login.user.ACCESSRIGHTS.app[ app ].js
|
||||
) {
|
||||
console.log( 'tttt', pwa.state.data.login.isAuthenticated, pwa.state.data.login.user.ACCESSRIGHTS.app[ app ].js )
|
||||
logger.info( 'tttt', pwa.state.data.login.isAuthenticated, pwa.state.data.login.user.ACCESSRIGHTS.app[ app ].js )
|
||||
pwa.state.data.login.user.ACCESSRIGHTS.app[ app ].js.some( ( u ) => {
|
||||
console.log( `load from user ACCESSRIGHTS app[${pwa.state.data.ctx.tribeid}:${pwa.state.data.ctx.website}].js : ${u}` )
|
||||
logger.info( `load from user ACCESSRIGHTS app[${pwa.state.data.ctx.tribeid}:${pwa.state.data.ctx.website}].js : ${u}` )
|
||||
const script = document.createElement( 'script' );
|
||||
script.src = u;
|
||||
script.async = false;
|
||||
|
@ -20,7 +20,7 @@ var pwa = pwa || {};
|
||||
pwa.app = {};
|
||||
pwa.app.init = async () => {
|
||||
//Load template in pwa.state.data.tpl
|
||||
console.log( 'app.init()' );
|
||||
logger.info( 'app.init()' );
|
||||
const tpllist = {};
|
||||
[ 'appsidebarmenu', 'apptopbarmenu', 'apptopbarnotification', 'apptopbarmessage' ].forEach( c => {
|
||||
tpllist[ c ] = `static/components/appmesa/${c}.mustache`
|
||||
@ -42,10 +42,10 @@ pwa.app.getcontent = async () => {
|
||||
if( pwa.state.data.login && pwa.state.data.login.isAuthenticated ) {
|
||||
//Get personnal menu user.ACCESSRIGHTS={app:{ "tribeid:website":{sidebar,top}
|
||||
const appname = `${pwa.state.data.ctx.tribeid}:${pwa.state.data.ctx.website}`
|
||||
//console.log( 'with ', appname )
|
||||
//logger.info( 'with ', appname )
|
||||
menu = menu.concat( pwa.state.data.login.user.ACCESSRIGHTS.app[ appname ] )
|
||||
}
|
||||
//console.log( 'update pwa.state.data.menu with ', menu )
|
||||
//logger.info( 'update pwa.state.data.menu with ', menu )
|
||||
menu.forEach( c => { menubase[ c ] = `static/data/${c}_${pwa.state.data.ctx.lang}.json` } );
|
||||
await pwa.state.loadfile( menubase, 'menu' );
|
||||
pwa.app.loadsidebarmenu( menu.filter( m => m.includes( 'sidebar' ) ) )
|
||||
@ -57,15 +57,15 @@ pwa.app.getcontent = async () => {
|
||||
pwa.app.clickactive();
|
||||
}
|
||||
pwa.app.loadsidebarmenu = ( list ) => {
|
||||
//console.log( 'list de menu sidebar', list )
|
||||
//logger.info( 'list de menu sidebar', list )
|
||||
const data = pwa.state.data.menu.sidebar
|
||||
for( let m of list ) {
|
||||
if( m != 'sidebar' ) {
|
||||
console.log( m )
|
||||
logger.info( m )
|
||||
data.sbgroupmenu = data.sbgroupmenu.concat( pwa.state.data.menu[ m ] )
|
||||
}
|
||||
}
|
||||
//console.log( data )
|
||||
//logger.info( data )
|
||||
document.querySelector( "#sidebar" )
|
||||
.innerHTML = Mustache.render( pwa.state.data.tpl.appsidebarmenu, data )
|
||||
}
|
||||
@ -91,7 +91,7 @@ pwa.app.loadtopmenu = ( list ) => {
|
||||
//pwa.message.update( );
|
||||
}
|
||||
}
|
||||
//console.log( 'topbar data', data.menuprofil );
|
||||
//logger.info( 'topbar data', data.menuprofil );
|
||||
document.querySelector( "#navbar" )
|
||||
.innerHTML = Mustache.render( pwa.state.data.tpl.apptopbarmenu, data )
|
||||
}
|
||||
@ -118,12 +118,12 @@ pwa.app.clickactive = () => {
const cleanactive = () => {
const el = document.querySelectorAll( '.sidebar-item' )
for( var i = 0; i < el.length; i++ ) {
//console.log( 'clean', el[ i ].classList )
//logger.info( 'clean', el[ i ].classList )
el[ i ].classList.remove( 'active' );
}
}
document.addEventListener( "click", ( e ) => {
console.log( 'click', e );
logger.info( 'click', e );
if( e.target.classList.contains( 'sidebar-link' ) ) {
cleanactive();
e.target.closest( '.sidebar-item' )

@ -10,7 +10,7 @@ ____________________
pwa.message = {};

pwa.message.update = ( urlmsg ) => {
console.log( 'get message update' );
logger.info( 'get message update' );
axios.get( urlmsg, { headers: pwa.state.data.headers } )
.then( rep => {
const tpl = document.getElementById( "app_menutopmessage" );

@ -18,7 +18,7 @@ pwa.message.update = ( urlmsg ) => {
.innerHTML = Mustache.render( tpl.innerHTML, { messagess: rep.data } );
} )
.catch( err => {
console.log( `Err pwa.notification.update data from ${urlmsg}`, err );
logger.info( `Err pwa.notification.update data from ${urlmsg}`, err );
} );

};
@ -73,14 +73,14 @@ pwa.referential.set = async ( sourceref ) => {
}
if( pwa.MODE_ENV == "dev" || !( pwa.state.data.app.referentials[ o ][ r ].version == clientconfref[ o ][ r ].version ) ) {
// alors nouvelle version à mettre à jour en dev on le fait systematiquement
console.log( `${pwa.urlbackauth}/referentials/content/${o}/${r}`, pwa.state.data.headers );
logger.info( `${pwa.urlbackauth}/referentials/content/${o}/${r}`, pwa.state.data.headers );
const dataref = await axios.get( `${pwa.urlbackauth}/referentials/content/${o}/${r}`, { headers: pwa.state.data.headers } )
if( dataref.status == 200 ) {
console.log( `${o} - ${r}`, dataref.data )
logger.info( `${o} - ${r}`, dataref.data )
pwa.state.data[ sourceref ].referentials[ o ][ r ] = dataref.data;
pwa.state.save()
} else {
console.log( `ERREUR de recuperation de referentiel ${o} ${r}` )
logger.info( `ERREUR de recuperation de referentiel ${o} ${r}` )
}
}
}

@ -137,7 +137,7 @@ pwa.referential.setting = async ( objecttype ) => {
const data = await axios.get( `${pwa.urlbackauth}/referentials/contentlist/${objecttype}`, {
headers: pwa.state.data.headers
} );
console.log( "Liste des referentiels ", data.data )
logger.info( "Liste des referentiels ", data.data )
let reqref = []
let ref = []
//init a temporary (opposite od state that is save) data to work on referential (each time referentials/object is load this variable is refresh)

@ -150,7 +150,7 @@ pwa.referential.setting = async ( objecttype ) => {
} );
axios.all( reqref )
.then( axios.spread( ( ...rep ) => {
console.log( rep )
logger.info( rep )
rep.forEach( ( d, i ) => {
pwa.tmp[ ref[ i ] ] = d.data
const submenuit = {
@ -174,13 +174,13 @@ pwa.referential.setting = async ( objecttype ) => {
pwa.state.data.app.referentials.json.referentialsetting.submenuitems.forEach( tab => {
document.getElementById( tab.id )
.innerHTML = Mustache.render( tpleditor, tab );
//console.log( tab.id, tab )
//logger.info( tab.id, tab )
// Convert each div with formfieldtype to a form field set with value if exist and listen button to run callback
pwa.referential.init( tab );
} );
} ) )
.catch( err => {
console.log( "eeeee", err );
logger.info( "eeeee", err );
const submenuit = {
active: "",
groupinfo: objecttype,

@ -198,7 +198,7 @@ pwa.referential.init = ( tab ) => {
const doctab = document.querySelector( `#${tab.id}` );
const editor = new JSONEditor( doctab.querySelector( `.jsoneditor` ), tab.optionjsoneditor );
console.table( tab )
console.log( tab.objecttype, tab.groupinfo )
logger.info( tab.objecttype, tab.groupinfo )
editor.set( pwa.tmp[ tab.groupinfo ] );
editor.expandAll();
// ajoute un listener sur btn pour faire axios post with editor.get()

@ -206,20 +206,20 @@ pwa.referential.init = ( tab ) => {
.forEach( act => {
act.addEventListener( 'click', e => {
e.preventDefault();
console.log( 'cliiiiiiiiiiiiiiiiiiiick', tab.id )
logger.info( 'cliiiiiiiiiiiiiiiiiiiick', tab.id )
if( e.target.classList.contains( 'save' ) ) {
/*
axios.put( `${pwa.urlbackauth}/referentials/content/${tab.objecttype}/${tab.groupinfo}`, editor.get(), {
headers: pwa.state.data.headers
} )
.then( data => {
console.log( "affiche message done" );
logger.info( "affiche message done" );
} )
.catch( err => {
console.log( "affiche message err ", err )
logger.info( "affiche message err ", err )
} );
*/
console.log( 'editor', editor.get() );
logger.info( 'editor', editor.get() );
}
if( e.target.classList.contains( 'delete' ) ) {
//axios.get( @tTODO la mise à jour du nouveau referentiel avec la version)
@ -227,9 +227,9 @@ pwa.referential.init = ( tab ) => {
if( e.target.classList.contains( 'copy' ) ) {
//@TODO create a new referential file localy from an existing one
}
/*console.log( e.target.closest( '[data-idref]' )
/*logger.info( e.target.closest( '[data-idref]' )
.getAttribute( 'data-idref' ) )
console.log( editor.get() );
logger.info( editor.get() );
envoyer à axios et modifier pwa.state. en cours
*/
} );

@ -267,5 +267,5 @@ pwa.referential.initold = async ( object ) => {
*/
// get json
const updatedJson = editor.get()
console.log( 'updatedJson', updatedJson )
logger.info( 'updatedJson', updatedJson )
}

@ -8,5 +8,5 @@ Reporting js managing sceen
//--##
pwa.reporting = pwa.reporting || {};
pwa.reporting.init = () => {
console.log('charge reporting');
logger.info('charge reporting');
}

@ -52,7 +52,7 @@ pwa.search.req = ( info ) => {
}
const searchtxt = document.getElementById( info )
.value;
console.log( info, searchtxt );
logger.info( info, searchtxt );

const req = { searchtxt };
req.results = [ {
@ -76,7 +76,7 @@ pwa.tuto.menutop = () => {

pwa.tuto.init = async (object) => {
console.log('Lance tuto')
logger.info('Lance tuto')
pwa.tuto.menutop();

// Reinit et ajoute des bloc

@ -65,7 +65,7 @@ pwa.userManager.ficheUser = function(iduser) {
}];
}
userForm.main = pwa.forms.genereForm(pwa.state.app.ref.users, datauser);
console.log("button envoye", userForm.button);
logger.info("button envoye", userForm.button);
userForm.buttonup = Mustache.render($("#actionButton")
.html(), {
button: userForm.button

@ -13,16 +13,16 @@ ____________________
pwa.notification = {};

pwa.notification.update = () => {
console.log( 'get notification update for a user' );
logger.info( 'get notification update for a user' );
axios.get( `https://${pwa.state.data.ctx.urlbackoffice}/notifications/user`, { headers: pwa.state.data.headers } )
.then( rep => {
console.log( "list des notifs", rep.data.payload.data )
logger.info( "list des notifs", rep.data.payload.data )
rep.data.payload.data.number = rep.data.payload.data.notifs.length;
document.getElementById( "topbarmenuright" )
.innerHTML = Mustache.render( pwa.state.data.tpl.notiflist, rep.data.payload.data ) + document.getElementById( "topbarmenuright" )
.innerHTML;
} )
.catch( err => {
console.log( `Err pwa.notification.update data for user into header ${pwa.state.data.headers}`, err );
logger.info( `Err pwa.notification.update data for user into header ${pwa.state.data.headers}`, err );
} );
};
@ -24,7 +24,7 @@ pwa.userprofile = pwa.userprofile || {};

pwa.userprofile.settings = ( e ) => {
console.groupCollapsed( 'load user settings' );
console.log( Object.keys( pwa ) )
logger.info( Object.keys( pwa ) )
//data form init from pwa.state or axios.get( data from user )
const data = pwa.state.data.userlogin;
// add meta data object to create forms

@ -38,11 +38,11 @@ pwa.userprofile.settings = ( e ) => {
// genere html field care do html after tag if not
f.html = pwa.form.tpl[ f.tpl ].html( f )
meta.users[ f.idfield ] = f
//console.log( f.idfield )
//console.log( f )
//logger.info( f.idfield )
//logger.info( f )
} );
pwa.state.data.app.referentials.json.usersetting.submenuitems[ 0 ].meta = meta;
console.log( "meta", pwa.state.data.app.referentials.json.usersetting )
logger.info( "meta", pwa.state.data.app.referentials.json.usersetting )
// tpl in #usersetting data in referentials json usersetting
document.getElementById( 'maincontent' )
.innerHTML = Mustache.render( document.getElementById( 'setting' )

@ -52,14 +52,14 @@ pwa.userprofile.settings = ( e ) => {
document.getElementById( tab.id )
.innerHTML = Mustache.render( document.getElementById( `${tab.id}tpl` )
.innerHTML, tab );
console.log( tab.id, tab )
logger.info( tab.id, tab )
// Convert each div with formfieldtype to a form field set with value if exist and listen button to run callback
pwa.form.init( tab.id, { ...tab, ...data }, pwa.userprofile.save )
} )
console.groupEnd();
}
pwa.userprofile.save = ( data ) => {
console.log( "data to save", data )
logger.info( "data to save", data )
}
pwa.userprofile.activities = () => {
console.group( 'load user activity' );
@ -4,7 +4,7 @@
$('figure.article-item').on ('click',function(e){
e.preventDefault();
var data = {};
console.log($(this).find('button').attr('href').replace('.html','.json'));
logger.info($(this).find('button').attr('href').replace('.html','.json'));
$.getJSON($(this).find('button').attr('href').replace('.html','.json') , function( contenuarticle){
$('#articlecontent').html(Mustache.to_html($('#tpl_article').html(),contenuarticle))
.promise().done(function(){

@ -44,16 +44,16 @@
$.ajax({ url: "/log", type: "POST", data:senddata, cache: false,
success: function(e) {
e=JSON.parse(e);
console.log(e);
logger.info(e);
if (e.etat == "ok") {
login(blocauth);
}else{
console.log(e.etat);
logger.info(e.etat);
$('#register-form p.msgreg').html(e.etat);
};
},
error: function(e) {
console.log(e);
logger.info(e);
$('#register-form p.msgreg').html($('#register-form').data('msgko'));
},
});

@ -75,15 +75,15 @@
data: { login: 'max' },
cache: false,
success: function (data, textStatus, request) {
console.log('URL Node-File-Manager here!');
console.log('data: ', data);
logger.info('URL Node-File-Manager here!');
logger.info('data: ', data);
displayIframe(blocauth, data.url);
},
error: function (response) {
console.log('err: ', response);
logger.info('err: ', response);
},
always: function (response) {
console.log('Hello!')
logger.info('Hello!')
}
});
});

@ -92,7 +92,7 @@

function displayIframe(blocauth, url) {
blocauth.html(`<iframe height="600" width="800" src=${url}> </iframe>`).promise().done(function() {
console.log('displayIframe');
logger.info('displayIframe');
})
}

@ -129,12 +129,12 @@
cache: false,
success: function (data, textStatus, request) {
const token = request.getResponseHeader('x-auth');
console.log('token: ', token);
logger.info('token: ', token);
localStorage.setItem('token', token);
directorySelector(blocauth);
},
error: function (response) {
console.log('err: ', response.responseJSON.error);
logger.info('err: ', response.responseJSON.error);
}
});
}
@ -194,13 +194,13 @@
}else{
$.ajax({ url: "http://maildigit.ndda.fr:3000/msg", type: "POST", data:senddata, cache: false,
success: function(e) {
//console.log(e);
//logger.info(e);
lastdatasend = senddata;
console.log(form.data('msgok'));
logger.info(form.data('msgok'));
form.find("p.msgform").html(msgok);
},
error: function(e) {
console.log(e);
logger.info(e);
form.find("p.msgform").html(msgko);
},
});

@ -23,11 +23,11 @@ checkdata.test.emailadress = ( ctx, email ) => {
* it check if each eamil separate by , are correct
*/
checkdata.test.emailadresslist = ( ctx, emaillist ) => {
//console.log(emaillist.split(','))
//logger.info(emaillist.split(','))
if( emaillist.length > 0 ) {
const emails = emaillist.split( ',' );
for( var i in emails ) {
//console.log(emails[i])
//logger.info(emails[i])
if( !checkdata.test.emailadress( "", emails[ i ].trim() ) ) {
return false
}
@ -53,7 +53,7 @@ checkdata.test.unique = ( ctx, val ) => {
if( ctx.list[ ctx.currentfield ] ) {
return !ctx.list[ ctx.currentfield ].includes( val );
} else {
console.log( 'ERR no list for field:' + ctx.currentfield );
logger.info( 'ERR no list for field:' + ctx.currentfield );
return false;
}
};

@ -85,11 +85,11 @@ checkdata.test.phoneNumber = ( ctx, phoneNumber ) => {
* it check if each phone separate by , are correct
*/
checkdata.test.phoneNumberlist = ( ctx, phonelist ) => {
//console.log(emaillist.split(','))
//logger.info(emaillist.split(','))
if( phonelist.length > 0 ) {
const phones = phonelist.split( ',' );
for( var i in phones ) {
//console.log(emails[i])
//logger.info(emails[i])
if( !checkdata.test.phoneNumber( "", phones[ i ].trim() ) ) {
return false
}

@ -117,8 +117,8 @@ checkdata.normalize.zfill10 = ( ctx, num ) => {
return s;
};
/*let tt = "+33 1 02.03 04 05";
console.log(checkdata.test.phoneNumber('', tt))
console.log(checkdata.normalize.phoneNumber('', tt))
logger.info(checkdata.test.phoneNumber('', tt))
logger.info(checkdata.normalize.phoneNumber('', tt))
*/
checkdata.evaluate = ( contexte, referential, data ) => {
/*

@ -129,9 +129,9 @@ checkdata.evaluate = ( contexte, referential, data ) => {
clean data eventually reformated
updateDatabase}
*/
console.log( 'contexte', contexte );
console.log( 'referentiel', referential );
console.log( 'data', data );
logger.info( 'contexte', contexte );
logger.info( 'referentiel', referential );
logger.info( 'data', data );
const invalidefor = [];
const objectdef = {};
const listfield = referential.map( ch => {
@ -150,7 +150,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
if( objectdef[ field ].check ) {
// check data with rule list in check
objectdef[ field ].check.forEach( ctrl => {
console.log( 'ctrl', ctrl );
logger.info( 'ctrl', ctrl );
contexte.currentfield = field;
if( !checkdata.test[ ctrl ] ) {
invalidefor.push( 'ERR check function does not exist :' + ctrl + '___' + field )

@ -163,7 +163,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {

if( objectdef[ field ].nouserupdate ) {
// check if user can modify this information
console.log(
logger.info(
'evaluation :' + field + ' -- ' + objectdef[ field ].nouserupdate,
eval( objectdef[ field ].nouserupdate )
);

@ -172,7 +172,7 @@ checkdata.evaluate = ( contexte, referential, data ) => {
}
}
} );
console.log( {
logger.info( {
invalidefor,
data
} );
@ -60,7 +60,7 @@ edit.article = function() {
//alert('todo from template ')
var origin = $(this).closest('[data-uuid]')
var originuuid = origin.attr('data-uuid')
console.log(originuuid)
logger.info(originuuid)
})

}

@ -69,7 +69,7 @@ edit.image = function() {
// Add button to upload file below each image
if($('input.imageLoader').length == 0) {
$('img').each(function(i) {
//console.log($(this).attr('src'));
//logger.info($(this).attr('src'));
$(this).attr('src', $(this).attr('src'));
$(this).attr('H', $(this).css('height'));
$(this).attr('L', $(this).attr('width'));

@ -99,8 +99,8 @@ edit.image = function() {
// Add fileName and tree in data attributes
imgEl[0].dataset.fileName = fileName;
imgEl[0].dataset.tree = tree + fileName;
console.log(imgEl[0].dataset.tree);
// console.log(imgEl);
logger.info(imgEl[0].dataset.tree);
// logger.info(imgEl);

if(ext === 'gif' || ext === 'png' || ext === 'jpeg' || ext === 'jpg') {
var reader = new FileReader();

@ -15,7 +15,7 @@ edit.init = function() {

// Add button to upload file below each image
$('img').each(function(i) {
//console.log($(this).attr('src'));
//logger.info($(this).attr('src'));
$(this).attr('src', $(this).attr('src'));
$(this).attr('H', $(this).css('height'));
$(this).attr('L', $(this).attr('width'));

@ -44,8 +44,8 @@ edit.init = function() {
// Add fileName and tree in data attributes
imgEl[0].dataset.fileName = fileName;
imgEl[0].dataset.tree = tree + fileName;
console.log(imgEl[0].dataset.tree);
// console.log(imgEl);
logger.info(imgEl[0].dataset.tree);
// logger.info(imgEl);

if (ext === 'gif' || ext === 'png' || ext === 'jpeg' || ext === 'jpg') {
var reader = new FileReader();
@ -9,8 +9,8 @@ const form = {};
url: `https://api.maildigit.fr/template/${url}.mustache`,
datatype: "jsonp",
success: function(tplq) {
console.log(tplq);
console.log(quest);
logger.info(tplq);
logger.info(quest);
return Mustache.render(tplq, quest);
}
});

@ -30,7 +30,7 @@ form.create = () => {
// let bloc = $(this).attr("id");
const bloc = form.getAttribute('id');
axios.get(`./${bloc}.json`).then(async formd => {
console.log('formd', formd);
logger.info('formd', formd);
const formdef = formd.data;
// Liste les template
let tplname = [formdef.info.tpl];

@ -49,12 +49,12 @@ form.create = () => {
tplname.forEach((t, i) => {
template[t] = tplhtml[i].data;
});
console.log('template', template);
logger.info('template', template);
// Construire le form
formdef.content = formdef.quest
.map(quest => Mustache.render(template[quest.tpl], quest))
.join(' ');
console.log('formdef', formdef);
logger.info('formdef', formdef);
form.innerHTML = Mustache.render(template[formdef.info.tpl], formdef);
});
});

@ -63,23 +63,23 @@ form.create = () => {
//https://api.maildigit.fr/template/
$.get(data.info.tpl, function(tpl) {
let questhtml = $.map(data.quest, function(quest, q) {
console.log(q);
console.log(quest);
logger.info(q);
logger.info(quest);
quest.idhtml = data.info.idhtml;
$.ajax({
url: data.quest[q].tpl,
datatype: "jsonp",
async: false,
success: function(tplq) {
console.log(tplq);
console.log(quest);
logger.info(tplq);
logger.info(quest);
return Mustache.render(tplq, quest);
}
});
});
console.log(questhtml);
logger.info(questhtml);
data.content = questhtml.join(" ");
console.log(data);
logger.info(data);
let fullform = Mustache.to_html(tpl, data);
$(data.info.idhtml).html(fullform);
$(".formmanager form").gestionform();
@ -116,7 +116,7 @@ form.manage = function() {
const reponsemultiple = [];
const blocmultichoix = $(this).closest('.type-multichoix');
const champsdest = blocmultichoix.attr('data-champs');
console.log(champsdest);
logger.info(champsdest);
const nbmaxchoix = parseInt(blocmultichoix.attr('data-nbmaxchoix'));
$(this).toggleClass('active');
if ($('.type-multichoix button.active').length > nbmaxchoix) {

@ -174,8 +174,8 @@ form.manage = function() {
*/
var reg = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;

// console.log($(this).val());
// console.log(reg.test($(this).val()) );
// logger.info($(this).val());
// logger.info(reg.test($(this).val()) );
if (!reg.test($(this).val())) {
if (!$(this).data('msgerreur')) {
etat += 'invalid email';

@ -238,7 +238,7 @@ form.manage = function() {
}
}
});
console.log(JSON.stringify(senddata));
logger.info(JSON.stringify(senddata));
let diff = false;
if (lastdatasend == '') {
diff = true;

@ -253,14 +253,14 @@ form.manage = function() {
'Action déjà effectuée, si vous recliquez, vous allez renvoyer la même demande';
lastdatasend = '';
}
console.log(etat);
logger.info(etat);
if (etat != '') {
formencours.find('p.msgform').html(etat);
} else {
console.log(
logger.info(
'https://api.maildigit.fr' + formencours.data('formmaildigit')
);
console.log(
logger.info(
formencours.data('tribeid') +
'_' +
$('html').attr('lang') +

@ -281,21 +281,21 @@ form.manage = function() {
data: senddata,
cache: false,
success: function(res) {
// console.log(e);
// logger.info(e);
var app = app || {};
lastdatasend = senddata;
console.log(formencours.data('msgok'));
console.log(res);
logger.info(formencours.data('msgok'));
logger.info(res);
if (res.idpanier) {
$('button.clickvalidecommande').data('idpanier', res.idpanier);
console.log('trig clickvalidecommande via form');
logger.info('trig clickvalidecommande via form');
$('button.clickvalidecommande').trigger('click');
}
formencours.find('p.msgform').html(msgok);
// var chargeext = app.custom(senddata) || 'Pas de custom à executer';
},
error: function(e) {
console.log(e);
logger.info(e);
// formencours.data(state).val('error');
formencours.find('p.msgform').html(msgko);
}
@ -8,8 +8,8 @@ var ajaxget = function(url) {
url: `https://api.maildigit.fr/template/${url}.mustache`,
datatype: "jsonp",
success: function(tplq) {
console.log(tplq);
console.log(quest);
logger.info(tplq);
logger.info(quest);
return Mustache.render(tplq, quest);
}
});

@ -26,14 +26,14 @@ $(document).ready(function() {
let bloc = form.getAttribute("id");
axios.get(`./${bloc}.json`).then(function(rep) {
// recup les templatemp
console.log("rep ddddd", rep);
logger.info("rep ddddd", rep);
let tpl = [rep.data.info.tpl];
$.each(rep.data.quest, function(q, quest) {
console.log(quest);
logger.info(quest);
if (tpl.indexOf(quest.tpl) == -1) {
tpl.push(quest.tpl);
}
console.log("template", tpl);
logger.info("template", tpl);
});
});
});

@ -42,23 +42,23 @@ $(document).ready(function() {
//https://api.maildigit.fr/template/
$.get(data.info.tpl, function(tpl) {
let questhtml = $.map(data.quest, function(quest, q) {
console.log(q);
console.log(quest);
logger.info(q);
logger.info(quest);
quest.idhtml = data.info.idhtml;
$.ajax({
url: data.quest[q].tpl,
datatype: "jsonp",
async: false,
success: function(tplq) {
console.log(tplq);
console.log(quest);
logger.info(tplq);
logger.info(quest);
return Mustache.render(tplq, quest);
}
});
});
console.log(questhtml);
logger.info(questhtml);
data.content = questhtml.join(" ");
console.log(data);
logger.info(data);
let fullform = Mustache.to_html(tpl, data);
$(data.info.idhtml).html(fullform);
$(".formmanager form").gestionform();
@ -96,7 +96,7 @@ $(document).ready(function() {
var reponsemultiple = [];
var blocmultichoix = $(this).closest(".type-multichoix");
var champsdest = blocmultichoix.attr("data-champs");
console.log(champsdest);
logger.info(champsdest);
var nbmaxchoix = parseInt(blocmultichoix.attr("data-nbmaxchoix"));
$(this).toggleClass("active");
if ($(".type-multichoix button.active").length > nbmaxchoix) {

@ -154,8 +154,8 @@ $(document).ready(function() {
*/
var reg = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;

//console.log($(this).val());
//console.log(reg.test($(this).val()) );
//logger.info($(this).val());
//logger.info(reg.test($(this).val()) );
if (!reg.test($(this).val())) {
if (!$(this).data("msgerreur")) {
etat += "invalid email";

@ -218,7 +218,7 @@ $(document).ready(function() {
}
}
});
console.log(JSON.stringify(senddata));
logger.info(JSON.stringify(senddata));
var diff = false;
if (lastdatasend == "") {
diff = true;

@ -233,14 +233,14 @@ $(document).ready(function() {
"Action déjà effectuée, si vous recliquez, vous allez renvoyer la même demande";
lastdatasend = "";
}
console.log(etat);
logger.info(etat);
if (etat != "") {
formencours.find("p.msgform").html(etat);
} else {
console.log(
logger.info(
"https://api.maildigit.fr" + formencours.data("formmaildigit")
);
console.log(
logger.info(
formencours.data("tribeid") +
"_" +
$("html").attr("lang") +

@ -264,21 +264,21 @@ $(document).ready(function() {
data: senddata,
cache: false,
success: function(res) {
//console.log(e);
//logger.info(e);
var app = app || {};
lastdatasend = senddata;
console.log(formencours.data("msgok"));
console.log(res);
logger.info(formencours.data("msgok"));
logger.info(res);
if (res.idpanier) {
$("button.clickvalidecommande").data("idpanier", res.idpanier);
console.log("trig clickvalidecommande via form");
logger.info("trig clickvalidecommande via form");
$("button.clickvalidecommande").trigger("click");
}
formencours.find("p.msgform").html(msgok);
//var chargeext = app.custom(senddata) || 'Pas de custom à executer';
},
error: function(e) {
console.log(e);
logger.info(e);
//formencours.data(state).val('error');
formencours.find("p.msgform").html(msgko);
}
@ -7,7 +7,7 @@ if (location.href.indexOf('https:')>-1){

app.test = function(){
$('a.test').on('click',function(){
console.log('lance test pdf');
logger.info('lance test pdf');
$.ajax({
url: "/pdf",
type: "POST",

@ -17,7 +17,7 @@ app.test = function(){
},
cache: false,
success: function(res) {
console.log(res.success)
logger.info(res.success)
},
error: function(res) {
},

@ -9,20 +9,20 @@ if (location.href.indexOf('https:')>-1){
md.questio = function(quest,iduser){

function sendDataToServer(survey) {
console.log(survey)
logger.info(survey)
$.ajax({url:md.u+'/questiodone',type: "POST", data:{'questio':survey.data,'quest':quest,'iduser':iduser}, cache: false,
success:function(e){
console.log(e.msg)
logger.info(e.msg)
}
});
}
$.ajax({ url: md.u+"/questiotodo", type: "GET",
data:{quest:quest,iduser:iduser}, cache: false,
success: function(e) {
console.log(e);
logger.info(e);
if (e.surveyjson) {
console.log('survey')
console.log(e.surveyjson)
logger.info('survey')
logger.info(e.surveyjson)
var survey = new Survey.Model(e.surveyjson);
$("#modalsurvey .survey").Survey({
model:survey,

@ -32,13 +32,13 @@ md.questio = function(quest,iduser){
}
},
error: function(e) {
console.log(e);
logger.info(e);
},
});
}
$(document).ready(function(){
$('[data-tgsurvey][data-tgid]').on('click',function(){
console.log($(this).data('tgsurvey')+"-"+$(this).data('tgid'))
logger.info($(this).data('tgsurvey')+"-"+$(this).data('tgid'))
//pour tester enlever cette ligne pour la prod
var idt = $(this).data('tgid')
idt = $("input.testid").val()
@ -25,12 +25,12 @@ tg.plan = {
tg.content = [];
if(localStorage.getItem('tgcontent')) tg.content = JSON.parse(localStorage.getItem('tgcontent'));
tg.listen = function() {
//$('.data-tgmd').onclick(function(){console.log("test")})
//$('.data-tgmd').onclick(function(){logger.info("test")})
tg.plan.action.forEach(e => {
$(e.selector)
.on(e.action, function() {
//envoie un poste avec tag et onclick
console.log('declenche ' + e.action + ' sur ' + e.selector + 'avec valeur de ' + $(e.selector)
logger.info('declenche ' + e.action + ' sur ' + e.selector + 'avec valeur de ' + $(e.selector)
.val());
tgcontent.push([e.action, e.selector, $(e.selector)
.val(), Date.now()

@ -88,7 +88,7 @@ tg.getCookie = (key) => {

tg.setlocalS = (info) => {
let r = localStorage.getItem('r');
console.log(r);
logger.info(r);
if(r == undefined || !r) {
r = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = (Math.random() * 16) | 0,

@ -101,7 +101,7 @@ tg.setlocalS = (info) => {
if(info.id) {
localStorage.setItem('id', info.id);
}
console.log(info)
logger.info(info)
tg.action(info)
}
tg.agreement = () => {

@ -127,7 +127,7 @@ tg.action = (post) => {
if(localStorage.getItem('r')) post.r = localStorage.getItem('r');
if(localStorage.getItem('id')) post.id = localStorage.getItem('id');
if(localStorage.getItem('accept')) post.accept = localStorage.getItem('accept');
console.log('post content', post);
logger.info('post content', post);
let xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {};
xhttp.open(

@ -156,9 +156,9 @@ $(document)
pwa.tribeid = $('#mxp')
.attr('data-tribeid');
}
console.log('pwa', pwa)
console.log('url', window.location.href)
console.log('info', tg.getUrlParams(window.location.href))
logger.info('pwa', pwa)
logger.info('url', window.location.href)
logger.info('info', tg.getUrlParams(window.location.href))
tg.setlocalS(tg.getUrlParams(window.location.href));
tg.agreement();
});
@ -1,6 +1,6 @@
'use strict';

console.log('md.u:' + md.u);
logger.info('md.u:' + md.u);
md.cookiepol = function() {
function setCookie(key, value) {
var expires = new Date();

@ -27,12 +27,12 @@ md.cookiepol = function() {
md.tg = function(v) {
md.cookiepol();
var l = window.location.href.split('#coo');
console.log(l);
logger.info(l);
if (l.length == 2) {
v += l[1];
}
var r = localStorage.getItem('r');
console.log(r);
logger.info(r);
if (r == undefined) {
r = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = (Math.random() * 16) | 0,

@ -42,7 +42,7 @@ md.tg = function(v) {
localStorage.setItem('r', r);
}
//$.get(md.u+'/?a=tg&r='+r+v,function(data,status){});
console.log(
logger.info(
md.u + '/?a=tg&r=' + r + '&tit=' + encodeURIComponent(document.title) + v
);
var xhttp = new XMLHttpRequest();
src/core/logger.js (new file, 9 lines)

@ -0,0 +1,9 @@
const winston = require('winston');

const logConfiguration = {
'transports': [
new winston.transports.Console()
]
};

module.exports = winston.createLogger(logConfiguration);
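
A minimal sketch only, not part of this commit: the same winston API also accepts an explicit level and a format, so the bare Console transport above could emit timestamped, levelled lines. The 'info' level and the timestamp/printf format below are illustrative assumptions; callers keep the same surface (logger.info(...), logger.error(...)), so the files above would not need further changes.

const winston = require('winston');

// Illustrative variant of src/core/logger.js: same Console transport,
// plus an explicit level and a timestamped one-line output format.
const logConfiguration = {
  level: 'info',
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.printf(({ timestamp, level, message }) => `${timestamp} [${level}] ${message}`)
  ),
  transports: [new winston.transports.Console()]
};

module.exports = winston.createLogger(logConfiguration);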