diff --git a/middlewares/ASUPhaveAccessrighttoanobject.js b/middlewares/ASUPhaveAccessrighttoanobject.js deleted file mode 100755 index 09c8566..0000000 --- a/middlewares/ASUPhaveAccessrighttoanobject.js +++ /dev/null @@ -1,95 +0,0 @@ -const jwt = require( 'jwt-simple' ); -const jsonfile = require( 'jsonfile' ); -const fs = require( 'fs-extra' ); -const moment = require( 'moment' ); -const glob = require( 'glob' ); -const path = require( 'path' ); - -// A REMPLACER PAR hasAccessrighton.js -/* -qui permet de passer en parametre des tests d'actions autoriser sur une objet - -*/ - - -// Check if package is installed or not to pickup the right config file -const src = ( __dirname.indexOf( '/node_modules/' ) > -1 ) ? '../../..' : '..'; -const config = require( path.normalize( `${__dirname}/${src}/config.js` ) ); - -const haveAccessrighttoanobject = ( req, res, next ) => { - /* - from isAuthenticated req.session.header.accessrights={app:{'tribeid:projet':profile}, - data:{ "sitewebsrc": "RWCDO", - "contacts": "RWCDO"}} - from the last successfull authentification. - profile is a keyword menu available into clientconf.json of tribeid - data, list of object accessright Read Write Create Delete Owner - a xuuid can read any objet if R - if O wner means that it can only read write its object create by himself - - This middleware check that we apply RESTFull CRUD concept depending of access right of a xuuid trying to act onto a xworkon tribeid - Action get = Read put = Update post = Create delete = Delete - object = req.Urlpath.split(/)[0] - */ - console.log( 'haveAccessrighttoanobject()?' ); - // req.originalUrl contain /object/action/id object id to run action - // req.route.methods ={ put:true, delete:true post:true, get:true } - const objet = req.baseUrl.slice( 1 ); //contain /object - const model = objet.charAt( 0 ) - .toUpperCase() + objet.slice( 1 ); // model u object with first letter in uppercase - let droit = ""; - let ownby = []; - /* - Check if object exist and get the OWNBY array, not relevant for referentials object that is only manage by CRUD no Owner logic - */ - if( objet != "referentials" ) { - if( !fs.existsSync( `${config.tribes}/${req.session.header.xworkon}/${objet}/${req.params.id}.json` ) ) { - res.status( 404 ) - .send( { - payload: { - info: [ 'idNotfound' ], - model, - moreinfo: `${config.tribes}/${req.session.header.xworkon}/${objet}/${req.params.id}.json does not exist ` - } - } ); - } else { - ownby = jsonfile.readFileSync( `${config.tribes}/${req.session.header.xworkon}/${objet}/${req.params.id}.json` ) - .OWNBY; - } - } - //console.log( req.session.header ) - if( req.session.header.xpaganid == config.devnoauthxuuid ) { - console.log( 'haveAccessrighttoanobject yes cause dev test user' ); - } else { - // accessrights was load from isAuthenticated.js middleware to make it available in req.session.header to be used into route for specific access if needed mainly to filter data in the get request depending of profil and data accessright. 
- if( Object.keys( req.session.header.accessrights.data ) - .includes( "Alltribeid" ) && req.session.header.accessrights.data[ "Alltribeid" ][ objet ] ) { - droit = req.session.header.accessrights.data[ "Alltribeid" ][ objet ]; - } - // erase rights if tribeid is specified in addition of Alltribeid - if( ( req.session.header.accessrights.data[ req.session.header.xworkon ] ) && - req.session.header.accessrights.data[ req.session.header.xworkon ][ objet ] ) { - droit = req.session.header.accessrights.data[ req.session.header.xworkon ][ objet ]; - if( ( req.route.methods.get && droit.includes( 'R' ) ) || - ( req.route.methods.put && droit.includes( 'U' ) ) || - ( req.route.methods.delete && droit.includes( 'D' ) ) || - ownby.includes( req.params.id ) ) { - console.log( 'haveAccessrighttoanobject yes' ) - } else if( req.route.methods.post && droit.includes( 'C' ) ) { - console.log( 'haveAccessrighttoanobject yes create' ); - } else { - console.log( 'haveAccessrighttoanobject no' ) - res.status( 403 ) - .send( { - payload: { - info: [ 'NoAccessrights' ], - model, - moreinfo: `User ${req.session.header.xpaganid} accessrights are not set to do this action` - } - } ); - } - } - } - next(); -}; -module.exports = haveAccessrighttoanobject; diff --git a/middlewares/isAuthenticated.js b/middlewares/isAuthenticated.js index e041140..73d6c0d 100755 --- a/middlewares/isAuthenticated.js +++ b/middlewares/isAuthenticated.js @@ -40,7 +40,7 @@ const isAuthenticated = ( req, res, next ) => { // update tokens from file in case recently logged try { console.log( 'token not in list of token (req.app.locals.tokens) try to refresh from file' ); - req.app.locals.tokens = jsonfile.readFileSync( `${config.tmp}/tokens.json` ); + req.app.locals.tokens = fs.readJsonSync( `${config.tmp}/tokens.json` ); } catch ( err ) { console.log( `check isAuthenticated issue in reading ${config.tmp}/tokens.json` ); } @@ -104,7 +104,7 @@ const isAuthenticated = ( req, res, next ) => { } }; req.app.locals.tokens = newtokens; - jsonfile.writeFileSync( `${config.tmp}/tokens.json`, newtokens ); + fs.outputJsonSync( `${config.tmp}/tokens.json`, newtokens ); fs.writeFileSync( `${config.tmp}/menagedone${currentday}`, 'fichier semaphore to clean data each day can be deleted with no consequence', 'utf-8' ); } next(); diff --git a/models/Contracts.js b/models/Contracts.js index 5c4d52d..a9fe1f6 100755 --- a/models/Contracts.js +++ b/models/Contracts.js @@ -1,13 +1,9 @@ const fs = require( 'fs-extra' ); const jsonfile = require( 'jsonfile' ); const glob = require( 'glob' ); -const path = require( 'path' ); const moment = require( 'moment' ); const axios = require( 'axios' ); -const scrapeit = require( 'scrape-it' ); -const cheerio = require( 'cheerio' ); -const Mustache = require( 'mustache' ); -const qrcode = require( 'qrcode' ); + // Check if package is installed or not to pickup the right config file @@ -62,14 +58,14 @@ Contracts.initActiontodo = async ( envoie ) => { nbactionerr: 0, actionlist: "" }; - const listclient = jsonfile.readFileSync( `${config.tribes}/tribeids.json` ); + const listclient = fs.readJsonSync( `${config.tribes}/tribeids.json` ); for( let clid in listclient ) { console.log( listclient[ clid ] ); let listaction = glob.sync( `${config.tribes}/${listclient[clid]}/actions/todo/*.json` ); for( let action in listaction ) { console.log( listaction[ action ] ) log.nbaction++; - todo = jsonfile.readFileSync( listaction[ action ] ); + todo = fs.readJsonSync( listaction[ action ] ); let passdate = true; // currentdate doit etre 
après la startDate si existe et avant valideuntilDate si existe // console.log('test now est avant date start ', moment() < moment(todo.startDate, 'YYYYMMDD HH:mm:ss').toDate()); @@ -99,12 +95,12 @@ Contracts.initActiontodo = async ( envoie ) => { }; if( parseInt( todo.maxnumberoftime ) && todo.maxnumberoftime != "999" && ( todo.datesRun.length >= parseInt( todo.maxnumberoftime ) ) ) { //archive en done this triggeraction - jsonfile.writeFileSync( listaction[ action ].replace( '/todo/', '/done/' ), todo, { + fs.outputJsonSync( listaction[ action ].replace( '/todo/', '/done/' ), todo, { spaces: 2 } ); fs.unlinkSync( listaction[ action ] ); } else { - jsonfile.writeFileSync( listaction[ action ], todo, { + fs.outputJsonSync( listaction[ action ], todo, { spaces: 2 } ); } diff --git a/models/Messages.js b/models/Messages.js index e1d5465..5cc7507 100755 --- a/models/Messages.js +++ b/models/Messages.js @@ -324,7 +324,7 @@ Messages.request = ( tribeid, uuid, ACCESSRIGHTS, apprequest ) => { Messages.aggregate(); //for test purpose - //const notif = jsonfile.readFileSync( `${config.tribes}/ndda/spacedev/mesa/src/static/components/notification/data_notiflist_fr.json` ); + //const notif = fs.readJsonSync( `${config.tribes}/ndda/spacedev/mesa/src/static/components/notification/data_notiflist_fr.json` ); let notif; if( !fs.existsSync( `${config.tribes}/${apprequest.tribeid}/spacedev/${apprequest.website}/src/static/components/notification/data_notiflist_${apprequest.lang}.json` ) ) { // by default we send back this but this highlght an issue @@ -345,7 +345,7 @@ Messages.request = ( tribeid, uuid, ACCESSRIGHTS, apprequest ) => { } ] }; } else { - notif = jsonfile.readFileSync( `${config.tribes}/${apprequest.tribeid}/spacedev/${apprequest.website}/src/static/components/notification/data_notiflist_${apprequest.lang}.json` ); + notif = fs.readJsonSync( `${config.tribes}/${apprequest.tribeid}/spacedev/${apprequest.website}/src/static/components/notification/data_notiflist_${apprequest.lang}.json` ); //clean up example notif notif.notifs = []; } diff --git a/models/Outputs.js b/models/Outputs.js index 651d687..ed3f3b2 100755 --- a/models/Outputs.js +++ b/models/Outputs.js @@ -178,10 +178,10 @@ Outputs.generemsg = async ( msg, header ) => { // Recupere les parametre smtp du domainName à utiliser console.log( 'pass Outputs.generemsg' ) try { - const confclientexpediteur = jsonfile.readFileSync( `${config.tribes}/${msg.tribeidperso.tribeidexpediteur}/clientconf.json` ); + const confclientexpediteur = fs.readJsonSync( `${config.tribes}/${msg.tribeidperso.tribeidexpediteur}/clientconf.json` ); //console.log('expediteur', confclientexpediteur); msg.smtp = confclientexpediteur.smtp; - /* const confclient = jsonfile.readFileSync( + /* const confclient = fs.readJsonSync( `${config.tribes}/${msg.tribeidperso.tribeid}/clientconf.json` );*/ } catch ( err ) { @@ -433,7 +433,7 @@ Outputs.addjson = function ( data, header ) { */ // console.log(req.body.jsonp); try { - jsonfile.writeFileSync( header.destinationfile + '/' + header.filename, data.jsonp ); + fs.outputJsonSync( header.destinationfile + '/' + header.filename, data.jsonp ); if( data.callback ) { const execCB = require( `${config.mainDir}/models/tribeid/${header.xworkon }` ); diff --git a/models/OutputsDev.js b/models/OutputsDev.js index 444da52..3861c53 100755 --- a/models/OutputsDev.js +++ b/models/OutputsDev.js @@ -147,7 +147,7 @@ Outputs.generemsg = async ( msg, header ) => { // Recupere les parametre smtp du domainName à utiliser console.log( 'pass 
Outputs.generemsg' ) try { - const confclientexpediteur = jsonfile.readFileSync( `${config.tribes}/${msg.tribeidperso.tribeidexpediteur}/clientconf.json` ); + const confclientexpediteur = fs.readJsonSync( `${config.tribes}/${msg.tribeidperso.tribeidexpediteur}/clientconf.json` ); msg.smtp = confclientexpediteur.smtp; } catch ( err ) { console.log( 'la conf smtp du client n\'est pas definie' ); @@ -348,7 +348,7 @@ Outputs.addjson = function ( data, header ) { */ // console.log(req.body.jsonp); try { - jsonfile.writeFileSync( header.destinationfile + '/' + header.filename, data.jsonp ); + fs.outputJsonSync( header.destinationfile + '/' + header.filename, data.jsonp ); if( data.callback ) { const execCB = require( `${config.mainDir}/models/tribeid/${header.xworkon }` ); diff --git a/models/Pagans.js b/models/Pagans.js index a003301..ca98b6c 100755 --- a/models/Pagans.js +++ b/models/Pagans.js @@ -141,7 +141,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { console.group( `Pagans.updateDatabase for ${tribeid} with user ${user.LOGIN}` ) console.assert( config.loglevel == "quiet", 'user', user ); const loginsIndex = `${config.tribes}/${tribeid}/users/searchindex/logins.json`; - jsonfile.readFile( loginsIndex, function ( err, logins ) { + fs.readJson( loginsIndex, function ( err, logins ) { console.assert( config.loglevel == "quiet", 'logins', logins ); try { if( rm ) { @@ -149,7 +149,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { } else { logins[ user.LOGIN ] = user.UUID; } - jsonfile.writeFile( loginsIndex, logins, { + fs.outputJson( loginsIndex, logins, { spaces: 2 }, err => { if( err ) console.log( err ); @@ -159,7 +159,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { } } ); const uidsIndex = `${config.tribes}/${tribeid}/users/searchindex/uids.json`; - jsonfile.readFile( uidsIndex, function ( err, uids ) { + fs.readJson( uidsIndex, function ( err, uids ) { try { if( rm ) { delete uids[ user.UUID ]; @@ -172,7 +172,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { user.TOKEN ]; } - jsonfile.writeFile( uidsIndex, uids, { + fs.outputJson( uidsIndex, uids, { spaces: 2 }, err => { if( err ) console.log( err ); @@ -182,7 +182,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { } } ); const emailsIndex = `${config.tribes}/${tribeid}/users/searchindex/emails.json`; - jsonfile.readFile( emailsIndex, function ( err, emails ) { + fs.readJson( emailsIndex, function ( err, emails ) { console.assert( config.loglevel == "quiet", 'emailss', emails ); try { if( rm ) { @@ -190,7 +190,7 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { } else { emails[ user.EMAIL ] = user.UUID; } - jsonfile.writeFile( emailsIndex, emails, { + fs.outputJson( emailsIndex, emails, { spaces: 2 }, err => { if( err ) console.log( err ); @@ -202,19 +202,19 @@ Pagans.updateDatabase = ( user, tribeid, rm = false ) => { const tokensIndex = `${config.tmp}/tokens.json`; let tokens = {}; try { - tokens = jsonfile.readFileSync( tokensIndex ); + tokens = fs.readJsonSync( tokensIndex ); } catch ( err ) { console.log( 'tokens.json not available' ) } // tokens[user.UUID] = user.TOKEN; tokens[ user.UUID ] = { TOKEN: user.TOKEN, ACCESSRIGHTS: user.ACCESSRIGHTS }; - jsonfile.writeFileSync( tokensIndex, tokens, { + fs.outputJsonSync( tokensIndex, tokens, { spaces: 2 } ); /* - jsonfile.readFile(tokensIndex, function(err, tokens) { + fs.readJson(tokensIndex, function(err, tokens) { tokens[user.UUID] = user.TOKEN; - jsonfile.writeFile(tokensIndex, tokens, { spaces: 2 }, 
err => { + fs.outputJson(tokensIndex, tokens, { spaces: 2 }, err => { if (err) console.log(err); }); });*/ @@ -254,7 +254,7 @@ Pagans.getUserlist = ( header, filter, field ) => { const Userlist = []; glob.sync( `${config.tribes}/${header.xworkon}/users/*/profil.json` ) .forEach( f => { - const infouser = jsonfile.readFileSync( f ); + const infouser = fs.readJsonSync( f ); // Ajouter filter et liste de field if( filter != 'all' ) { // decode filter et test @@ -283,7 +283,7 @@ Pagans.getinfoPagans = ( tribeid, accessrights, listindex ) => { indexs.forEach( index => { access = !( [ 'emails', 'logins', 'uids' ].includes( index ) && !( accessrights.data[ object ] && accessrights.data[ object ].includes( 'R' ) ) ); if( access && fs.existsSync( `${config.tribes}/${tribeid}/${object}/searchindex/${index}.json` ) ) { - info[ index ] = jsonfile.readFileSync( `${config.tribes}/${tribeid}/${object}/searchindex/${index}.json` ) + info[ index ] = fs.readJsonSync( `${config.tribes}/${tribeid}/${object}/searchindex/${index}.json` ) } } ) console.log( info ) @@ -304,7 +304,7 @@ Pagans.getUser = ( UUID, tribeid, accessrights ) => { } } } - const user = jsonfile.readFileSync( `${config.tribes}/${tribeid}/users/${UUID}.json` ); + const user = fs.readJsonSync( `${config.tribes}/${tribeid}/users/${UUID}.json` ); let access = true; //console.log("test accessrights.data['users'].includes('R')", accessrights.data['users'].includes('R')) console.assert( config.loglevel == "quiet", 'accessrights', accessrights ) @@ -341,7 +341,7 @@ Pagans.getUserIdFromEMAIL = ( tribeid, EMAIL ) => { } }; } - const emailsIndex = jsonfile.readFileSync( `${config.tribes}/${tribeid}/users/searchindex/emails.json` ); + const emailsIndex = fs.readJsonSync( `${config.tribes}/${tribeid}/users/searchindex/emails.json` ); if( !emailsIndex.hasOwnProperty( EMAIL ) ) { return { status: 404, @@ -371,7 +371,7 @@ Pagans.updateUserpassword = ( UUID, header, data ) => { // console.log('Credentials are matching!'); if( checkdata.test.password( {}, data.pswnew ) ) { user.PASSWORD = bcrypt.hashSync( data.pswnew, config.saltRounds ); - jsonfile.writeFileSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, { + fs.outputJsonSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, { spaces: 2 } ); Pagans.updateDatabase( user, header.xworkon, false ); @@ -399,11 +399,11 @@ Pagans.createUser = ( header, data ) => { */ console.log( 'createUser on header.xworkon:' + header.xworkon + ' by user:' + header.xpaganid ); console.assert( config.loglevel == "quiet", 'with data:', data ); - const ref = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/referentials/${header.xlang}/object/users.json` ); - const logins = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/logins.json` ); + const ref = fs.readJsonSync( `${config.tribes}/${header.xworkon}/referentials/${header.xlang}/object/users.json` ); + const logins = fs.readJsonSync( `${config.tribes}/${header.xworkon}/users/searchindex/logins.json` ); const LOGIN = Object.keys( logins ); console.assert( config.loglevel == "quiet", 'LOGIN list', LOGIN ); - const emails = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/emails.json` ); + const emails = fs.readJsonSync( `${config.tribes}/${header.xworkon}/users/searchindex/emails.json` ); console.assert( config.loglevel == "quiet", 'emails', emails ); const EMAIL = Object.keys( emails ); console.assert( config.loglevel == "quiet", 'EMAIL list', EMAIL ); @@ -427,7 +427,7 @@ 
Pagans.createUser = ( header, data ) => { } }; } - const clientConfig = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/clientconf.json` ); + const clientConfig = fs.readJsonSync( `${config.tribes}/${header.xworkon}/clientconf.json` ); const user = check.data; user.DATE_CREATE = new Date() .toISOString(); @@ -441,7 +441,7 @@ Pagans.createUser = ( header, data ) => { user.ACCESSRIGHTS = { app: {}, data: {} }; } user.ACCESSRIGHTS.data[ header.xworkon ] = { users: "O" }; - jsonfile.writeFileSync( `${config.tribes}/${header.xworkon}/users/${user.UUID}.json`, user, { + fs.outputJsonSync( `${config.tribes}/${header.xworkon}/users/${user.UUID}.json`, user, { spaces: 2 } ); Pagans.updateDatabase( user, header.xworkon, false ); @@ -490,14 +490,14 @@ Pagans.updateUser = ( UUID, header, data ) => { }; } */ - const ref = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/referentials/object/users_${ + const ref = fs.readJsonSync( `${config.tribes}/${header.xworkon}/referentials/object/users_${ header.xlang }.json` ); - const logins = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/logins.json` ); + const logins = fs.readJsonSync( `${config.tribes}/${header.xworkon}/users/searchindex/logins.json` ); const LOGIN = Object.keys( logins ) .filter( l => logins[ l ] != user.UUID ); // console.log( 'LOGIN list', LOGIN ); - const emails = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/searchindex/emails.json` ); + const emails = fs.readJsonSync( `${config.tribes}/${header.xworkon}/users/searchindex/emails.json` ); // console.log( 'emails', emails ); const EMAIL = Object.keys( emails ) .filter( e => emails[ e ] != user.UUID ); @@ -546,7 +546,7 @@ Pagans.updateUser = ( UUID, header, data ) => { .toISOString(); } try { - jsonfile.writeFileSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, { + fs.outputJsonSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json`, user, { spaces: 2 } ); //console.log( 'declenche updatabase', updateDatabase ) @@ -579,7 +579,7 @@ Pagans.deleteUser = ( UUID, header ) => { // Delete remove from users object UUID and update index // Activity is not deleted => means some activity can concern an UUID that does not exist anymore. 
// update index - const infouser = jsonfile.readFileSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json` ); + const infouser = fs.readJsonSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json` ); Pagans.updateDatabase( infouser, header.xworkon, true ); fs.removeSync( `${config.tribes}/${header.xworkon}/users/${UUID}.json` ); return { @@ -612,7 +612,7 @@ Pagans.loginUser = ( header, body, checkpsw ) => { // il faut donc modifier le header au moment du LOGIN // pour que l'update du user au moment du LOGIN concerne bien le bon domain header.xworkon = header.xtribe - const LOGINdom = jsonfile.readFileSync( `${config.tmp}/loginsglob.json` ); + const LOGINdom = fs.readJsonSync( `${config.tmp}/loginsglob.json` ); console.assert( config.loglevel == "quiet", LOGINdom ) console.assert( config.loglevel == "quiet", body ) if( !LOGINdom[ body.LOGIN ] ) { @@ -621,7 +621,7 @@ Pagans.loginUser = ( header, body, checkpsw ) => { data: { info: [ 'LoginDoesNotExist' ], model: 'Pagans' } }; } - const logins = jsonfile.readFileSync( `${config.tribes}/${LOGINdom[body.LOGIN]}/users/searchindex/logins.json` ); + const logins = fs.readJsonSync( `${config.tribes}/${LOGINdom[body.LOGIN]}/users/searchindex/logins.json` ); if( !Object.keys( logins ) .includes( body.LOGIN ) ) { return { @@ -678,7 +678,7 @@ Pagans.loginUser = ( header, body, checkpsw ) => { if( user.ACCESSRIGHTS.data[ "Alltribeid" ] ) { //cas admin on transforme les droits sur tous les tribeid existant const newaccessrightsdata = {} - jsonfile.readFileSync( `${config.tribes}/tribeids.json` ) + fs.readJsonSync( `${config.tribes}/tribeids.json` ) .forEach( cid => { newaccessrightsdata[ cid ] = user.ACCESSRIGHTS.data[ "Alltribeid" ] } ) @@ -701,7 +701,7 @@ Pagans.loginUser = ( header, body, checkpsw ) => { Pagans.getlinkwithoutpsw = async ( EMAIL, header ) => { // check le domain d'appartenance de l'eamail dans /tribes/emailsglob.json={email:cleintId} // on remplace le header.xtribeid - const domforemail = jsonfile.readFileSync( `${config.tribes}/emailsglob.json`, 'utf-8' ); + const domforemail = fs.readJsonSync( `${config.tribes}/emailsglob.json`, 'utf-8' ); if( domforemail[ EMAIL ] ) { header.xtribe = domforemail[ EMAIL ] } else { @@ -710,10 +710,10 @@ Pagans.getlinkwithoutpsw = async ( EMAIL, header ) => { // recupere le uuid du user dans /tribes/tribeid/users/searchindex/emails.json // puis l'ensemble des info des user du domain /uuids.json // infoforuuid[uuidforemail[EMAIL]] permet de récupérer toutes info du user, droit, etc... 
- const uuidforemail = jsonfile.readFileSync( `${config.tribes}/${header.xtribe}/users/searchindex/emails.json`, 'utf8' ); - const infoforuuid = jsonfile.readFileSync( `${config.tribes}/${header.xtribe}/users/searchindex/uids.json`, 'utf8' ); + const uuidforemail = fs.readJsonSync( `${config.tribes}/${header.xtribe}/users/searchindex/emails.json`, 'utf8' ); + const infoforuuid = fs.readJsonSync( `${config.tribes}/${header.xtribe}/users/searchindex/uids.json`, 'utf8' ); // On recupere le modele d'email appemailinfo qui doit être présent dans clientconf.json - let confdom = jsonfile.readFileSync( `${config.tribes}/${header.xtribe}/clientconf.json`, 'utf8' ); + let confdom = fs.readJsonSync( `${config.tribes}/${header.xtribe}/clientconf.json`, 'utf8' ); let checkinfomail = ""; if( !confdom.appemailinfo ) { checkinfomail += ' Erreur de clientconfig il manque un objet appemailinfo pour poursuivre'; diff --git a/models/Tags.js b/models/Tags.js index ab64b3d..0ed7c0a 100755 --- a/models/Tags.js +++ b/models/Tags.js @@ -57,7 +57,7 @@ Tags.getfile = ( filename, req ) => { } } if( infotg[ 0 ] == "imgtg" ) { - jsonfile.writeFile( `${config.tribes}/${infotg[1]}/tags/imgtg/${Date.now()}.json`, Tags.info( { filename: filename, messageId: infotg[ 2 ], operationId: infotg[ 3 ], identifiant: infotg[ 4 ] }, req ), function ( err ) { + fs.outputJson( `${config.tribes}/${infotg[1]}/tags/imgtg/${Date.now()}.json`, Tags.info( { filename: filename, messageId: infotg[ 2 ], operationId: infotg[ 3 ], identifiant: infotg[ 4 ] }, req ), function ( err ) { if( err ) { console.log( `Erreur de sauvegarde de tag:${filename}` ) } @@ -75,7 +75,7 @@ Tags.savehits = ( req ) => { return false; } else { const info = JSON.parse( JSON.stringify( req.body ) ); - jsonfile.writeFile( `${config.tribes}/${req.params.tribeid}/tags/hits/${Date.now()}.json`, Tags.info( info, req ), function ( err ) { + fs.outputJson( `${config.tribes}/${req.params.tribeid}/tags/hits/${Date.now()}.json`, Tags.info( info, req ), function ( err ) { if( err ) { console.log( `Erreur de sauvegarde de tag pour ${req.params.tribeid} check si /tags/hits et /tags/imgtg exist bien ` ) } @@ -131,15 +131,15 @@ Tags.dataloadstat = ( tribeid ) => { } }; try { - agrege.data = jsonfile.readfileSync( `${config.tribes}/${tribeid}/tags/stats/data.json`, "utf-8" ); - agrege.graph = jsonfile.readfileSync( `${config.tribes}/${tribeid}/tags/stats/graph.json`, "utf-8" ); + agrege.data = fs.readJsonSync( `${config.tribes}/${tribeid}/tags/stats/data.json`, "utf-8" ); + agrege.graph = fs.readJsonSync( `${config.tribes}/${tribeid}/tags/stats/graph.json`, "utf-8" ); } catch ( err ) { console.log( "ATTENTION tag reinitialisé en data.json et graph.json, s'il s'agit de 1ere connexion pas de pb. 
Le risque est de perdre les tag historiques" ) //return { status: 503, payload: { info: ['Errconfig'], model: 'Tags', moreinfo: `Il manque un ${config.tribes}/${tribeid}/tags/stats/data.json ou stats/graph.json` } } } glob.sync( `${config.tribes}/${tribeid}/tags/hits/*` ) .forEach( f => { - const hit = jsonfile.readFileSync( f ); + const hit = fs.readJsonSync( f ); const ts = parseInt( path.basename( f ) .split( ".json" )[ 0 ] ); //console.log(moment(ts).format('DD-MM-YYYY h:mm:ss')); @@ -214,8 +214,8 @@ Tags.dataloadstat = ( tribeid ) => { } } } ) - jsonfile.writeFileSync( `${config.tribes}/${tribeid}/tags/stats/data.json`, agrege.data, 'utf-8' ); - jsonfile.writeFileSync( `${config.tribes}/${tribeid}/tags/stats/graph.json`, agrege.graph, 'utf-8' ); + fs.outputJsonSync( `${config.tribes}/${tribeid}/tags/stats/data.json`, agrege.data, 'utf-8' ); + fs.outputJsonSync( `${config.tribes}/${tribeid}/tags/stats/graph.json`, agrege.graph, 'utf-8' ); return { status: 200, payload: { info: [ 'Statsupdated' ], model: 'Tags' } } } //console.log(Tags.dataloadstat('yes')); diff --git a/models/UploadFiles.js b/models/UploadFiles.js index c814527..bfb9c36 100755 --- a/models/UploadFiles.js +++ b/models/UploadFiles.js @@ -36,7 +36,7 @@ UploadFiles.addjson = function ( data, header ) { */ // console.log(req.body.jsonp); try { - jsonfile.writeFileSync( header.destinationfile + '/' + header.filename, data.jsonp ); + fs.outputJsonSync( header.destinationfile + '/' + header.filename, data.jsonp ); if( data.callback ) { const execCB = require( `${config.mainDir}/models/tribeid/${ header.xworkon @@ -106,10 +106,10 @@ UploadFiles.updateEvent = function ( domainId, eventId, event ) { // checkAndCreateNeededDirectories(domainId); const eventsFile = `${config.tribes}/${domainId}/actions/events/events.json`; if( !fs.existsSync( eventsFile ) ) { - jsonfile.writeFileSync( eventsFile, {} ); + fs.outputJsonSync( eventsFile, {} ); return { status: 404, payload: 'You have not any events.' }; } - const events = jsonfile.readFileSync( eventsFile ); + const events = fs.readJsonSync( eventsFile ); if( !events.hasOwnProperty( eventId ) ) { return { status: 404, @@ -121,7 +121,7 @@ UploadFiles.updateEvent = function ( domainId, eventId, event ) { eventDate: event.eventDate, eventDescription: event.eventDescription }; - jsonfile.writeFileSync( eventsFile, events, { spaces: 2 } ); + fs.outputJsonSync( eventsFile, events, { spaces: 2 } ); return { status: 200, payload: events @@ -131,13 +131,13 @@ UploadFiles.deleteEvent = function ( domainId, eventId ) { // checkAndCreateNeededDirectories(domainId); const eventsFile = `${config.tribes}/${domainId}/actions/events/events.json`; if( !fs.existsSync( eventsFile ) ) { - jsonfile.writeFileSync( eventsFile, {} ); + fs.outputJsonSync( eventsFile, {} ); return { status: 404, payload: 'You have not any events.' 
}; } - const events = jsonfile.readFileSync( eventsFile ); + const events = fs.readJsonSync( eventsFile ); if( events.hasOwnProperty( eventId ) ) { delete events[ eventId ]; - jsonfile.writeFileSync( eventsFile, events, { spaces: 2 } ); + fs.outputJsonSync( eventsFile, events, { spaces: 2 } ); return { status: 200, payload: events diff --git a/package.json b/package.json index 07852fd..1b38405 100755 --- a/package.json +++ b/package.json @@ -18,8 +18,7 @@ "startblockchain": "pm2 start ./models/Blockchains.js --log-date-format 'DD-MM HH:mm:ss:SSS'", "logpm2": "pm2 logs apxtrib.js --lines 200", "setup": "node models/Setup.js", - "dev": "node apxtrib.js", - "dev-watch": "nodemon apxtrib.js" + "dev": "node apxtrib.js" }, "apidoc": { "url": "https://apxtrib.crabdance.com/apidoc" @@ -55,74 +54,28 @@ }, "license": "MIT", "dependencies": { - "@editorjs/editorjs": "^2.22.2", - "@fortawesome/fontawesome-free": "^5.15.4", - "@popperjs/core": "^2.11.2", - "@web-component/code-flask": "^0.0.1", - "add-to-calendar-button": "^1.6.1", - "aos": "^3.0.0-beta.6", "async": "^3.2.0", - "axios": "^0.18.0", + "axios": "^0.21.1", "baunsu": "^0.2.3", "bcrypt": "^5.0.0", - "bootstrap": "^5.1.3", - "cheerio": "^1.0.0-rc.3", "cors": "^2.8.4", "crypto-js": "^4.1.1", "dns-sync": "^0.2.1", "express": "^4.16.3", - "feather-icons": "^4.28.0", - "filepond": "^4.30.3", - "filepond-plugin-file-metadata": "^1.0.8", - "filepond-plugin-image-crop": "^2.0.6", - "filepond-plugin-image-edit": "^1.6.3", - "filepond-plugin-image-exif-orientation": "^1.0.11", - "filepond-plugin-image-filter": "^1.0.1", - "filepond-plugin-image-preview": "^4.6.11", - "filepond-plugin-image-validate-size": "^1.2.7", - "flatpickr": "^4.6.9", - "formidable": "^1.2.1", - "fs-extra": "^6.0.1", + "fs-extra": "^11.1.0", "glob": "^7.1.2", - "google-spreadsheet": "^3.1.15", - "html-to-text": "^5.1.1", - "jquery": "^3.6.0", - "js-beautify": "^1.14.0", - "jsdom": "^11.11.0", - "jsonfile": "^5.0.0", "jszip": "^3.7.1", - "juice": "^5.2.0", "jwt-simple": "^0.5.1", - "libxmljs": "^0.19.10", - "lodash": "^4.17.10", "luxon": "^2.1.1", - "mailparser": "^2.3.2", - "minifyjs": "^0.2.0", "moment": "^2.22.1", - "multer": "^1.3.0", "mustache": "^2.3.0", - "natural": "^0.6.3", - "node-mailjet": "^3.3.4", - "node-mbox": "^1.0.0", - "node-sass": "^6.0.1", + "node-mailjet": "^6.0.2", "nodemailer": "^6.1.1", "nodemailer-smtp-transport": "^2.7.4", - "parse-filepath": "^1.0.2", "pdf-creator-node": "^2.2.2", "pm2": "^5.1.2", - "prompt": "^1.2.0", - "qrcode": "^1.3.3", - "request-promise": "^4.2.4", - "rimraf": "^2.6.2", - "sass": "^1.54.5", - "scrape-it": "^5.1.4", - "serialize-javascript": "^5.0.1", - "simplebar": "^5.3.6", "stripe": "^7.4.0", - "swiper": "^8.0.6", - "uuid": "^3.2.1" + "uuid": "^9.0.0" }, - "devDependencies": { - "nodemon": "^1.17.3" - } + "devDependencies": {} } diff --git a/routes/uploadFiles.js b/routes/uploadFiles.js index 3652da1..e44fc93 100755 --- a/routes/uploadFiles.js +++ b/routes/uploadFiles.js @@ -35,7 +35,7 @@ router.get( '/:filename', checkHeaders, isAuthenticated, ( req, res ) => { ); if( pushFile.status == 200 ) { if( path.extname( pushFile.payload.file ) === '.json' ) { - jsonfile.readFile( pushFile.payload.file, ( err, p ) => { + fs.readJson( pushFile.payload.file, ( err, p ) => { if( err ) console.error( err ); res.jsonp( p ); } );
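
The bulk of this diff migrates JSON I/O from the jsonfile package to the equivalent fs-extra helpers (fs-extra ships the same jsonfile logic internally), which is what lets package.json drop jsonfile along with scrape-it, cheerio, qrcode and the other unused dependencies; it also replaces the jsonfile.readfileSync (lowercase f) calls in Tags.dataloadstat, which was not a valid jsonfile method. A minimal sketch of the mapping, using an illustrative tokensFile path instead of the real config.tmp / config.tribes paths:

const fs = require( 'fs-extra' );

// Illustrative path only; the real code builds paths from config.tmp and config.tribes.
const tokensFile = '/tmp/apxtrib/tokens.json';

// jsonfile.writeFileSync( file, obj, opt )  ->  fs.outputJsonSync( file, obj, opt )
// outputJsonSync also creates any missing parent directories before writing.
fs.outputJsonSync( tokensFile, { demo: true }, { spaces: 2 } );

// jsonfile.readFileSync( file )  ->  fs.readJsonSync( file )
// Both throw if the file is missing or holds invalid JSON, so the existing try/catch blocks still apply.
const tokens = fs.readJsonSync( tokensFile );
console.log( tokens );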
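
middlewares/ASUPhaveAccessrighttoanobject.js is removed; its own header comment says it is superseded by hasAccessrighton.js, which takes the allowed actions as parameters. The check it performed mapped HTTP verbs onto per-object access-right letters (GET→R, PUT→U, POST→C, DELETE→D, with O restricting a user to objects listed in the item's OWNBY array), and tribeid-specific rights override Alltribeid ones. A minimal sketch of that check under those assumptions — not the actual hasAccessrighton.js, which is not part of this diff:

// Rights strings look like "RWCDO", as in accessrights.data = { "sitewebsrc": "RWCDO", "contacts": "RWCDO" }.
const methodNeeds = { get: 'R', put: 'U', post: 'C', delete: 'D' };

function canAct( accessrights, tribeid, objet, method, ownby = [], xpaganid = '' ) {
  // Rights granted on Alltribeid apply first, then rights on the specific tribeid override them.
  let droit = '';
  if( accessrights.data.Alltribeid && accessrights.data.Alltribeid[ objet ] ) {
    droit = accessrights.data.Alltribeid[ objet ];
  }
  if( accessrights.data[ tribeid ] && accessrights.data[ tribeid ][ objet ] ) {
    droit = accessrights.data[ tribeid ][ objet ];
  }
  // The Owner right only covers objects whose OWNBY array lists the acting pagan.
  return droit.includes( methodNeeds[ method ] ) ||
    ( droit.includes( 'O' ) && ownby.includes( xpaganid ) );
}

// Example: canAct( { data: { mytribe: { users: 'RWCDO' } } }, 'mytribe', 'users', 'post' ) === true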
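
Pagans.updateDatabase keeps its callback style for the logins / uids / emails search indexes, only swapping jsonfile.readFile / jsonfile.writeFile for fs.readJson / fs.outputJson. A sketch of that round-trip with an illustrative index path and user (the real code derives the path from config.tribes and header.xworkon):

const fs = require( 'fs-extra' );

const loginsIndex = '/tmp/apxtrib/users/searchindex/logins.json'; // illustrative path
const user = { LOGIN: 'demo', UUID: 'u-123' };                    // illustrative user

fs.readJson( loginsIndex, ( err, logins ) => {
  if( err || !logins ) logins = {};   // index may not exist yet on a fresh tribeid
  logins[ user.LOGIN ] = user.UUID;   // with rm === true the entry is deleted instead
  fs.outputJson( loginsIndex, logins, { spaces: 2 }, werr => {
    if( werr ) console.log( werr );
  } );
} );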