Actions rewrite to handle multiple data files

This commit is contained in:
mrfry 2020-11-23 10:08:05 +01:00
parent 677c320e90
commit 1acc122e26
5 changed files with 116 additions and 83 deletions

View file

@@ -68,7 +68,10 @@ function GetApp() {
// files in public dirs
const recivedFiles = publicDir + 'recivedfiles'
const uloadFiles = publicDir + 'f'
const dataFile = publicDir + 'data.json'
const dataFiles = [
{ path: `${publicDir}oldData.json`, name: 'oldData' },
{ path: `${publicDir}data.json`, name: 'newData' },
]
const motdFile = publicDir + 'motd'
const userSpecificMotdFile = publicDir + 'userSpecificMotd.json'
const versionFile = publicDir + 'version'
@@ -113,7 +116,7 @@ function GetApp() {
})
)
var data = actions.LoadJSON(dataFile)
var questionDbs = actions.LoadJSON(dataFiles)
var version = ''
var motd = ''
var userSpecificMotd = {}
@@ -187,7 +190,7 @@ function GetApp() {
}
function Load() {
actions.backupData(data)
actions.backupData(questionDbs)
utils.WatchFile(userSpecificMotdFile, () => {
logger.Log(`User Specific Motd updated`, logger.GetColor('green'))
@@ -800,7 +803,11 @@ function GetApp() {
app.get('/allqr.txt', function(req, res) {
res.set('Content-Type', 'text/plain')
res.send(dataToString(data))
const stringifiedData = questionDbs.map((qdb) => {
return dataToString(qdb.data)
})
// TODO: test this
res.send(stringifiedData.join('\n\n'))
res.end()
logger.LogReq(req)
})
@@ -881,18 +888,19 @@ function GetApp() {
)
// making backup
utils.CopyFile(
'./' + dataFile,
`./publicDirs/qminingPublic/backs/data_before_${
user.name
}_${utils.GetDateString().replace(/ /g, '_')}`
) // TODO: rewrite to dinamyc public!!!
logger.Log('Backup made')
// writing data
utils.WriteFile(JSON.stringify(newData), dataFile)
logger.Log('New data file written')
// reloading data file
data = [...newData]
// TODO
// utils.CopyFile(
// './' + dataFile,
// `./publicDirs/qminingPublic/backs/data_before_${
// user.name
// }_${utils.GetDateString().replace(/ /g, '_')}`
// ) // TODO: rewrite to dinamyc public!!!
// logger.Log('Backup made')
// // writing data
// utils.WriteFile(JSON.stringify(newData), dataFile)
// logger.Log('New data file written')
// // reloading data file
// data = [...newData]
// data = newData
logger.Log('Data set to newData')
@@ -920,7 +928,7 @@ function GetApp() {
actions
.ProcessIncomingRequest(
req.body.datatoadd || req.body,
data,
questionDbs,
dryRun,
user
)

View file

@@ -23,7 +23,6 @@ module.exports = {
backupData: backupData,
}
const dataFile = './publicDirs/qminingPublic/data.json'
const recDataFile = './stats/recdata'
const dataLockFile = './data/lockData'
@@ -39,7 +38,17 @@ const minMatchToAmmountToAdd = 90
const writeAfter = 1 // write after # of adds FIXME: set reasonable save rate
var currWrites = 0
function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
// TODO: function that runs ProcessINcReq for every questiondb
/**
 * Fans the incoming request out to every question DB.
 * @param {*} recievedData - raw payload from the client request
 * @param {Array} questionDbs - loaded question DBs ({ path, name, data })
 * @param {boolean} dryRun - when true, downstream processing skips writes
 * @param {*} user - the requesting user, forwarded to per-DB processing
 * @returns {Promise<Array>} resolves when every DB has been processed
 */
function ProcessIncomingRequest(recievedData, questionDbs, dryRun, user) {
  // run all per-DB jobs in parallel and wait for every one of them
  const perDbJobs = questionDbs.map((questionDb) =>
    ProcessIncomingRequestUsingDb(recievedData, questionDb, dryRun, user)
  )
  return Promise.all(perDbJobs)
}
function ProcessIncomingRequestUsingDb(recievedData, qdb, dryRun, user) {
return new Promise((resolve, reject) => {
logger.DebugLog('Processing incoming request', 'actions', 1)
@@ -102,7 +111,9 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
)
logger.DebugLog(currentQuestion, 'actions', 3)
recievedQuestions.push(currentQuestion)
questionSearchPromises.push(searchData(qdb, currentQuestion, data.subj))
questionSearchPromises.push(
searchData(qdb.data, currentQuestion, data.subj)
)
})
Promise.all(questionSearchPromises)
@@ -131,7 +142,7 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
3
)
logger.DebugLog(currentQuestion, 'actions', 3)
addQuestion(qdb, sName, currentQuestion)
addQuestion(qdb.data, sName, currentQuestion)
})
currWrites++
@@ -143,7 +154,7 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
if (currWrites >= writeAfter && !dryRun) {
currWrites = 0
logger.DebugLog('Writing data.json', 'actions', 1)
utils.WriteFile(JSON.stringify(qdb), dataFile)
utils.WriteFile(JSON.stringify(qdb.data), qdb.path)
logger.Log('\tData file written', color)
} else if (dryRun) {
logger.Log('\tDry run')
@@ -197,30 +208,45 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
})
}
// loading stuff
function LoadJSON(dataFile) {
var data = JSON.parse(utils.ReadFile(dataFile))
if (!data) {
logger.Log(
"data is undefined! Couldn't load data!",
logger.GetColor('redbg')
)
}
return data
/**
 * Loads every listed data file into memory as a question DB entry.
 * Missing files are created as empty JSON arrays before reading.
 * @param {{path: string, name: string}[]} dataFiles - files to load
 * @returns {{path: string, name: string, data: *}[]} loaded DBs; a file
 *          that fails to read/parse is logged and skipped (best effort)
 */
function LoadJSON(dataFiles) {
  return dataFiles.reduce((acc, dataFile) => {
    // first run: create the file so the read below can't fail on a missing path
    if (!utils.FileExists(dataFile.path)) {
      utils.WriteFile(JSON.stringify([]), dataFile.path)
    }
    try {
      acc.push({
        ...dataFile,
        data: JSON.parse(utils.ReadFile(dataFile.path)),
      })
    } catch (err) {
      // best effort: keep the server up, drop only the broken DB.
      // Fixed log message: the old one claimed "data is undefined" while the
      // actual failure is a read/parse error, and it never named the file.
      console.error(err)
      logger.Log(
        `Couldn't load data file "${dataFile.name}" (${dataFile.path})!`,
        logger.GetColor('redbg')
      )
    }
    return acc
  }, [])
}
function backupData(data) {
const path = './publicDirs/qminingPublic/backs/'
utils.CreatePath(path)
try {
logger.Log('Backing up data...')
utils.WriteFile(
JSON.stringify(data),
`${path}data_${utils.GetDateString(true)}.json`
)
logger.Log('Done')
} catch (err) {
logger.Log('Error backing up data file!', logger.GetColor('redbg'))
console.error(err)
}
/**
 * Writes a timestamped JSON backup of every question DB into the backs dir.
 * A failure on one DB is logged and does not stop the remaining backups.
 * @param {{name: string, data: *}[]} questionDbs - loaded question DBs
 */
function backupData(questionDbs) {
  for (const questionDb of questionDbs) {
    const backupDir = './publicDirs/qminingPublic/backs/'
    utils.CreatePath(backupDir)
    try {
      logger.Log(`Backing up ${questionDb.name}...`)
      const backupFile = `${backupDir}${questionDb.name}_${utils.GetDateString(
        true
      )}.json`
      utils.WriteFile(JSON.stringify(questionDb.data), backupFile)
      logger.Log('Done')
    } catch (err) {
      // keep going: one broken DB must not block the others' backups
      logger.Log(
        `Error backing up data file ${questionDb.name}!`,
        logger.GetColor('redbg')
      )
      console.error(err)
    }
  }
}