Actions rewrite to handle multiple data

This commit is contained in:
mrfry 2020-11-23 10:08:05 +01:00
parent 677c320e90
commit 1acc122e26
5 changed files with 116 additions and 83 deletions

View file

@@ -23,7 +23,6 @@ module.exports = {
backupData: backupData,
}
const dataFile = './publicDirs/qminingPublic/data.json'
const recDataFile = './stats/recdata'
const dataLockFile = './data/lockData'
@@ -39,7 +38,17 @@ const minMatchToAmmountToAdd = 90
const writeAfter = 1 // write after # of adds FIXME: set reasonable save rate
var currWrites = 0
function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
// TODO: function that runs ProcessINcReq for every questiondb
function ProcessIncomingRequest(recievedData, questionDbs, dryRun, user) {
return Promise.all(
questionDbs.map((qdb) => {
return ProcessIncomingRequestUsingDb(recievedData, qdb, dryRun, user)
})
)
}
function ProcessIncomingRequestUsingDb(recievedData, qdb, dryRun, user) {
return new Promise((resolve, reject) => {
logger.DebugLog('Processing incoming request', 'actions', 1)
@@ -102,7 +111,9 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
)
logger.DebugLog(currentQuestion, 'actions', 3)
recievedQuestions.push(currentQuestion)
questionSearchPromises.push(searchData(qdb, currentQuestion, data.subj))
questionSearchPromises.push(
searchData(qdb.data, currentQuestion, data.subj)
)
})
Promise.all(questionSearchPromises)
@@ -131,7 +142,7 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
3
)
logger.DebugLog(currentQuestion, 'actions', 3)
addQuestion(qdb, sName, currentQuestion)
addQuestion(qdb.data, sName, currentQuestion)
})
currWrites++
@@ -143,7 +154,7 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
if (currWrites >= writeAfter && !dryRun) {
currWrites = 0
logger.DebugLog('Writing data.json', 'actions', 1)
utils.WriteFile(JSON.stringify(qdb), dataFile)
utils.WriteFile(JSON.stringify(qdb.data), qdb.path)
logger.Log('\tData file written', color)
} else if (dryRun) {
logger.Log('\tDry run')
@@ -197,30 +208,45 @@ function ProcessIncomingRequest(recievedData, qdb, dryRun, user) {
})
}
// loading stuff
function LoadJSON(dataFile) {
var data = JSON.parse(utils.ReadFile(dataFile))
if (!data) {
logger.Log(
"data is undefined! Couldn't load data!",
logger.GetColor('redbg')
)
}
return data
function LoadJSON(dataFiles) {
return dataFiles.reduce((acc, dataFile) => {
if (!utils.FileExists(dataFile.path)) {
utils.WriteFile(JSON.stringify([]), dataFile.path)
}
try {
acc.push({
...dataFile,
data: JSON.parse(utils.ReadFile(dataFile.path)),
})
} catch (err) {
console.error(err)
logger.Log(
"data is undefined! Couldn't load data!",
logger.GetColor('redbg')
)
}
return acc
}, [])
}
function backupData(data) {
const path = './publicDirs/qminingPublic/backs/'
utils.CreatePath(path)
try {
logger.Log('Backing up data...')
utils.WriteFile(
JSON.stringify(data),
`${path}data_${utils.GetDateString(true)}.json`
)
logger.Log('Done')
} catch (err) {
logger.Log('Error backing up data file!', logger.GetColor('redbg'))
console.error(err)
}
function backupData(questionDbs) {
questionDbs.forEach((data) => {
const path = './publicDirs/qminingPublic/backs/'
utils.CreatePath(path)
try {
logger.Log(`Backing up ${data.name}...`)
utils.WriteFile(
JSON.stringify(data.data),
`${path}${data.name}_${utils.GetDateString(true)}.json`
)
logger.Log('Done')
} catch (err) {
logger.Log(
`Error backing up data file ${data.name}!`,
logger.GetColor('redbg')
)
console.error(err)
}
})
}