mirror of https://gitlab.com/MrFry/mrfrys-node-server
synced 2025-04-01 20:24:18 +02:00
Old question removing and updating
This commit is contained in:
parent 4305fe2023
commit 5c2b46f2a3
7 changed files with 440 additions and 36 deletions
@@ -27,7 +27,7 @@ import {
   WorkerResult,
   SearchResultQuestion,
 } from '../utils/classes'
-import { doALongTask } from './workerPool'
+import { doALongTask, msgAllWorker } from './workerPool'
 import idStats from '../utils/ids'
 import utils from '../utils/utils'
 import { addQuestion, getSubjNameWithoutYear } from './classes'
@@ -201,32 +201,20 @@ function processIncomingRequestUsingDb(
     .then((results: Array<WorkerResult>) => {
       const allQuestions: Question[] = [] // all new questions here that do not have result
       results.forEach((result: WorkerResult, i) => {
-        const add = result.result.every((res: SearchResultQuestion) => {
-          return res.match < minMatchAmmountToAdd
-        })
+        const add = (result.result as SearchResultQuestion[]).every(
+          (res: SearchResultQuestion) => {
+            return res.match < minMatchAmmountToAdd
+          }
+        )
         if (add && !result.error) {
           allQuestions.push(recievedQuestions[i])
         }
       })

       try {
         const subjName = getSubjNameWithoutYear(recievedData.subj)
         if (allQuestions.length > 0) {
-          allQuestions.forEach((currentQuestion) => {
-            const sName = getSubjNameWithoutYear(recievedData.subj)
-            logger.DebugLog(
-              'Adding question with subjName: ' + sName + ' :',
-              'isadding',
-              3
-            )
-            logger.DebugLog(currentQuestion, 'isadding', 3)
-            addQuestion(qdb.data, sName, {
-              ...currentQuestion,
-              data: {
-                ...currentQuestion.data,
-                date: new Date().getTime(),
-              },
-            })
-          })
+          addQuestionsToDb(allQuestions, subjName, qdb)

           currWrites++
           logger.DebugLog(
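The reworked `add` check above decides whether a received question is new enough to store: it is kept only when every search hit for it scores below minMatchAmmountToAdd. A minimal standalone sketch of that rule follows; the SearchHit shape and the threshold value are hypothetical simplifications, not the repo's actual types or config.

// Illustrative sketch only: simplified type and an assumed threshold value.
interface SearchHit {
  match: number // similarity score against an already stored question
}

const minMatchAmmountToAdd = 90 // assumed value for the example

// Mirrors the `add` check in the hunk above: keep the received question
// only if no stored question already matches it at or above the threshold.
function shouldAddQuestion(hits: SearchHit[]): boolean {
  return hits.every((hit) => hit.match < minMatchAmmountToAdd)
}

console.log(shouldAddQuestion([{ match: 30 }, { match: 75 }])) // true: all matches are weak
console.log(shouldAddQuestion([{ match: 95 }])) // false: a close match already exists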
@@ -258,6 +246,7 @@ function processIncomingRequestUsingDb(
           qdbIndex: qdb.index,
           qdbName: qdb.name,
         })
+        runCleanWorker(recievedData.quiz, subjName, qdb)
       } catch (error) {
         console.error(error)
         logger.Log(
@@ -287,6 +276,124 @@ function processIncomingRequestUsingDb(
     })
 }

+function addQuestionsToDb(
+  allQuestions: Question[],
+  subjName: string,
+  qdb: QuestionDb
+) {
+  allQuestions.forEach((currentQuestion) => {
+    logger.DebugLog(
+      'Adding question with subjName: ' + subjName + ' :',
+      'isadding',
+      3
+    )
+    logger.DebugLog(currentQuestion, 'isadding', 3)
+    addQuestion(qdb.data, subjName, {
+      ...currentQuestion,
+      data: {
+        ...currentQuestion.data,
+        date: new Date().getTime(),
+      },
+    })
+  })
+}
+
+function runCleanWorker(
+  recievedQuesitons: Question[],
+  subjName: string,
+  qdb: QuestionDb
+) {
+  if (qdb.overwrites && qdb.overwrites.length) {
+    // check if subject needs to be updated, and qdb has overwriteFromDate
+    const overwrite = qdb.overwrites.find((x) => {
+      return subjName.toLowerCase().includes(x.subjName.toLowerCase())
+    })
+
+    if (!overwrite) {
+      return
+    }
+    // logger.Log(
+    //   `\tStarting cleaning in subject "${logger.C(
+    //     'green'
+    //   )}${subjName}${logger.C('')}" (matched: "${logger.C('green')}${
+    //     overwrite.subjName
+    //   }${logger.C('')}")`
+    // )
+    // pass recieved questions to a worker
+    doALongTask({
+      type: 'dbClean',
+      data: {
+        questions: recievedQuesitons,
+        subjToClean: subjName,
+        overwriteFromDate: overwrite.overwriteFromDate,
+        qdbIndex: qdb.index,
+      },
+    }).then(({ result: questionIndexesToRemove }) => {
+      const subjIndex = qdb.data.findIndex((x) => {
+        return x.Name.toLowerCase().includes(subjName.toLowerCase())
+      })
+      // sends msgs to all workers to remove it too
+
+      msgAllWorker({
+        type: 'rmQuestions',
+        data: {
+          questionIndexesToRemove: questionIndexesToRemove as number[][],
+          subjIndex: subjIndex,
+          qdbIndex: qdb.index,
+          recievedQuestions: recievedQuesitons,
+        },
+      })
+
+      // it adds the recieved question WITH DATE!
+      // recievedQuestions doesnt have date-s
+      qdb.data[subjIndex].Questions = updateQuestionsInArray(
+        questionIndexesToRemove as number[][],
+        qdb.data[subjIndex].Questions,
+        recievedQuesitons
+      )
+
+      // saves the file
+      writeData(qdb.data, qdb.path)
+      logger.Log(
+        `\tRemoved ${logger.C('green')}${
+          (questionIndexesToRemove as number[][]).filter(
+            (x: number[]) => x.length > 1
+          ).length
+        }${logger.C()} old questions from ${logger.C(
+          'green'
+        )}${subjName}${logger.C()}`
+      )
+    })
+  }
+}
+
+export function updateQuestionsInArray(
+  questionIndexesToRemove: number[][],
+  questions: Question[],
+  newQuestions: Question[]
+): Question[] {
+  const indexesToRemove = questionIndexesToRemove.reduce((acc, x) => {
+    if (x.length > 1) {
+      return [...acc, ...x]
+    }
+    return acc
+  }, [])
+
+  const newQuestionsToAdd: Question[] = newQuestions.filter((_q, i) => {
+    return questionIndexesToRemove[i].length > 1
+  })
+
+  return [
+    ...questions.filter((_x, i) => {
+      return !indexesToRemove.includes(i)
+    }),
+    ...newQuestionsToAdd.map((x) => {
+      x.data.date = new Date()
+      return x
+    }),
+  ]
+}
+
 export function isQuestionValid(question: Question): boolean {
   if (!question.Q) {
     return false
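The new updateQuestionsInArray helper above is the merge step for the clean worker's result: entry i of questionIndexesToRemove lists the stored-question indexes flagged for received question i, and only entries with more than one index cause a removal plus re-insertion of that received question with a fresh date. A self-contained sketch on toy data follows; the Question shape is trimmed to the fields used here (the real type lives in ../utils/classes) and the helper body is a local copy for illustration.

// Standalone sketch of the merge rule, with a toy Question shape and made-up data.
interface Question {
  Q: string
  data: { date?: Date }
}

function updateQuestionsInArray(
  questionIndexesToRemove: number[][],
  questions: Question[],
  newQuestions: Question[]
): Question[] {
  // flatten every index list that flags more than one stored question
  const indexesToRemove = questionIndexesToRemove.reduce<number[]>((acc, x) => {
    return x.length > 1 ? [...acc, ...x] : acc
  }, [])

  // only received questions that displaced stored ones get re-appended
  const newQuestionsToAdd = newQuestions.filter((_q, i) => {
    return questionIndexesToRemove[i].length > 1
  })

  return [
    ...questions.filter((_x, i) => !indexesToRemove.includes(i)),
    ...newQuestionsToAdd.map((x) => {
      x.data.date = new Date() // received questions carry no date, so it is stamped here
      return x
    }),
  ]
}

const stored: Question[] = [
  { Q: 'old wording of q1', data: {} },
  { Q: 'duplicate of q1', data: {} },
  { Q: 'q2', data: {} },
]
const received: Question[] = [
  { Q: 'updated q1', data: {} },
  { Q: 'unrelated new q3', data: {} },
]
// received[0] was matched to stored[0] and stored[1]; received[1] matched nothing
const merged = updateQuestionsInArray([[0, 1], []], stored, received)
console.log(merged.map((q) => q.Q)) // [ 'q2', 'updated q1' ]
// 'unrelated new q3' is not appended by this helper; genuinely new questions
// go through addQuestionsToDb instead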
@@ -435,12 +542,12 @@ export function backupData(questionDbs: Array<QuestionDb>): void {
     const path = './publicDirs/qminingPublic/backs/'
     utils.CreatePath(path)
     try {
-      logger.Log(`Backing up ${data.name}...`)
+      // logger.Log(`Backing up ${data.name}...`)
       writeData(
         data.data,
         `${path}${data.name}_${utils.GetDateString(undefined, true)}.json`
       )
-      logger.Log('Done')
+      // logger.Log('Done')
     } catch (err) {
       logger.Log(
         `Error backing up data file ${data.name}!`,