Added worker to search for stuff in db #10

mrfry 2020-10-01 17:15:26 +02:00
parent 66e4a03bdf
commit 655aab8ddf
6 changed files with 192 additions and 98 deletions
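In short: the question matching that ProcessIncomingRequest (utils/actions.js) used to do inline is moved into a worker_threads worker, the new src/utils/processData.js, so the incoming quiz is compared against the question DB off the main event loop and the server stays responsive while searching. A minimal sketch of the pattern being adopted, with hypothetical names (runSearch, searchWorker.js and doHeavySearch are illustrations, not part of this commit):

// parent: wrap the worker lifecycle in a Promise
const { Worker } = require('worker_threads')

function runSearch(payload) {
  return new Promise((resolve, reject) => {
    const worker = new Worker('./searchWorker.js', { workerData: payload })
    worker.on('message', resolve) // the worker posts its result when done
    worker.on('error', reject) // uncaught exception inside the worker
    worker.on('exit', (code) => {
      if (code !== 0) reject(new Error('worker stopped with exit code ' + code))
    })
  })
}

// worker (searchWorker.js): do the blocking work off the main thread
// const { parentPort, workerData } = require('worker_threads')
// parentPort.postMessage(doHeavySearch(workerData))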

View file

@@ -12,10 +12,10 @@ module.exports = {
     SharedArrayBuffer: 'readonly',
   },
   rules: {
-    'no-undef': ['warn'],
+    'no-undef': ['error'],
     eqeqeq: ['warn', 'smart'],
     'no-unused-vars': 'off',
     'no-prototype-builtins': 'off',
-    'id-length': ['warn', { exceptions: [] }],
+    'id-length': ['warn', { exceptions: ['i', 'j'] }],
   },
 }

View file

@@ -22,7 +22,7 @@
 const express = require('express')
 const bodyParser = require('body-parser')
 const busboy = require('connect-busboy')
-const uuidv4 = require('uuid/v4') // TODO: deprecated, but imports are not supported
+const { v4: uuidv4 } = require('uuid')
 const fs = require('fs')
 const app = express()

View file

@@ -35,7 +35,7 @@ const http = require('http')
 const https = require('https')
 const cors = require('cors')
 const cookieParser = require('cookie-parser')
-const uuidv4 = require('uuid/v4') // TODO: deprecated, but imports are not supported
+const { v4: uuidv4 } = require('uuid');
 const dbtools = require('./utils/dbtools.js')
 const reqlogger = require('./middlewares/reqlogger.middleware.js')

View file

@@ -20,11 +20,14 @@ Question Server
 module.exports = {
   ProcessIncomingRequest: ProcessIncomingRequest,
   LoadJSON: LoadJSON,
+  LoadJSONFromObject: LoadJSONFromObject,
 }
 
 const dataFile = './qminingPublic/data.json'
 const recDataFile = './stats/recdata'
+const processDataWorkerFile = './src/utils/processData.js'
+const { Worker } = require('worker_threads')
 const logger = require('../utils/logger.js')
 const idStats = require('../utils/ids.js')
 idStats.Load() // FIXME: dont always load when actions.js is used
@@ -33,7 +36,6 @@ const classes = require('./classes.js')
 classes.initLogger(logger.DebugLog)
 
 // if a recievend question doesnt match at least this % to any other question in the db it gets
 // added to db
-const minMatchAmmountToAdd = 90 // FIXME: test this value
 const writeAfter = 1 // write after # of adds FIXME: set reasonable save rate
 var currWrites = 0
@@ -59,75 +61,92 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
       '\n------------------------------------------------------------------------------\n'
     utils.AppendToFile(towrite, recDataFile)
     logger.DebugLog('recDataFile written', 'actions', 1)
-  } catch (e) {
+  } catch (err) {
     logger.log('Error writing recieved data.')
   }
 
   try {
     // recievedData: { version: "", id: "", subj: "" quiz: {} }
-    let d = recievedData
+    let data = recievedData
     // FIXME: if is for backwards compatibility, remove this sometime in the future
-    if (typeof d !== 'object') {
-      d = JSON.parse(recievedData)
+    if (typeof data !== 'object') {
+      data = JSON.parse(recievedData)
     }
     logger.DebugLog('recievedData JSON parsed', 'actions', 1)
-    logger.DebugLog(d, 'actions', 3)
-    let allQLength = d.quiz.length
-    let allQuestions = []
-    d.quiz.forEach((question) => {
-      logger.DebugLog('Question:', 'actions', 2)
-      logger.DebugLog(question, 'actions', 2)
-      let q = new classes.Question(question.Q, question.A, question.data)
-      logger.DebugLog(
-        'Searching for question in subj ' + d.subj,
-        'actions',
-        3
-      )
-      logger.DebugLog(q, 'actions', 3)
-      let sames = qdb.Search(q, d.subj)
-      logger.DebugLog('Same questions:', 'actions', 2)
-      logger.DebugLog('Length: ' + sames.length, 'actions', 2)
-      logger.DebugLog(sames, 'actions', 3)
-      // if it didnt find any question, or every found questions match is lower thatn 80
-      let isNew =
-        sames.length === 0 ||
-        sames.every((searchResItem) => {
-          return searchResItem.match < minMatchAmmountToAdd
-        })
-      logger.DebugLog('isNew: ' + isNew, 'actions', 2)
-      if (isNew) {
-        allQuestions.push(q)
-      }
-    })
+    logger.DebugLog(data, 'actions', 3)
+    let allQLength = data.quiz.length
+
+    const worker = new Worker(processDataWorkerFile, {
+      workerData: {
+        data: data,
+        qdb: qdb,
+      },
+    })
+
+    worker.on('error', (err) => {
+      logger.Log('Process Data Worker error!', logger.GetColor('redbg'))
+      console.error(err)
+      reject(err)
+    })
+
+    worker.on('exit', (code) => {
+      logger.DebugLog('ProcessData exit, code: ' + code, 'actions', 1)
+      if (code !== 0) {
+        logger.Log(
+          'Process Data Worker error! Exit code is not 0',
+          logger.GetColor('redbg')
+        )
+        reject(new Error('Process Data Worker exit code is not 0!'))
+      }
+    })
+
+    worker.on('message', (workerMsg) => {
+      logger.DebugLog('Message from processData', 'actions', 1)
+      logger.DebugLog(workerMsg, 'actions', 1)
+
+      const allQuestions = workerMsg.map((resultQuestion) => {
+        return new classes.Question(
+          resultQuestion.Q,
+          resultQuestion.A,
+          resultQuestion.data
+        )
+      })
+
+      try {
         let color = logger.GetColor('green')
         let msg = ''
         if (allQuestions.length > 0) {
           color = logger.GetColor('blue')
           msg += `New questions: ${allQuestions.length} ( All: ${allQLength} )`
-          allQuestions.forEach((q) => {
-            const sName = classes.SUtils.GetSubjNameWithoutYear(d.subj)
+          allQuestions.forEach((currentQuestion) => {
+            const sName = classes.SUtils.GetSubjNameWithoutYear(data.subj)
             logger.DebugLog(
               'Adding question with subjName: ' + sName + ' :',
               'actions',
               3
             )
-            logger.DebugLog(q, 'actions', 3)
-            qdb.AddQuestion(sName, q)
+            logger.DebugLog(currentQuestion, 'actions', 3)
+            qdb.AddQuestion(sName, currentQuestion)
           })
 
           currWrites++
-          logger.DebugLog('currWrites for data.json: ' + currWrites, 'actions', 1)
+          logger.DebugLog(
+            'currWrites for data.json: ' + currWrites,
+            'actions',
+            1
+          )
           if (currWrites >= writeAfter && !dryRun) {
             currWrites = 0
             try {
               qdb.version = infos.version
               qdb.motd = infos.motd
-              logger.DebugLog('version and motd set for data.json', 'actions', 3)
-            } catch (e) {
+              logger.DebugLog(
+                'version and motd set for data.json',
+                'actions',
+                3
+              )
+            } catch (err) {
               logger.Log('MOTD/Version writing/reading error!')
             }
             logger.DebugLog('Writing data.json', 'actions', 1)
@@ -140,14 +159,14 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
           msg += `No new data ( ${allQLength} )`
         }
 
-        let subjRow = '\t' + d.subj
-        if (d.id) {
-          subjRow += ' ( CID: ' + logger.logHashed(d.id) + ')'
-          idStats.LogId(d.id, d.subj)
+        let subjRow = '\t' + data.subj
+        if (data.id) {
+          subjRow += ' ( CID: ' + logger.logHashed(data.id) + ')'
+          idStats.LogId(data.id, data.subj)
         }
         logger.Log(subjRow)
-        if (d.version !== undefined) {
-          msg += '. Version: ' + d.version
+        if (data.version !== undefined) {
+          msg += '. Version: ' + data.version
         }
         logger.Log('\t' + msg, color)
@@ -156,8 +175,17 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
         logger.DebugLog('ProcessIncomingRequest done', 'actions', 1)
         resolve(allQLength.length)
-  } catch (e) {
-    console.log(e)
+      } catch (error) {
+        console.log(error)
+        logger.Log(
+          'Error while processing processData worker result!',
+          logger.GetColor('redbg')
+        )
+        reject(new Error('Error while processing processData worker result!'))
+      }
+    })
+  } catch (err) {
+    console.log(err)
     logger.Log('Couldnt parse JSON data', logger.GetColor('redbg'))
     reject(new Error('Couldnt parse JSON data'))
   }
@@ -166,27 +194,31 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
 // loading stuff
 function LoadJSON(dataFile) {
+  var data = JSON.parse(utils.ReadFile(dataFile))
+  return LoadJSONFromObject(data)
+}
+
+function LoadJSONFromObject(data) {
   try {
-    var d = JSON.parse(utils.ReadFile(dataFile))
-    var r = new classes.QuestionDB()
+    var result = new classes.QuestionDB()
     var rt = []
-    for (var i = 0; i < d.Subjects.length; i++) {
-      let s = new classes.Subject(d.Subjects[i].Name)
+    for (var i = 0; i < data.Subjects.length; i++) {
+      let subject = new classes.Subject(data.Subjects[i].Name)
       var j = 0
-      for (j = 0; j < d.Subjects[i].Questions.length; j++) {
-        var currQ = d.Subjects[i].Questions[j]
-        s.AddQuestion(new classes.Question(currQ.Q, currQ.A, currQ.data))
+      for (j = 0; j < data.Subjects[i].Questions.length; j++) {
+        var currQ = data.Subjects[i].Questions[j]
+        subject.AddQuestion(new classes.Question(currQ.Q, currQ.A, currQ.data))
       }
       rt.push({
-        name: d.Subjects[i].Name,
+        name: data.Subjects[i].Name,
         count: j,
       })
-      r.AddSubject(s)
+      result.AddSubject(subject)
     }
-    return r
-  } catch (e) {
+    return result
+  } catch (err) {
     logger.Log('Error loading sutff', logger.GetColor('redbg'), true)
-    console.log(e)
+    console.log(err)
   }
 }
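Two notes on the actions.js changes above. The resolve and reject calls inside the worker event handlers work because ProcessIncomingRequest already wraps its body in a Promise; the enclosing new Promise executor starts above the visible context, so the promise contract of the function is unchanged. Also, workerData and postMessage payloads go through the structured-clone algorithm, so a QuestionDB or Question instance arrives on the other side as a plain object without its methods; that is presumably why the worker rebuilds its DB with the new LoadJSONFromObject and why the parent re-wraps the returned questions in classes.Question. A standalone illustration of that behaviour (not from this commit):

const { Worker, isMainThread, parentPort, workerData } = require('worker_threads')

class Question {
  constructor(q) {
    this.Q = q
  }
  IsSame() {
    return false
  }
}

if (isMainThread) {
  // the instance is structured-cloned into { Q: 'example' }, losing its prototype
  const worker = new Worker(__filename, { workerData: new Question('example') })
  worker.on('message', (hasMethod) => console.log(hasMethod)) // prints: false
} else {
  parentPort.postMessage(typeof workerData.IsSame === 'function')
}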

View file

@@ -3,7 +3,7 @@ const logger = require('../utils/logger.js')
 const dbtools = require('../utils/dbtools.js')
 const dbStructPath = '../modules/api/apiDBStruct.json'
 const usersDBPath = '../data/dbs/users.db'
-const uuidv4 = require('uuid/v4') // TODO: deprecated, but imports are not supported
+const { v4: uuidv4 } = require('uuid')
 let authDB

src/utils/processData.js (new file, 62 additions)
View file

@@ -0,0 +1,62 @@
+const dataFile = './qminingPublic/data.json'
+const recDataFile = './stats/recdata'
+
+const {
+  Worker,
+  isMainThread,
+  parentPort,
+  workerData,
+} = require('worker_threads')
+
+const logger = require('../utils/logger.js')
+const actions = require('../utils/actions.js')
+const classes = require('./classes.js')
+classes.initLogger(logger.DebugLog)
+
+const minMatchAmmountToAdd = 90 // FIXME: test this value
+
+if (!isMainThread) {
+  logger.DebugLog('Starting worker thread', 'processdata', 1)
+  console.log(workerData)
+  parentPort.postMessage(
+    ProcessData(workerData.data, actions.LoadJSONFromObject(workerData.qdb))
+  )
+} else {
+  logger.Log(
+    'Porcess data should not run on main thread!',
+    logger.GetColor('redbg')
+  )
+}
+
+function ProcessData(data, qdb) {
+  let allQuestions = []
+
+  data.quiz.forEach((question) => {
+    logger.DebugLog('Question:', 'actions', 2)
+    logger.DebugLog(question, 'actions', 2)
+    let currentQuestion = new classes.Question(
+      question.Q,
+      question.A,
+      question.data
+    )
+    logger.DebugLog('Searching for question in subj ' + data.subj, 'actions', 3)
+    logger.DebugLog(currentQuestion, 'actions', 3)
+    let sames = qdb.Search(currentQuestion, data.subj)
+    logger.DebugLog('Same questions:', 'actions', 2)
+    logger.DebugLog('Length: ' + sames.length, 'actions', 2)
+    logger.DebugLog(sames, 'actions', 3)
+    // if it didnt find any question, or every found questions match is lower thatn 80
+    let isNew =
+      sames.length === 0 ||
+      sames.every((searchResItem) => {
+        return searchResItem.match < minMatchAmmountToAdd
+      })
+    logger.DebugLog('isNew: ' + isNew, 'actions', 2)
+    if (isNew) {
+      allQuestions.push(currentQuestion)
+    }
+  })
+
+  return allQuestions
+}
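Since processData.js only posts a result when it runs off the main thread (isMainThread is false), it can only be exercised through a Worker. A quick manual check (hypothetical script, not part of the commit, assuming it is run from the repository root so the relative paths and required data files resolve) might look like this:

const { Worker } = require('worker_threads')

const worker = new Worker('./src/utils/processData.js', {
  workerData: {
    data: { subj: 'Test subject', quiz: [{ Q: 'question?', A: 'answer' }] },
    qdb: { Subjects: [] }, // same JSON shape that LoadJSONFromObject expects
  },
})
worker.on('message', (newQuestions) => {
  console.log('questions the worker would add:', newQuestions)
})
worker.on('error', console.error)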