Added worker to search for incoming questions in the db #10

mrfry 2020-10-01 17:15:26 +02:00
parent 66e4a03bdf
commit 655aab8ddf
6 changed files with 192 additions and 98 deletions
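
The change moves the question-matching loop out of the request handler and into a worker_threads worker: actions.js spawns the worker with the parsed submission and a plain copy of the question DB as workerData, and the worker posts the list of genuinely new questions back as a single message. A minimal sketch of that pattern, for orientation only (the file names and the doHeavySearch helper are illustrative, not the commit's actual code):

// main thread: spawn a worker and wait for its one result message
const { Worker } = require('worker_threads')

function runInWorker(payload) {
  return new Promise((resolve, reject) => {
    const worker = new Worker('./searchWorker.js', { workerData: payload })
    worker.on('message', resolve) // the worker's answer
    worker.on('error', reject) // uncaught exception inside the worker
    worker.on('exit', (code) => {
      if (code !== 0) reject(new Error('worker stopped with exit code ' + code))
    })
  })
}

// searchWorker.js: do the heavy work off the main thread, then report back
const { isMainThread, parentPort, workerData } = require('worker_threads')
if (!isMainThread) {
  const result = doHeavySearch(workerData) // placeholder for the actual matching
  parentPort.postMessage(result)
}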

@@ -12,10 +12,10 @@ module.exports = {
SharedArrayBuffer: 'readonly',
},
rules: {
'no-undef': ['warn'],
'no-undef': ['error'],
eqeqeq: ['warn', 'smart'],
'no-unused-vars': 'off',
'no-prototype-builtins': 'off',
'id-length': ['warn', { exceptions: [] }],
'id-length': ['warn', { exceptions: ['i', 'j'] }],
},
}
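
Net effect of the rule changes: undefined identifiers are now hard errors instead of warnings, and id-length stops flagging the conventional one-letter loop counters while still flagging other short names. Roughly (illustrative snippet, not from the commit):

for (let i = 0; i < subjects.length; i++) {} // 'i' and 'j' are now excepted
let q = questions[0] // 'q' still triggers the id-length warning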

@@ -22,7 +22,7 @@
const express = require('express')
const bodyParser = require('body-parser')
const busboy = require('connect-busboy')
const uuidv4 = require('uuid/v4') // TODO: deprecated, but imports are not supported
const { v4: uuidv4 } = require('uuid')
const fs = require('fs')
const app = express()
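
The same uuid migration appears in three files: the deep require('uuid/v4') entry point is deprecated in newer uuid releases, so the named v4 export is destructured instead, and call sites stay unchanged. Illustrative usage (the variable name and value are made up):

const { v4: uuidv4 } = require('uuid')
const sessionID = uuidv4() // e.g. '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'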

@@ -35,7 +35,7 @@ const http = require('http')
const https = require('https')
const cors = require('cors')
const cookieParser = require('cookie-parser')
const uuidv4 = require('uuid/v4') // TODO: deprecated, but imports are not supported
const { v4: uuidv4 } = require('uuid')
const dbtools = require('./utils/dbtools.js')
const reqlogger = require('./middlewares/reqlogger.middleware.js')

@@ -20,11 +20,14 @@ Question Server
module.exports = {
ProcessIncomingRequest: ProcessIncomingRequest,
LoadJSON: LoadJSON,
LoadJSONFromObject: LoadJSONFromObject,
}
const dataFile = './qminingPublic/data.json'
const recDataFile = './stats/recdata'
const processDataWorkerFile = './src/utils/processData.js'
const { Worker } = require('worker_threads')
const logger = require('../utils/logger.js')
const idStats = require('../utils/ids.js')
idStats.Load() // FIXME: don't always load when actions.js is used
@@ -33,7 +36,6 @@ const classes = require('./classes.js')
classes.initLogger(logger.DebugLog)
// if a received question doesn't match at least this % with any other question in the db, it gets
// added to the db
const minMatchAmmountToAdd = 90 // FIXME: test this value
const writeAfter = 1 // write after # of adds FIXME: set reasonable save rate
var currWrites = 0
@@ -59,75 +61,92 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
'\n------------------------------------------------------------------------------\n'
utils.AppendToFile(towrite, recDataFile)
logger.DebugLog('recDataFile written', 'actions', 1)
} catch (e) {
} catch (err) {
logger.log('Error writing received data.')
}
try {
// recievedData: { version: "", id: "", subj: "", quiz: [] }
let d = recievedData
let data = recievedData
// FIXME: this if is only for backwards compatibility, remove it sometime in the future
if (typeof d !== 'object') {
d = JSON.parse(recievedData)
if (typeof data !== 'object') {
data = JSON.parse(recievedData)
}
logger.DebugLog('recievedData JSON parsed', 'actions', 1)
logger.DebugLog(d, 'actions', 3)
let allQLength = d.quiz.length
let allQuestions = []
logger.DebugLog(data, 'actions', 3)
let allQLength = data.quiz.length
d.quiz.forEach((question) => {
logger.DebugLog('Question:', 'actions', 2)
logger.DebugLog(question, 'actions', 2)
let q = new classes.Question(question.Q, question.A, question.data)
logger.DebugLog(
'Searching for question in subj ' + d.subj,
'actions',
3
)
logger.DebugLog(q, 'actions', 3)
let sames = qdb.Search(q, d.subj)
logger.DebugLog('Same questions:', 'actions', 2)
logger.DebugLog('Length: ' + sames.length, 'actions', 2)
logger.DebugLog(sames, 'actions', 3)
// if it didn't find any question, or every found question's match is lower than minMatchAmmountToAdd
let isNew =
sames.length === 0 ||
sames.every((searchResItem) => {
return searchResItem.match < minMatchAmmountToAdd
const worker = new Worker(processDataWorkerFile, {
workerData: {
data: data,
qdb: qdb,
},
})
logger.DebugLog('isNew: ' + isNew, 'actions', 2)
if (isNew) {
allQuestions.push(q)
worker.on('error', (err) => {
logger.Log('Process Data Worker error!', logger.GetColor('redbg'))
console.error(err)
reject(err)
})
worker.on('exit', (code) => {
logger.DebugLog('ProcessData exit, code: ' + code, 'actions', 1)
if (code !== 0) {
logger.Log(
'Process Data Worker error! Exit code is not 0',
logger.GetColor('redbg')
)
reject(new Error('Process Data Worker exit code is not 0!'))
}
})
worker.on('message', (workerMsg) => {
logger.DebugLog('Message from processData', 'actions', 1)
logger.DebugLog(workerMsg, 'actions', 1)
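// workerData and worker messages cross the thread boundary via structured clone, which drops
// class methods, so the plain objects coming back are re-wrapped as Question instances here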
const allQuestions = workerMsg.map((resultQuestion) => {
return new classes.Question(
resultQuestion.Q,
resultQuestion.A,
resultQuestion.data
)
})
try {
let color = logger.GetColor('green')
let msg = ''
if (allQuestions.length > 0) {
color = logger.GetColor('blue')
msg += `New questions: ${allQuestions.length} ( All: ${allQLength} )`
allQuestions.forEach((q) => {
const sName = classes.SUtils.GetSubjNameWithoutYear(d.subj)
allQuestions.forEach((currentQuestion) => {
const sName = classes.SUtils.GetSubjNameWithoutYear(data.subj)
logger.DebugLog(
'Adding question with subjName: ' + sName + ' :',
'actions',
3
)
logger.DebugLog(q, 'actions', 3)
qdb.AddQuestion(sName, q)
logger.DebugLog(currentQuestion, 'actions', 3)
qdb.AddQuestion(sName, currentQuestion)
})
currWrites++
logger.DebugLog('currWrites for data.json: ' + currWrites, 'actions', 1)
logger.DebugLog(
'currWrites for data.json: ' + currWrites,
'actions',
1
)
if (currWrites >= writeAfter && !dryRun) {
currWrites = 0
try {
qdb.version = infos.version
qdb.motd = infos.motd
logger.DebugLog('version and motd set for data.json', 'actions', 3)
} catch (e) {
logger.DebugLog(
'version and motd set for data.json',
'actions',
3
)
} catch (err) {
logger.Log('MOTD/Version writing/reading error!')
}
logger.DebugLog('Writing data.json', 'actions', 1)
@@ -140,14 +159,14 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
msg += `No new data ( ${allQLength} )`
}
let subjRow = '\t' + d.subj
if (d.id) {
subjRow += ' ( CID: ' + logger.logHashed(d.id) + ')'
idStats.LogId(d.id, d.subj)
let subjRow = '\t' + data.subj
if (data.id) {
subjRow += ' ( CID: ' + logger.logHashed(data.id) + ')'
idStats.LogId(data.id, data.subj)
}
logger.Log(subjRow)
if (d.version !== undefined) {
msg += '. Version: ' + d.version
if (data.version !== undefined) {
msg += '. Version: ' + data.version
}
logger.Log('\t' + msg, color)
@@ -156,8 +175,17 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
logger.DebugLog('ProcessIncomingRequest done', 'actions', 1)
resolve(allQLength)
} catch (e) {
console.log(e)
} catch (error) {
console.log(error)
logger.Log(
'Error while processing processData worker result!',
logger.GetColor('redbg')
)
reject(new Error('Error while processing processData worker result!'))
}
})
} catch (err) {
console.log(err)
logger.Log('Could not parse JSON data', logger.GetColor('redbg'))
reject(new Error('Could not parse JSON data'))
}
@@ -166,27 +194,31 @@ function ProcessIncomingRequest(recievedData, qdb, infos, dryRun) {
// loading stuff
function LoadJSON(dataFile) {
var data = JSON.parse(utils.ReadFile(dataFile))
return LoadJSONFromObject(data)
}
function LoadJSONFromObject(data) {
try {
var d = JSON.parse(utils.ReadFile(dataFile))
var r = new classes.QuestionDB()
var result = new classes.QuestionDB()
var rt = []
for (var i = 0; i < d.Subjects.length; i++) {
let s = new classes.Subject(d.Subjects[i].Name)
for (var i = 0; i < data.Subjects.length; i++) {
let subject = new classes.Subject(data.Subjects[i].Name)
var j = 0
for (j = 0; j < d.Subjects[i].Questions.length; j++) {
var currQ = d.Subjects[i].Questions[j]
s.AddQuestion(new classes.Question(currQ.Q, currQ.A, currQ.data))
for (j = 0; j < data.Subjects[i].Questions.length; j++) {
var currQ = data.Subjects[i].Questions[j]
subject.AddQuestion(new classes.Question(currQ.Q, currQ.A, currQ.data))
}
rt.push({
name: d.Subjects[i].Name,
name: data.Subjects[i].Name,
count: j,
})
r.AddSubject(s)
result.AddSubject(subject)
}
return r
} catch (e) {
return result
} catch (err) {
logger.Log('Error loading stuff', logger.GetColor('redbg'), true)
console.log(e)
console.log(err)
}
}
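
LoadJSON is now a thin file-reading wrapper around the new LoadJSONFromObject, presumably so the worker can rebuild a usable QuestionDB from the plain object it receives (workerData is structured-cloned, so a live QuestionDB instance with its methods cannot be passed across). A sketch of the two entry points (the dataFile path is from the commit, the rest is illustrative):

const fromDisk = LoadJSON('./qminingPublic/data.json') // read + parse the file, then delegate
const rebuilt = LoadJSONFromObject(workerData.qdb) // what processData.js calls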

@ -3,7 +3,7 @@ const logger = require('../utils/logger.js')
const dbtools = require('../utils/dbtools.js')
const dbStructPath = '../modules/api/apiDBStruct.json'
const usersDBPath = '../data/dbs/users.db'
const uuidv4 = require('uuid/v4') // TODO: deprecated, but imports are not supported
const { v4: uuidv4 } = require('uuid')
let authDB

src/utils/processData.js (new file)

@@ -0,0 +1,62 @@
const dataFile = './qminingPublic/data.json'
const recDataFile = './stats/recdata'
const {
Worker,
isMainThread,
parentPort,
workerData,
} = require('worker_threads')
const logger = require('../utils/logger.js')
const actions = require('../utils/actions.js')
const classes = require('./classes.js')
classes.initLogger(logger.DebugLog)
const minMatchAmmountToAdd = 90 // FIXME: test this value
if (!isMainThread) {
logger.DebugLog('Starting worker thread', 'processdata', 1)
console.log(workerData)
parentPort.postMessage(
ProcessData(workerData.data, actions.LoadJSONFromObject(workerData.qdb))
)
} else {
logger.Log(
'Process data should not run on main thread!',
logger.GetColor('redbg')
)
}
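// ProcessData: compare every incoming question against the questions already stored for its
// subject and return only the ones that do not match anything closely enough, i.e. the new ones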
function ProcessData(data, qdb) {
let allQuestions = []
data.quiz.forEach((question) => {
logger.DebugLog('Question:', 'actions', 2)
logger.DebugLog(question, 'actions', 2)
let currentQuestion = new classes.Question(
question.Q,
question.A,
question.data
)
logger.DebugLog('Searching for question in subj ' + data.subj, 'actions', 3)
logger.DebugLog(currentQuestion, 'actions', 3)
let sames = qdb.Search(currentQuestion, data.subj)
logger.DebugLog('Same questions:', 'actions', 2)
logger.DebugLog('Length: ' + sames.length, 'actions', 2)
logger.DebugLog(sames, 'actions', 3)
// if it didn't find any question, or every found question's match is lower than minMatchAmmountToAdd
let isNew =
sames.length === 0 ||
sames.every((searchResItem) => {
return searchResItem.match < minMatchAmmountToAdd
})
logger.DebugLog('isNew: ' + isNew, 'actions', 2)
if (isNew) {
allQuestions.push(currentQuestion)
}
})
return allQuestions
}