Domain specific data files

mrfry 2020-12-22 16:11:01 +01:00
parent 81577ad621
commit f87e165084
9 changed files with 174 additions and 71 deletions

View file

@@ -33,9 +33,8 @@ import {
   processIncomingRequest,
   logResult,
   backupData,
+  shouldSaveDataFile,
   loadJSON,
-  getQuestionDbsWithoutFunct,
-  RecievedData,
   Result,
 } from '../../utils/actions'
 import dbtools from '../../utils/dbtools'
@@ -48,7 +47,13 @@ import {
 } from '../../utils/workerPool'
 import { SetupData } from '../../server'
-import { ModuleType, User, DataFile, Request } from '../../types/basicTypes'
+import {
+  ModuleType,
+  User,
+  DataFile,
+  Request,
+  QuestionDb,
+} from '../../types/basicTypes'
 
 // files
 const msgFile = 'stats/msgs'
@@ -65,6 +70,7 @@ const todosFile = 'data/todos.json'
 const userScriptFile = 'submodules/moodle-test-userscript/stable.user.js'
 const rootRedirectToFile = 'data/apiRootRedirectTo'
 const recievedQuestionFile = 'stats/recievedQuestions'
+const dbsFile = 'data/dbs.json'
 
 // other constants
 const line = '====================================================' // lol
@@ -90,30 +96,7 @@ function GetApp(): ModuleType {
   // files in public dirs
   const recivedFiles = publicDir + 'recivedfiles'
   const uloadFiles = publicDir + 'f'
-  // FIXME: this to seperate file?
-  const dataFiles: Array<DataFile> = [
-    {
-      path: `${publicDir}oldData.json`,
-      name: 'oldData',
-      shouldSave: (recData: RecievedData): boolean => {
-        return recData.version.startsWith('2.0.')
-      },
-    },
-    {
-      path: `${publicDir}data.json`,
-      name: 'newData',
-      shouldSave: (recData: RecievedData): boolean => {
-        return recData.version.startsWith('2.1.')
-      },
-    },
-    {
-      path: `${publicDir}fromwebsiteData.json`,
-      name: 'fromwebsiteData',
-      shouldSave: (recData: RecievedData): boolean => {
-        return recData.version === 'WEBSITE'
-      },
-    },
-  ]
+  const dataFiles: Array<DataFile> = utils.ReadJSON(dbsFile)
 
   const motdFile = publicDir + 'motd'
   const userSpecificMotdFile = publicDir + 'userSpecificMotd.json'
@@ -161,7 +144,7 @@ function GetApp(): ModuleType {
     })
   )
 
-  const questionDbs = loadJSON(dataFiles)
+  const questionDbs = loadJSON(dataFiles, publicDir)
   let version = ''
   let rootRedirectURL = ''
   let motd = ''
@@ -169,7 +152,7 @@ function GetApp(): ModuleType {
   // FIXME: check type from file
   let testUsers: any = []
 
-  initWorkerPool(getQuestionDbsWithoutFunct(questionDbs))
+  initWorkerPool(questionDbs)
 
   function mergeObjSum(a, b) {
     const res = { ...b }
@@ -931,7 +914,61 @@ function GetApp(): ModuleType {
     const dryRun = testUsers.includes(user.id)
 
     try {
-      processIncomingRequest(req.body, questionDbs, dryRun, user)
+      let maxIndex = -1
+      const suitedQuestionDbs = questionDbs.filter((qdb) => {
+        if (maxIndex < qdb.index) {
+          maxIndex = qdb.index
+        }
+        return shouldSaveDataFile(qdb, req.body)
+      }, [])
+
+      if (suitedQuestionDbs.length === 0) {
+        const location = req.body.location.split('/')[2]
+        // TODO: should check that location is not an empty string
+        logger.Log(
+          `No suitable questiondbs found for ${location}, creating a new one...`
+        )
+
+        const newDb: DataFile = {
+          path: `${location}.json`,
+          name: location,
+          shouldSave: {
+            location: {
+              val: location,
+            },
+          },
+        }
+
+        utils.WriteFile(
+          JSON.stringify(
+            [
+              ...utils.ReadJSON(dbsFile),
+              newDb, // stored as 'data.json', but is './publicDirs/.../data.json' at runtime
+            ],
+            null,
+            2
+          ),
+          dbsFile
+        )
+
+        // "loading" the new db
+        const loadedNewDb: QuestionDb = {
+          ...newDb,
+          data: [],
+          path: publicDir + newDb.path,
+          index: maxIndex,
+        }
+        utils.WriteFile('[]', loadedNewDb.path)
+
+        suitedQuestionDbs.push(loadedNewDb)
+        questionDbs.push(loadedNewDb)
+        // TODO: problem: new dbs won't get to workers before trying to search with them.
+        msgAllWorker({
+          newdb: loadedNewDb,
+          type: 'newdb',
+        })
+      }
+
+      processIncomingRequest(req.body, suitedQuestionDbs, dryRun, user)
         .then((resultArray: Array<Result>) => {
           logResult(req.body, resultArray, user.id, dryRun)
@@ -947,7 +984,7 @@ function GetApp(): ModuleType {
           if (totalNewQuestions > 0) {
             msgAllWorker({
-              qdbs: getQuestionDbsWithoutFunct(questionDbs),
+              qdbs: questionDbs,
               type: 'update',
             })
           }
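
Note: the hard-coded dataFiles array is gone; the api module now reads the same information from data/dbs.json (the dbsFile constant added above), and loadJSON() prepends publicDir to each relative path. The actual contents of dbs.json are not part of this diff; below is a minimal TypeScript sketch of what an equivalent set of entries could look like, assuming the three legacy rules are migrated as-is (exampleDbs is a made-up name):

// Illustrative only: mirrors the removed hard-coded array using the new
// declarative shouldSave descriptors; paths are relative to publicDir.
const exampleDbs: Array<DataFile> = [
  { path: 'oldData.json', name: 'oldData', shouldSave: { version: { compare: 'startsWith', val: '2.0.' } } },
  { path: 'data.json', name: 'newData', shouldSave: { version: { compare: 'startsWith', val: '2.1.' } } },
  { path: 'fromwebsiteData.json', name: 'fromwebsiteData', shouldSave: { version: { compare: 'equals', val: 'WEBSITE' } } },
]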

View file

@@ -18,7 +18,7 @@
   <center>
     <h1>404</h1>
-    <iframe width="660" height="465" src="https://www.youtube-nocookie.com/embed/SjfspM5sDIA" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
+    <iframe width="660" height="465" src="https://www.youtube-nocookie.com/embed/yztzob4k2xk" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
   </center>
 </body>

View file

@@ -0,0 +1,29 @@
+const fs = require('fs')
+const { simplifyString } = require('./src/utils/classes.js')
+
+const file = './publicDirs/qminingPublic/data.json'
+const data = JSON.parse(fs.readFileSync(file, 'utf8'))
+
+const res = []
+
+data.forEach((subj) => {
+  const questions = []
+  subj.Questions.forEach((question) => {
+    const res = {}
+    if (question.Q) {
+      res.Q = simplifyString(question.Q)
+    }
+    if (question.A) {
+      res.A = simplifyString(question.A)
+    }
+    res.data = question.data
+    questions.push(res)
+  })
+  res.push({
+    Name: subj.Name,
+    Questions: questions,
+  })
+})
+
+fs.writeFileSync(file + '.res', JSON.stringify(res))

View file

@@ -20,12 +20,20 @@ export interface Subject {
 export interface DataFile {
   path: string
   name: string
-  shouldSave: (recData: any) => boolean
+  shouldSave: {
+    location?: {
+      val: string
+    }
+    version?: {
+      compare: string
+      val: string
+    }
+  }
 }
 
 export interface QuestionDb extends DataFile {
   data: Array<Subject>
-  index: Number
+  index: number
 }
 
 export interface User {
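
The shouldSave field changes from a function to plain data so that DataFile entries can be stored in (and appended to) dbs.json. For comparison, a sketch of one entry in the old and the new shape, with values borrowed from the array removed in the api module (illustrative only, not code from this commit; newShape is a made-up name):

// old shape: shouldSave was a function, so entries could not live in a JSON file
// { path: publicDir + 'data.json', name: 'newData', shouldSave: (recData) => recData.version.startsWith('2.1.') }

// new shape: shouldSave is a descriptor, evaluated by shouldSaveDataFile() in utils/actions
const newShape: DataFile = {
  path: 'data.json', // relative; loadJSON() now prepends the data directory
  name: 'newData',
  shouldSave: { version: { compare: 'startsWith', val: '2.1.' } },
}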

View file

@@ -43,6 +43,7 @@ export interface RecievedData {
   id: string
   version: string
   scriptVersion: string
+  location: string
 }
 
 export interface Result {
@@ -86,7 +87,7 @@ export function logResult(
       logger.Log('\t' + msg, color)
     })
   } else {
-    logger.Log('\tNo db-s passed shouldSave!', logger.GetColor('red'))
+    logger.Log('\tResults length is zero!', logger.GetColor('red'))
   }
 }
@@ -130,9 +131,7 @@ export function processIncomingRequest(
   // FIXME: this many promises and stuff might be unnecesarry
   const promises: Array<Promise<Result>> = questionDbs.reduce((acc, qdb) => {
-    if (qdb.shouldSave(recievedData)) {
     acc.push(processIncomingRequestUsingDb(recievedData, qdb, dryRun, user))
-    }
     return acc
   }, [])
 
   return Promise.all(promises)
@@ -262,17 +261,53 @@ function processIncomingRequestUsingDb(
   })
 }
 
+export function shouldSaveDataFile(
+  df: DataFile,
+  recievedData: RecievedData
+): Boolean {
+  if (df.shouldSave.version) {
+    const { compare, val } = df.shouldSave.version
+    if (compare === 'equals') {
+      return recievedData.version === val
+    } else if (compare === 'startsWith') {
+      return recievedData.version.startsWith(val)
+    }
+  }
+  if (df.shouldSave.location) {
+    const { val } = df.shouldSave.location
+    return recievedData.location.includes(val)
+  }
+  return false
+}
+
-export function loadJSON(dataFiles: Array<DataFile>): Array<QuestionDb> {
+export function loadJSON(
+  dataFiles: Array<DataFile>,
+  dataDir: string
+): Array<QuestionDb> {
   return dataFiles.reduce((acc, dataFile, index) => {
-    if (!utils.FileExists(dataFile.path)) {
-      utils.WriteFile(JSON.stringify([]), dataFile.path)
+    const dataPath = dataDir + dataFile.path
+
+    if (!utils.FileExists(dataPath)) {
+      utils.WriteFile(JSON.stringify([]), dataPath)
     }
     try {
      acc.push({
         ...dataFile,
+        path: dataPath,
         index: index,
-        data: JSON.parse(utils.ReadFile(dataFile.path)),
+        data: JSON.parse(utils.ReadFile(dataPath)),
       })
     } catch (err) {
       console.error(err)
@@ -305,12 +340,3 @@ export function backupData(questionDbs: Array<QuestionDb>): void {
     }
   })
 }
-
-export function getQuestionDbsWithoutFunct(
-  questionDbs: Array<QuestionDb> // FIXME: type for dis
-): Array<any> {
-  return questionDbs.map((qdb) => {
-    const { shouldSave, ...res } = qdb // eslint-disable-line
-    return res
-  })
-}
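
A minimal usage sketch of the new shouldSaveDataFile() routing, assuming the types shown above; the RecievedData literal is abbreviated and cast, and the host name is invented:

// Illustrative only: one version-based and one location-based rule.
const recieved = {
  version: '2.1.3',
  location: 'https://elearning.example.com/mod/quiz/attempt.php',
} as RecievedData

const byVersion: DataFile = {
  path: 'data.json',
  name: 'newData',
  shouldSave: { version: { compare: 'startsWith', val: '2.1.' } },
}
const byLocation: DataFile = {
  path: 'elearning.example.com.json',
  name: 'elearning.example.com',
  shouldSave: { location: { val: 'elearning.example.com' } },
}

shouldSaveDataFile(byVersion, recieved) // true: version starts with '2.1.'
shouldSaveDataFile(byLocation, recieved) // true: location contains the configured host
// a questionDb matching neither rule is filtered out by the api module before processIncomingRequest()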

View file

@@ -509,11 +509,11 @@ if (!isMainThread) {
       const index = msg.index
       const searchIn = msg.data.searchIn
 
-      console.log(
-        `[THREAD #${workerIndex}]: staring work${
-          !isNaN(index) ? ` on job index #${index}` : ''
-        }`
-      )
+      // console.log(
+      //   `[THREAD #${workerIndex}]: staring work${
+      //     !isNaN(index) ? ` on job index #${index}` : ''
+      //   }`
+      // )
 
       let searchResult = []
@@ -549,18 +549,21 @@ if (!isMainThread) {
         result: sortedResult,
       })
 
-      console.log(
-        `[THREAD #${workerIndex}]: Work ${
-          !isNaN(index) ? `#${index}` : ''
-        }done!`
-      )
+      // console.log(
+      //   `[THREAD #${workerIndex}]: Work ${
+      //     !isNaN(index) ? `#${index}` : ''
+      //   }done!`
+      // )
     } else if (msg.type === 'update') {
       qdbs = msg.qdbs
-      console.log(`[THREAD #${workerIndex}]: update`)
+      // console.log(`[THREAD #${workerIndex}]: update`)
+    } else if (msg.type === 'newdb') {
+      qdbs.push(msg.newdb)
+      // console.log(`[THREAD #${workerIndex}]: newdb`)
     }
   })
 } else {
-  console.log('[THREAD]: Main thread!')
+  // console.log('[THREAD]: Main thread!')
 }
 
 // ------------------------------------------------------------------------

View file

@@ -32,7 +32,7 @@ export default {
   logHashed: logHashed,
   hr: hr,
   C: C,
-  setNewLogfileName,
+  setNewLogfileName: setNewLogfileName,
 }
 
 const DELIM = C('green') + '|' + C()
@@ -166,7 +166,7 @@ function LogReq(req: Request, toFile?: boolean, statusCode?: string): void {
   if (!toFile) {
     Log(logEntry)
   } else {
-    const defLogs = getColoredDateString() + dl + logEntry
+    const defLogs = utils.GetDateString() + dl + logEntry
     utils.AppendToFile(defLogs, vlogDir + logFileName)
   }

View file

@@ -27,7 +27,7 @@ export function doALongTask(obj: any): Promise<any> {
         resolve(res)
         // TODO: check if result is really a result, and want to release port
         pool.release(client)
-        console.log('[RELEASE]: #' + client.index)
+        // console.log('[RELEASE]: #' + client.index)
       })
     })
     .catch(function(err) {
@@ -56,9 +56,9 @@ export function initWorkerPool(initData: any): void {
       }
     },
     destroy: function(client) {
-      console.log('[DESTROY]')
+      // console.log('[DESTROY]')
       client.worker.terminate()
-      console.log('[DESTROYED] #' + client.index)
+      // console.log('[DESTROYED] #' + client.index)
     },
   }
@@ -115,9 +115,9 @@ function getAWorker(i, initData) {
 // ---------------------------------------------------------------------------
 
 function doSomething(client, obj) {
-  const { index, worker } = client
+  const { /* index, */ worker } = client
   return new Promise((resolve) => {
-    console.log('[ACCUIRE]: #' + index)
+    // console.log('[ACCUIRE]: #' + index)
     worker.postMessage(obj)
     worker.once('message', (msg) => {
       resolve(msg)

@@ -1 +1 @@
-Subproject commit bc776b3307c975f826afb1338874330924d601ba
+Subproject commit d798a8322d33cdaf0b38771f78a5fb7b7207eefc