worker file split, sending new questions to peers instantly

This commit is contained in:
mrfry 2023-04-24 20:39:15 +02:00
parent 8c4e184741
commit 252826a081
25 changed files with 1016 additions and 705 deletions

View file

@ -0,0 +1,30 @@
import { parentPort } from 'node:worker_threads'
import { cleanDb } from '../../utils/qdbUtils'
import { Question, QuestionDb } from '../../types/basicTypes'
import { WorkerResult } from '../worker'
/** Task message for cleaning questions out of a question db (see handleDbClean). */
export type DbCleanTaskObject = {
  type: 'dbClean'
  data: {
    // Questions involved in the clean — exact semantics are defined by
    // cleanDb in qdbUtils; verify there
    questions: Question[]
    // Name of the subject to clean
    subjToClean: string
    // Cutoff date (numeric timestamp — presumably unix time, TODO confirm)
    overwriteBeforeDate: number
    // Index of the question db to clean
    qdbIndex: number
  }
}
/**
 * Runs a db clean task and reports the removed question indexes back to the
 * parent thread.
 */
export const handleDbClean = async (
  qdbs: QuestionDb[],
  msg: DbCleanTaskObject,
  workerIndex: number
): Promise<void> => {
  // cleanDb does the actual work and returns the removed indexes
  const result: WorkerResult = {
    msg: `From thread #${workerIndex}: db clean done`,
    workerIndex: workerIndex,
    result: cleanDb(msg.data, qdbs),
  }
  parentPort.postMessage(result)
}

View file

@ -0,0 +1,34 @@
import { parentPort } from 'node:worker_threads'
import { QuestionDb } from '../../types/basicTypes'
import { Edits, editDb } from '../../utils/actions'
import logger from '../../utils/logger'
/** Task message for applying edits to one question db (see handleDbEdit). */
export type DbEditTaskObject = {
  type: 'dbEdit'
  // dbIndex: position of the db in the qdbs array; edits: the edit actions
  data: { dbIndex: number; edits: Edits }
}
/**
 * Applies an edit task to one question db and publishes the new db array
 * via setQdbs, then notifies the parent thread.
 */
export const handleDbEdit = async (
  qdbs: QuestionDb[],
  msg: DbEditTaskObject,
  workerIndex: number,
  setQdbs: (newVal: Array<QuestionDb>) => void
): Promise<void> => {
  const { dbIndex, edits } = msg.data
  // editDb returns the edited copy of the db; swap it in by position
  const { resultDb } = editDb(qdbs[dbIndex], edits)
  const updatedQdbs = qdbs.map((qdb, i) => (i === dbIndex ? resultDb : qdb))
  setQdbs(updatedQdbs)

  logger.DebugLog(`Worker db edit ${workerIndex}`, 'worker update', 1)
  parentPort.postMessage({
    msg: `From thread #${workerIndex}: db edit`,
    workerIndex: workerIndex,
  })
}

View file

@ -0,0 +1,36 @@
import { parentPort } from 'node:worker_threads'
import { QuestionDb } from '../../types/basicTypes'
import { getSubjectDifference } from '../../utils/qdbUtils'
/** Task message for diffing a remote question db against a local one (see handleMerge). */
export type MergeTaskObject = {
  type: 'merge'
  data: {
    // 'index' property of the local db to compare against
    localQdbIndex: number
    // The peer's question db
    remoteQdb: QuestionDb
  }
}
/**
 * Computes what the remote question db adds on top of the local one and
 * posts the difference (new data / new subjects) back to the parent thread.
 */
export const handleMerge = async (
  qdbs: QuestionDb[],
  msg: MergeTaskObject,
  workerIndex: number
): Promise<void> => {
  const { localQdbIndex, remoteQdb } = msg.data
  // NOTE(review): assumes a qdb with this index always exists — .find()
  // returns undefined otherwise; verify against callers
  const localQdb = qdbs.find((qdb) => qdb.index === localQdbIndex)
  const diff = getSubjectDifference(localQdb.data, remoteQdb.data)
  parentPort.postMessage({
    msg: `From thread #${workerIndex}: merge done`,
    workerIndex: workerIndex,
    newData: diff.newData,
    newSubjects: diff.newSubjects,
    localQdbIndex: localQdbIndex,
  })
}

View file

@ -0,0 +1,22 @@
import { parentPort } from 'node:worker_threads'
import { QuestionDb } from '../../types/basicTypes'
/** Task message carrying a whole new question db to append (see handleNewDb). */
export type NewDbTaskObject = {
  type: 'newdb'
  data: QuestionDb
}
/**
 * Appends the received question db to the worker's local db list and
 * notifies the parent thread.
 */
export const handleNewDb = async (
  qdbs: QuestionDb[],
  msg: NewDbTaskObject,
  workerIndex: number,
  setQdbs: (newVal: Array<QuestionDb>) => void
): Promise<void> => {
  setQdbs(qdbs.concat(msg.data))
  parentPort.postMessage({
    msg: `From thread #${workerIndex}: new db add done`,
    workerIndex: workerIndex,
  })
}

View file

@ -0,0 +1,81 @@
import { parentPort } from 'node:worker_threads'
import { QuestionDb } from '../../types/basicTypes'
import logger from '../../utils/logger'
import { createQuestion } from '../../utils/qdbUtils'
import { Result } from '../../utils/actions'
/** Task message with new questions to add to a subject (see handleNewQuestions). */
export type NewQuestionTaskObject = {
  type: 'newQuestions'
  // Same shape as a question-add Result, minus the db name
  data: Omit<Result, 'qdbName'>
}
/**
 * Adds newly received questions to a subject of the target question db,
 * creating the subject when it does not exist yet, then notifies the
 * parent thread.
 *
 * Fix: the original issued two setQdbs() calls, the second one built from
 * the stale `qdbs` captured before the first call — the first update was
 * silently discarded. The new state is now computed fully and published
 * with a single setQdbs() call.
 */
export const handleNewQuestions = async (
  qdbs: QuestionDb[],
  msg: NewQuestionTaskObject,
  workerIndex: number,
  setQdbs: (newVal: Array<QuestionDb>) => void
): Promise<void> => {
  const { subjName, qdbIndex, newQuestions } = msg.data

  // Make sure every incoming question has its search cache computed
  const newQuestionsWithCache = newQuestions.map((question) =>
    question.cache ? question : createQuestion(question)
  )

  // Pass 1: append to the existing subject (if present) in the target db
  let added = false
  let updatedQdbs = qdbs.map((qdb) => {
    if (qdb.index !== qdbIndex) {
      return qdb
    }
    return {
      ...qdb,
      data: qdb.data.map((subj) => {
        if (subj.Name !== subjName) {
          return subj
        }
        added = true
        return {
          Name: subj.Name,
          Questions: [...subj.Questions, ...newQuestionsWithCache],
        }
      }),
    }
  })

  // Pass 2: the subject did not exist yet — create it with the new questions
  if (!added) {
    updatedQdbs = updatedQdbs.map((qdb) => {
      if (qdb.index !== qdbIndex) {
        return qdb
      }
      return {
        ...qdb,
        data: [
          ...qdb.data,
          {
            Name: subjName,
            Questions: [...newQuestionsWithCache],
          },
        ],
      }
    })
  }

  setQdbs(updatedQdbs)
  logger.DebugLog(`Worker new question ${workerIndex}`, 'worker update', 1)
  parentPort.postMessage({
    msg: `From thread #${workerIndex}: update done`,
    workerIndex: workerIndex,
  })
}

View file

@ -0,0 +1,153 @@
import { parentPort } from 'node:worker_threads'
import { PeerInfo, Question, QuestionDb } from '../../types/basicTypes'
import { files, paths, readAndValidateFile } from '../../utils/files'
import utils from '../../utils/utils'
import { RecievedData } from '../../utils/actions'
import { removeCacheFromQuestion } from '../../utils/qdbUtils'
import { QuestionAddResponse } from '../../modules/api/submodules/qminingapi'
import logger from '../../utils/logger'
import {
loginToPeer,
peerToString,
updatePeersFile,
} from '../../utils/p2putils'
import { post } from '../../utils/networkUtils'
/**
 * Logs in to a peer and returns the session cookie, or null when the login
 * attempt did not yield one.
 */
const login = async (peer: PeerInfo): Promise<string> => {
  const loginResult = await loginToPeer(peer)
  return typeof loginResult === 'string' ? loginResult : null
}
/** Task message for broadcasting new questions to peers (see handleQuestionsToPeers). */
export type QuestionsToPeersTaskObject = {
  type: 'sendQuestionsToPeers'
  data: {
    newQuestions: Question[]
    // Forwarded to peers as-is — presumably the quiz/test location;
    // verify against callers
    location: string
    // Subject name the questions belong to
    subj: string
  }
}
/**
 * Sends freshly added questions to every known peer.
 *
 * For each peer: reuse its stored session cookie or log in, POST the
 * questions to /api/isAdding, and retry once with a fresh login when the
 * peer answers 'nouser' (stale cookie). Per-peer outcomes are collected
 * into buckets and logged; a message is always posted back to the parent
 * thread, even when there is nothing to send.
 */
export const handleQuestionsToPeers = async (
  _qdbs: QuestionDb[],
  msg: QuestionsToPeersTaskObject,
  workerIndex: number
): Promise<void> => {
  const { newQuestions, location, subj } = msg.data
  // This server's own domain; sent as the client id and question source
  const domain = utils.ReadFile(paths.domainFile).trim()
  const peers = readAndValidateFile<PeerInfo[]>(files.peersFile)

  // Nothing to do without peers or questions — still reply so the caller
  // waiting on this job is released
  if (!peers || peers.length === 0 || newQuestions.length === 0) {
    parentPort.postMessage({
      msg: `From thread #${workerIndex}: sendQuestionsToPeers done`,
      workerIndex: workerIndex,
    })
    return
  }

  const dataToSend: RecievedData = {
    fromPeer: true,
    subj: subj,
    location: location,
    id: domain, // client ID
    version: 'P2P',
    // Strip the search cache and mark this domain as the questions' source
    quiz: newQuestions.map((question) => {
      return removeCacheFromQuestion({
        ...question,
        data: {
          ...question.data,
          source: domain,
        },
      })
    }),
  }

  // Outcome buckets; only used for the summary log at the end
  const results: {
    errors: PeerInfo[]
    hasNew: PeerInfo[]
    sent: PeerInfo[]
    loginErrors: PeerInfo[]
  } = {
    errors: [],
    hasNew: [],
    sent: [],
    loginErrors: [],
  }

  // POST the prepared payload to one peer with the given session cookie
  const postData = (peer: PeerInfo, sessionCookie: string) => {
    return post<QuestionAddResponse>({
      hostname: peer.host,
      port: peer.port,
      http: peer.http,
      path: '/api/isAdding',
      bodyObject: dataToSend,
      cookies: `sessionID=${sessionCookie}`,
    })
  }

  // Peers are contacted sequentially (await in loop), not in parallel
  for (const peer of peers) {
    let sessionCookie = peer.sessionCookie

    if (!sessionCookie) {
      sessionCookie = await login(peer)
      if (!sessionCookie) {
        results.loginErrors.push(peer)
        continue
      }
      // Persist the fresh cookie for later runs
      updatePeersFile(peers, { ...peer, sessionCookie: sessionCookie })
    }

    let res = await postData(peer, sessionCookie)
    if (res.data?.result === 'nouser' && sessionCookie) {
      // Stored cookie was stale: log in again and retry once
      sessionCookie = await login(peer)
      if (!sessionCookie) {
        results.loginErrors.push(peer)
        continue
      }
      updatePeersFile(peers, { ...peer, sessionCookie: sessionCookie })
      res = await postData(peer, sessionCookie)
    }

    if (res.error || !res.data?.success) {
      results.errors.push(peer)
    } else if (res.data?.totalNewQuestions > 0) {
      results.hasNew.push(peer)
    } else {
      results.sent.push(peer)
    }
  }

  // Build one colored summary line listing the peers in each bucket
  const logMsg: string[] = []
  const addToLogMsg = (
    peerResult: PeerInfo[],
    prefix: string,
    color: string
  ) => {
    if (peerResult.length > 0) {
      logMsg.push(
        `${logger.C(color)}${prefix}:${logger.C()} ` +
          peerResult.map((x) => peerToString(x)).join(', ')
      )
    }
  }

  addToLogMsg(results.loginErrors, 'Login error', 'red')
  addToLogMsg(results.errors, 'Error', 'red')
  addToLogMsg(results.hasNew, 'Had new questions', 'blue')
  addToLogMsg(results.sent, 'Sent', 'green')

  logger.Log(
    `\t${logger.C(
      'green'
    )}Sent new questions to peers${logger.C()}; ${logMsg.join(', ')}`
  )

  parentPort.postMessage({
    msg: `From thread #${workerIndex}: sendQuestionsToPeers done`,
    workerIndex: workerIndex,
  })
}

View file

@ -0,0 +1,53 @@
import { parentPort } from 'node:worker_threads'
import { Question, QuestionDb } from '../../types/basicTypes'
import { updateQuestionsInArray } from '../../utils/actions'
/** Task message for removing/updating questions in one subject (see handleRmQuestions). */
export type RmQuestionsTaskObject = {
  type: 'rmQuestions'
  data: {
    // Index pairs of questions to remove — exact semantics are defined by
    // updateQuestionsInArray; verify there
    questionIndexesToRemove: number[][]
    subjIndex: number
    qdbIndex: number
    recievedQuestions: Question[]
  }
}
/**
 * Replaces the question list of one subject in one db, using
 * updateQuestionsInArray to drop/refresh the indicated questions, then
 * notifies the parent thread.
 */
export const handleRmQuestions = async (
  qdbs: QuestionDb[],
  msg: RmQuestionsTaskObject,
  workerIndex: number,
  setQdbs: (newVal: QuestionDb[]) => void
): Promise<void> => {
  const { questionIndexesToRemove, subjIndex, qdbIndex, recievedQuestions } =
    msg.data

  setQdbs(
    qdbs.map((qdb, i) => {
      if (i !== qdbIndex) {
        return qdb
      }
      const data = qdb.data.map((subj, j) =>
        j !== subjIndex
          ? subj
          : {
              ...subj,
              Questions: updateQuestionsInArray(
                questionIndexesToRemove,
                subj.Questions,
                recievedQuestions
              ),
            }
      )
      return { ...qdb, data: data }
    })
  )

  parentPort.postMessage({
    msg: `From thread #${workerIndex}: rm question done`,
    workerIndex: workerIndex,
  })
}

View file

@ -0,0 +1,286 @@
import { parentPort } from 'worker_threads'
import {
Question,
QuestionData,
QuestionDb,
Subject,
} from '../../types/basicTypes'
import logger from '../../utils/logger'
import {
SearchResultQuestion,
getSubjNameWithoutYear,
minMatchToNotSearchOtherSubjects,
noPossibleAnswerMatchPenalty,
prepareQuestion,
searchSubject,
} from '../../utils/qdbUtils'
import { recognizeTextFromBase64 } from '../../utils/tesseract'
import { WorkerResult } from '../worker'
/** Task message describing a question search (see handleSearch). */
export type SearchTaskObject = {
  type: 'search'
  data: {
    // Indexes of the question dbs to search in
    searchIn: number[]
    // The question to look for
    question: Question
    // Subject name used to pick which subjects are searched first
    subjName: string
    testUrl?: string
    questionData?: QuestionData
    // When set, fall back to searching every subject if the name-filtered
    // pass found nothing good enough
    searchInAllIfNoResult?: boolean
    // Stop searching further subjects once a result reaches this match value
    searchTillMatchPercent?: number
    // NOTE(review): broad index signature — handleSearch reads an extra
    // 'index' property through it; consider declaring 'index' explicitly
    [key: string]: any
  }
}
/**
 * Searches the given subjects for a question.
 *
 * Pass 1 only searches subjects whose (year-stripped) name occurs in
 * subjName; when searchInAllIfNoResult is set and that pass found nothing
 * (or nothing above minMatchToNotSearchOtherSubjects), pass 2 searches
 * every subject. Results get a penalty when possible answers don't match,
 * then are sorted by descending match.
 *
 * Fixes: searchInAllIfNoResult used the boxed 'Boolean' wrapper type
 * instead of the primitive 'boolean'; log-message typo 'Reqults' corrected.
 */
export function doSearch(
  data: Array<Subject>,
  subjName: string,
  question: Question,
  searchTillMatchPercent?: number,
  searchInAllIfNoResult?: boolean
): SearchResultQuestion[] {
  let result: SearchResultQuestion[] = []
  const questionToSearch = prepareQuestion(question)

  // Pass 1: subjects whose name matches subjName
  data.every((subj) => {
    if (
      subjName
        .toLowerCase()
        .includes(getSubjNameWithoutYear(subj.Name).toLowerCase())
    ) {
      logger.DebugLog(`Searching in ${subj.Name} `, 'searchworker', 2)
      const subjRes = searchSubject(
        subj,
        questionToSearch,
        subjName,
        searchTillMatchPercent
      )
      result = result.concat(subjRes)
      if (searchTillMatchPercent) {
        // '.every' stops iterating on false: stop once a good-enough
        // match was found
        return !subjRes.some((sr) => {
          return sr.match >= searchTillMatchPercent
        })
      }
      return true
    }
    return true
  })

  if (searchInAllIfNoResult) {
    // FIXME: dont research subject searched above
    // NOTE(review): result is not yet sorted here, so result[0] is not
    // necessarily the best match — behavior preserved from the original
    if (
      result.length === 0 ||
      result[0].match < minMatchToNotSearchOtherSubjects
    ) {
      logger.DebugLog(
        'Results length is zero when comparing names, trying all subjects',
        'searchworker',
        1
      )
      // Pass 2: every subject, same early-exit logic as above
      data.every((subj) => {
        const subjRes = searchSubject(
          subj,
          questionToSearch,
          subjName,
          searchTillMatchPercent
        )
        result = result.concat(subjRes)
        if (searchTillMatchPercent) {
          const continueSearching = !subjRes.some((sr) => {
            return sr.match >= searchTillMatchPercent
          })
          return continueSearching
        }
        return true
      })
    }
  }

  result = setNoPossibleAnswersPenalties(
    questionToSearch.data.possibleAnswers,
    result
  )

  // Sort by match, best first
  result = result.sort((q1, q2) => {
    if (q1.match < q2.match) {
      return 1
    } else if (q1.match > q2.match) {
      return -1
    } else {
      return 0
    }
  })

  return result
}
/**
 * Applies a score penalty to results whose stored possible answers do not
 * cover every possible answer of the searched question.
 *
 * The penalized list is only returned when at least one result fully
 * matches the question's possible answers; otherwise the results are
 * returned untouched (also when the question or none of the results carry
 * a possible-answer array).
 */
export function setNoPossibleAnswersPenalties(
  questionPossibleAnswers: QuestionData['possibleAnswers'],
  results: SearchResultQuestion[]
): SearchResultQuestion[] {
  if (!Array.isArray(questionPossibleAnswers)) {
    return results
  }

  // Nothing to compare against when no result has possible answers
  const anyHasPossibleAnswers = results.some((x) =>
    Array.isArray(x.q.data.possibleAnswers)
  )
  if (!anyHasPossibleAnswers) {
    return results
  }

  // How many of a result's possible answers occur among the question's
  // possible answers (substring match on 'val')
  const countMatches = (result: SearchResultQuestion): number => {
    if (!Array.isArray(result.q.data.possibleAnswers)) {
      return 0
    }
    return result.q.data.possibleAnswers.filter((resultPossibleAnswer) =>
      questionPossibleAnswers.some((questionPossibleAnswer) =>
        questionPossibleAnswer.val && resultPossibleAnswer.val
          ? questionPossibleAnswer.val.includes(resultPossibleAnswer.val)
          : false
      )
    ).length
  }

  let possibleAnswerMatch = false
  const updated = results.map((result) => {
    if (countMatches(result) === questionPossibleAnswers.length) {
      possibleAnswerMatch = true
      return result
    }
    return {
      ...result,
      match: result.match - noPossibleAnswerMatchPenalty,
      detailedMatch: {
        ...result.detailedMatch,
        qMatch: result.detailedMatch.qMatch - noPossibleAnswerMatchPenalty,
      },
    }
  })

  return possibleAnswerMatch ? updated : results
}
/**
 * OCRs the question's base64 images (if any); when text was recognized,
 * returns a copy of the question with the recognized text as the question
 * text and type 'simple', otherwise returns the question unchanged.
 */
async function recognizeQuestionImage(question: Question): Promise<Question> {
  const base64Data = question.data.base64
  if (!Array.isArray(base64Data) || !base64Data.length) {
    return question
  }

  // OCR each image sequentially, keeping only non-empty results
  const recognized: string[] = []
  for (const base64 of base64Data) {
    const text = await recognizeTextFromBase64(base64)
    if (text && text.trim()) {
      recognized.push(text)
    }
  }

  if (!recognized.length) {
    return question
  }
  return {
    ...question,
    Q: recognized.join(' '),
    data: {
      ...question.data,
      type: 'simple',
    },
  }
}
/**
 * Handles a 'search' task: optionally OCRs the question's images, searches
 * the requested question dbs, sorts the combined results and posts them
 * back to the parent thread. Search errors are logged (with the failing
 * input, for reproduction) and reported via the result's 'error' flag.
 */
export const handleSearch = async (
  qdbs: QuestionDb[],
  msg: SearchTaskObject,
  workerIndex: number
): Promise<void> => {
  const {
    subjName,
    question: originalQuestion,
    searchTillMatchPercent,
    searchInAllIfNoResult,
    searchIn,
    index, // job index, only used in the reply message (read via index signature)
  } = msg.data
  let searchResult: SearchResultQuestion[] = []
  let error = false

  // Replace image-based question text with OCR-ed text before searching
  const question = await recognizeQuestionImage(originalQuestion)

  try {
    qdbs.forEach((qdb) => {
      if (searchIn.includes(qdb.index)) {
        const res = doSearch(
          qdb.data,
          subjName,
          question,
          searchTillMatchPercent,
          searchInAllIfNoResult
        )
        // Tag each result with the name of the db it came from
        searchResult = [
          ...searchResult,
          ...res.map((x) => {
            return {
              ...x,
              detailedMatch: {
                ...x.detailedMatch,
                qdb: qdb.name,
              },
            }
          }),
        ]
      }
    })
  } catch (err) {
    logger.Log('Error in worker thread!', logger.GetColor('redbg'))
    console.error(err)
    console.error(
      JSON.stringify(
        {
          subjName: subjName,
          question: question,
          searchTillMatchPercent: searchTillMatchPercent,
          searchInAllIfNoResult: searchInAllIfNoResult,
          searchIn: searchIn,
          index: index,
        },
        null,
        2
      )
    )
    error = true
  }

  // sorting
  // Results from multiple dbs were concatenated, so sort the combined list
  // by descending match
  const sortedResult: SearchResultQuestion[] = searchResult.sort((q1, q2) => {
    if (q1.match < q2.match) {
      return 1
    } else if (q1.match > q2.match) {
      return -1
    } else {
      return 0
    }
  })
  const workerResult: WorkerResult = {
    msg: `From thread #${workerIndex}: job ${
      !isNaN(index) ? `#${index}` : ''
    }done`,
    workerIndex: workerIndex,
    result: sortedResult,
    error: error,
  }
  parentPort.postMessage(workerResult)
}

95
src/worker/worker.ts Normal file
View file

@ -0,0 +1,95 @@
import { isMainThread, parentPort, workerData } from 'worker_threads'
import { QuestionDb } from '../types/basicTypes'
import { SearchResultQuestion, countOfQdbs } from '../utils/qdbUtils'
import logger from '../utils/logger'
import { TaskObject } from './workerPool'
import { tesseractLoaded } from '../utils/tesseract'
import { handleSearch } from './handlers/handleSearch'
import { handleMerge } from './handlers/handleMerge'
import { handleDbEdit } from './handlers/handleDbEdit'
import { handleNewQuestions } from './handlers/handleNewQuestion'
import { handleNewDb } from './handlers/handleNewDb'
import { handleDbClean } from './handlers/handleDbClean'
import { handleQuestionsToPeers } from './handlers/handleQuestionsToPeers'
import { handleRmQuestions } from './handlers/handleRmQuestions'
/** Shape of the reply a worker posts back to the parent for a task. */
export interface WorkerResult {
  msg: string
  workerIndex: number
  // Search hits or removed-question indexes, depending on the task type
  result?: SearchResultQuestion[] | number[][]
  // Set when the task failed; details are logged on the worker side
  error?: boolean
}
// Worker entry point: only start listening when actually running inside a
// worker thread (the module may also be imported for its types).
if (!isMainThread) {
  handleWorkerData()
}

/**
 * Initializes the worker from workerData (index + initial question dbs)
 * and starts handling task messages from the parent thread.
 */
async function handleWorkerData() {
  const {
    workerIndex,
    initData,
  }: { workerIndex: number; initData: Array<QuestionDb> } = workerData

  // Local db state; handlers replace it through setQdbs (closure over qdbs)
  let qdbs: Array<QuestionDb> = initData
  const setQdbs = (newVal: Array<QuestionDb>) => {
    qdbs = newVal
  }

  const qdbCount = initData.length
  const { subjCount, questionCount } = countOfQdbs(initData)
  logger.Log(
    `[THREAD #${workerIndex}]: Worker ${workerIndex} reporting for duty! qdbs: ${qdbCount}, subjects: ${subjCount.toLocaleString()}, questions: ${questionCount.toLocaleString()}`
  )

  parentPort.on('message', async (msg: TaskObject) => {
    try {
      // Wait for OCR to be ready before running any task
      await tesseractLoaded
      await handleMessage(qdbs, msg, workerIndex, setQdbs)
    } catch (e) {
      // Always answer, even on unhandled errors, so the pool's pending
      // job is not stuck waiting forever
      console.error(e)
      parentPort.postMessage({
        msg: `From thread #${workerIndex}: unhandled error occured! (${
          (msg as any)?.type
        })`,
        workerIndex: workerIndex,
        e: e,
      })
    }
  })
}
/**
 * Dispatches an incoming task message to the matching handler based on its
 * 'type' discriminant. Unknown types are logged and reported back to the
 * parent thread.
 */
async function handleMessage(
  qdbs: QuestionDb[],
  msg: TaskObject,
  workerIndex: number,
  setQdbs: (newVal: QuestionDb[]) => void
) {
  switch (msg.type) {
    case 'search':
      await handleSearch(qdbs, msg, workerIndex)
      break
    case 'merge':
      await handleMerge(qdbs, msg, workerIndex)
      break
    case 'dbEdit':
      await handleDbEdit(qdbs, msg, workerIndex, setQdbs)
      break
    case 'newQuestions':
      await handleNewQuestions(qdbs, msg, workerIndex, setQdbs)
      break
    case 'newdb':
      await handleNewDb(qdbs, msg, workerIndex, setQdbs)
      break
    case 'dbClean':
      await handleDbClean(qdbs, msg, workerIndex)
      break
    case 'rmQuestions':
      await handleRmQuestions(qdbs, msg, workerIndex, setQdbs)
      break
    case 'sendQuestionsToPeers':
      await handleQuestionsToPeers(qdbs, msg, workerIndex)
      break
    default:
      logger.Log(`Invalid msg type!`, logger.GetColor('redbg'))
      console.error(msg)
      parentPort.postMessage({
        msg: `From thread #${workerIndex}: Invalid message type (${
          (msg as any)?.type
        })!`,
        workerIndex: workerIndex,
      })
  }
}

326
src/worker/workerPool.ts Normal file
View file

@ -0,0 +1,326 @@
/* ----------------------------------------------------------------------------
Question Server
GitLab: <https://gitlab.com/MrFry/mrfrys-node-server>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
------------------------------------------------------------------------- */
import { Worker } from 'worker_threads'
import { v4 as uuidv4 } from 'uuid'
import { EventEmitter } from 'events'
import os from 'os'
import type { QuestionDb } from '../types/basicTypes'
import { SearchTaskObject } from './handlers/handleSearch'
import { DbEditTaskObject } from './handlers/handleDbEdit'
import { NewQuestionTaskObject } from './handlers/handleNewQuestion'
import { NewDbTaskObject } from './handlers/handleNewDb'
import { DbCleanTaskObject } from './handlers/handleDbClean'
import { RmQuestionsTaskObject } from './handlers/handleRmQuestions'
import { MergeTaskObject } from './handlers/handleMerge'
import { QuestionsToPeersTaskObject } from './handlers/handleQuestionsToPeers'
import { WorkerResult } from './worker'
import logger from '../utils/logger'
// Worker count: NS_THREAD_COUNT env var when set to a number, otherwise one
// per CPU core ('+undefined' is NaN, which is falsy, so the fallback applies)
const threadCount = +process.env.NS_THREAD_COUNT || os.cpus().length
/** Bookkeeping entry for one worker thread in the pool. */
interface WorkerObj {
  worker: Worker
  index: number
  // Fix: primitive 'boolean' instead of the boxed 'Boolean' wrapper type
  // (a Boolean object is always truthy, even 'new Boolean(false)')
  free: boolean
}
/** Union of every task message a worker can receive (discriminated by 'type'). */
export type TaskObject =
  | SearchTaskObject
  | DbEditTaskObject
  | NewQuestionTaskObject
  | NewDbTaskObject
  | DbCleanTaskObject
  | RmQuestionsTaskObject
  | MergeTaskObject
  | QuestionsToPeersTaskObject
/** A queued task waiting for a free worker. */
interface PendingJob {
  workData: TaskObject
  doneEvent: DoneEvent
  // When set, the job may only run on this specific worker
  targetWorkerIndex?: number
}

/** Internal scheduler events; both trigger another processJob() pass. */
interface JobEvent extends EventEmitter {
  on(event: 'jobDone', listener: () => void): this
  on(event: 'newJob', listener: () => void): this
  emit(event: 'newJob'): boolean
  emit(event: 'jobDone'): boolean
}

/** Per-job event, fired exactly once when the job's result arrives. */
interface DoneEvent extends EventEmitter {
  once(event: 'done', listener: (result: WorkerResult) => void): this
  emit(event: 'done', res: WorkerResult): boolean
}
// Warn when more than this many callers are waiting for a free worker
export const defaultAlertOnPendingCount = 100
let alertOnPendingCount = defaultAlertOnPendingCount
// Worker entry file; loaded through ts-node (see workerTs below)
const workerFile = './src/worker/worker.ts'
let workers: Array<WorkerObj>
// Supplies the initial question dbs for newly created/restarted workers
let getInitData: () => Array<QuestionDb> = null

// Jobs waiting for a worker, keyed by a random job id
const pendingJobs: {
  [id: string]: PendingJob
} = {}

// Scheduler wiring: attempt a dispatch whenever a job is added or finishes
const jobEvents: JobEvent = new EventEmitter()
jobEvents.on('jobDone', () => {
  processJob()
})
jobEvents.on('newJob', () => {
  processJob()
})
// ---------------------------------------------------------------------------
// Called when dispatching a job to a worker rejects; currently log-only.
function handleWorkerError(worker: WorkerObj, err: Error) {
  // TODO: restart worker if exited or things like that
  logger.Log('resourcePromise error', logger.GetColor('redbg'))
  console.error(err, worker)
}
// TODO: accuire all workers here, and handle errors so they can be removed if threads exit
/**
 * Sends the same task to every worker in the pool and resolves once all of
 * them have answered.
 *
 * Fix: the original wrapped Promise.all() in an extra 'new Promise'
 * constructor (promise-constructor anti-pattern); it now awaits directly.
 */
export async function msgAllWorker(data: TaskObject): Promise<WorkerResult[]> {
  logger.DebugLog('MSGING ALL WORKER', 'job', 1)
  const res = await Promise.all(
    workers.map((worker) => queueWork(data, worker.index))
  )
  logger.DebugLog('MSGING ALL WORKER DONE', 'job', 1)
  return res
}
/**
 * Overrides the pending-job count above which a warning is printed; resets
 * to the default when called without a value.
 */
export function setPendingJobsAlertCount(newVal?: number): void {
  const count = newVal ?? defaultAlertOnPendingCount
  logger.DebugLog('setPendingJobsAlertCount: ' + count, 'job', 1)
  alertOnPendingCount = count
}
/**
 * Queues a task and resolves with its result once a worker has processed
 * it. When targetWorkerIndex is given, the job waits for that exact worker.
 */
export function queueWork(
  obj: TaskObject,
  targetWorkerIndex?: number
): Promise<WorkerResult> {
  if (Object.keys(pendingJobs).length > alertOnPendingCount) {
    console.error(
      `More than ${alertOnPendingCount} callers waiting for free resource! (${
        Object.keys(pendingJobs).length
      })`
    )
  }
  const jobId = uuidv4()

  // FIXME: delete doneEvent?
  const doneEvent: DoneEvent = new EventEmitter()
  pendingJobs[jobId] = {
    workData: obj,
    targetWorkerIndex: targetWorkerIndex,
    doneEvent: doneEvent,
  }
  // Wake the scheduler; processJob removes the entry once it starts the job
  jobEvents.emit('newJob')
  return new Promise((resolve) => {
    doneEvent.once('done', (result: WorkerResult) => {
      // Job finished: let the scheduler try to start the next pending one
      jobEvents.emit('jobDone')
      resolve(result)
    })
  })
}
/**
 * Creates the worker thread pool. May only be called once; subsequent
 * calls log an error and return null.
 *
 * Fix: the log message named the wrong env var (NS_WORKER_COUNT, while the
 * count is read from NS_THREAD_COUNT) and misspelled "environment".
 *
 * @param initDataGetter returns the question dbs used to seed each worker
 * @returns the created workers, or null when the pool already exists
 */
export function initWorkerPool(
  initDataGetter: () => Array<QuestionDb>
): Array<WorkerObj> {
  getInitData = initDataGetter
  if (workers) {
    logger.Log('WORKERS ALREADY EXISTS', logger.GetColor('redbg'))
    return null
  }
  workers = []

  if (process.env.NS_THREAD_COUNT) {
    // NOTE(review): other Log() calls pass logger.GetColor(...) as the
    // second argument — confirm that passing the raw name is intended here
    logger.Log(
      `Setting thread count from environment variable NS_THREAD_COUNT: '${threadCount}'`,
      'yellowbg'
    )
  }

  for (let i = 0; i < threadCount; i++) {
    workers.push({
      worker: getAWorker(i, getInitData()),
      index: i,
      free: true,
    })
  }

  return workers
}
// ---------------------------------------------------------------------------
/**
 * Scheduler pass: picks the first pending job that has a usable worker
 * (its pinned target worker, or any free one), marks the worker busy,
 * runs the job and emits the job's 'done' event with the result.
 */
function processJob() {
  if (Object.keys(pendingJobs).length > 0) {
    const keys = Object.keys(pendingJobs)
    let jobKey: string, freeWorker: WorkerObj
    let i = 0
    // Scan pending jobs in key order until one can be scheduled
    while (!freeWorker && i < keys.length) {
      jobKey = keys[i]
      if (!isNaN(pendingJobs[jobKey].targetWorkerIndex)) {
        // Pinned job: only usable when its target worker is free.
        // NOTE(review): this indexes 'workers' by array position, but
        // handleWorkerExit removes and re-appends workers, so position may
        // not equal a worker's 'index' — verify this cannot mis-target.
        if (workers[pendingJobs[jobKey].targetWorkerIndex].free) {
          freeWorker = workers[pendingJobs[jobKey].targetWorkerIndex]
          logger.DebugLog(
            `RESERVING WORKER ${pendingJobs[jobKey].targetWorkerIndex}`,
            'job',
            1
          )
        }
      } else {
        freeWorker = workers.find((worker) => {
          return worker.free
        })
        if (freeWorker) {
          logger.DebugLog(
            `RESERVING FIRST AVAILABLE WORKER ${freeWorker.index}`,
            'job',
            1
          )
        }
      }
      i++
    }
    if (!freeWorker) {
      logger.DebugLog('NO FREE WORKER', 'job', 1)
      return
    }

    // Mark busy before dispatching so other passes skip this worker
    if (freeWorker.free) {
      freeWorker.free = false
    }
    const job = pendingJobs[jobKey]
    delete pendingJobs[jobKey]

    doSomething(freeWorker, job.workData)
      .then((res: WorkerResult) => {
        // Free the worker first so the 'jobDone' listener can reuse it
        freeWorker.free = true
        job.doneEvent.emit('done', res)
      })
      .catch(function (err) {
        handleWorkerError(freeWorker, err)
      })
  }
}
/**
 * Spawns one worker thread with its init data and wires up error logging
 * and exit handling (exited workers are replaced in handleWorkerExit).
 */
function getAWorker(workerIndex: number, initData: Array<QuestionDb>) {
  const worker = workerTs(workerFile, {
    workerData: {
      workerIndex: workerIndex,
      initData: initData,
    },
  })
  // Allow many concurrent once('message') listeners (see doSomething)
  // without triggering MaxListenersExceededWarning
  worker.setMaxListeners(50)

  worker.on('error', (err) => {
    logger.Log('Worker error!', logger.GetColor('redbg'))
    console.error(err)
  })

  worker.on('exit', (exitCode) => {
    handleWorkerExit(workerIndex, exitCode)
  })
  return worker
}
/**
 * Replaces a worker that exited: logs the exit, tears down the old Worker,
 * removes it from the pool and starts a fresh one with the same index.
 */
function handleWorkerExit(exitedWorkerIndex: number, exitCode: number) {
  logger.Log(
    `[THREAD #${exitedWorkerIndex}]: exit code: ${exitCode}`,
    logger.GetColor('redbg')
  )
  const exitedWorker = workers.find((worker) => {
    return worker.index === exitedWorkerIndex
  })
  // Best-effort cleanup; the thread may already be gone
  try {
    exitedWorker.worker.removeAllListeners()
    exitedWorker.worker.terminate()
  } catch (e) {
    console.log(e)
  }

  workers = workers.filter((worker) => {
    return worker.index !== exitedWorkerIndex
  })

  if (workers.length < threadCount) {
    logger.Log(`[THREAD #${exitedWorkerIndex}]: Restarting ... `)
    workers.push({
      worker: getAWorker(exitedWorkerIndex, getInitData()),
      index: exitedWorkerIndex,
      free: true,
    })
  }
}
// ---------------------------------------------------------------------------
/**
 * Posts a task to a specific worker and resolves with the worker's next
 * message.
 *
 * Improvements: the 'message' listener is registered before postMessage()
 * (no reliance on message delivery being deferred), and the returned
 * promise is typed.
 * NOTE(review): if the worker dies before answering, this promise never
 * settles — callers have no timeout; confirm this is acceptable.
 */
function doSomething(
  currWorker: WorkerObj,
  obj: TaskObject
): Promise<WorkerResult> {
  const { worker } = currWorker
  return new Promise((resolve) => {
    worker.once('message', (msg: WorkerResult) => {
      resolve(msg)
    })
    worker.postMessage(obj)
  })
}
/**
 * Creates a Worker able to run a TypeScript file: the worker evaluates a
 * small bootstrap that registers ts-node and then require()s the real file,
 * whose path is smuggled in via workerData.__filename (removed again before
 * the user code runs).
 */
const workerTs = (
  file: string,
  wkOpts: {
    workerData: {
      workerIndex: number
      initData: QuestionDb[]
      __filename?: string
    }
    eval?: boolean
  }
) => {
  // eval: true makes Worker treat its first argument as source code
  wkOpts.eval = true
  wkOpts.workerData.__filename = file
  return new Worker(
    `
    try {
      const wk = require('worker_threads');
      require('ts-node').register();
      let file = wk.workerData.__filename;
      delete wk.workerData.__filename;
      require(file);
    } catch (e) {
      console.error('Error while creating new Worker:')
      console.error(e)
    }
    `,
    wkOpts
  )
}