mirror of https://gitlab.com/MrFry/mrfrys-node-server
synced 2025-04-01 20:24:18 +02:00

create path refactor, removed debug log

This commit is contained in:
parent 4bb8059986
commit 1a3c806e66

19 changed files with 57 additions and 209 deletions

@@ -22,7 +22,6 @@ import type { Response, NextFunction, RequestHandler } from 'express'
import type { Request, User } from '../types/basicTypes'
import type { Database } from 'better-sqlite3'

import logger from '../utils/logger'
import dbtools from '../utils/dbtools'

const EXCEPTIONS = [

@@ -96,12 +95,10 @@ export default function (options: Options): RequestHandler {

if (!sessionID) {
if (isException) {
logger.DebugLog(`EXCEPTION: ${req.url}`, 'auth', 1)
req.session = { isException: true }
next()
return
}
logger.DebugLog(`No session ID: ${req.url}`, 'auth', 1)
renderLogin(req, res)
return
}

@@ -110,12 +107,10 @@ export default function (options: Options): RequestHandler {

if (!user) {
if (isException) {
logger.DebugLog(`EXCEPTION: ${req.url}`, 'auth', 1)
req.session = { isException: true }
next()
return
}
logger.DebugLog(`No user:${req.url}`, 'auth', 1)
renderLogin(req, res)
return
}

@@ -126,8 +121,6 @@ export default function (options: Options): RequestHandler {
isException: isException,
}

logger.DebugLog(`ID #${user.id}: ${req.url}`, 'auth', 1)

dbtools.Update(
userDB,
'sessions',

@@ -155,8 +148,6 @@ export default function (options: Options): RequestHandler {
}

function GetUserBySessionID(db: Database, sessionID: string) {
logger.DebugLog(`Getting user from db`, 'auth', 2)

const session = dbtools.Select(db, 'sessions', {
id: sessionID,
})[0]

@@ -20,7 +20,6 @@

import cookie from 'cookie'

import logger from '../utils/logger'
import dbtools from '../utils/dbtools'
import { Socket } from '../types/basicTypes'

@@ -66,8 +65,6 @@ export default function SocketAuth(options: Options): any {
}

function GetUserBySessionID(db: any, sessionID: string) {
logger.DebugLog(`Getting user from db`, 'auth', 2)

const session = dbtools.Select(db, 'sessions', {
id: sessionID,
})[0]

@@ -153,10 +153,6 @@ function getForumData(
const forumPath = forumDir + '/' + safeForumName
const contentFilePath = forumPath + '/' + forumContentsFileName

if (!utils.FileExists(forumPath)) {
utils.CreatePath(forumPath, true)
}

if (!utils.FileExists(contentFilePath)) {
utils.WriteFile('{}', contentFilePath)
}

@@ -191,7 +187,7 @@ function setup(data: SubmoduleData): void {
const forumFiles = publicDir + 'forumFiles'

if (!utils.FileExists(forumDir)) {
utils.CreatePath(forumDir, true)
utils.CreatePath(forumDir)
}

app.get('/forumEntries', (req: Request, res) => {

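Note: the pair of CreatePath lines just above shows the call-site pattern that repeats throughout this commit. Judging from the old CreatePath implementation in the utils hunk near the end of this diff, the second argument meant "also create the final path segment, not only its parents"; the rewritten CreatePath always creates the full chain, so directory call sites simply drop the flag:

    // before: the flag was needed so the directory itself got created
    utils.CreatePath(forumDir, true)
    // after: CreatePath is a recursive mkdir, no flag needed
    utils.CreatePath(forumDir)
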
@@ -171,7 +171,6 @@ function searchInDbs(
})
.then((taskResult: WorkerResult) => {
try {
logger.DebugLog(taskResult, 'ask', 2)
resolve({
question: question,
result: taskResult.result as SearchResultQuestion[],

@@ -211,11 +210,6 @@ function getResult(data: {
searchInDbs(question, subj, searchIn, testUrl).then(
(res: DbSearchResult) => {
if (res.result.length === 0) {
logger.DebugLog(
`No result while searching specific question db ${testUrl}`,
'ask',
1
)
const searchInMore = getDbIndexesToSearchIn(
testUrl,
questionDbs,

@@ -296,7 +290,7 @@ function saveQuestion(
const subjPath = `${savedQuestionsDir}/${subject}`
const savedSubjQuestionsFilePath = `${subjPath}/${constants.savedQuestionsFileName}`

utils.CreatePath(subjPath, true)
utils.CreatePath(subjPath)
if (!utils.FileExists(savedSubjQuestionsFilePath)) {
utils.WriteFile('[]', savedSubjQuestionsFilePath)
}

@@ -809,12 +803,6 @@ function setup(data: SubmoduleData): Submodule {
uid: uid,
loginDate: date,
}
} else {
logger.DebugLog(
`cid: ${cid}, uid: ${uid} tried to register multiple times`,
'register',
1
)
}
}

@@ -23,7 +23,7 @@ import fs from 'fs'
import logger from '../../../utils/logger'
import utils from '../../../utils/utils'
import { Request, SubmoduleData, User } from '../../../types/basicTypes'
import { publicDir } from '../../../utils/files'
import { paths, publicDir } from '../../../utils/files'

const dataFileName = '.data.json'

@@ -99,7 +99,7 @@ function setup(data: SubmoduleData): void {
const dir = x[2]
const fname = x.pop()
const dataFilePath =
userFilesDir + '/' + dir + '/' + dataFileName
paths.userFilesDir + '/' + dir + '/' + dataFileName

const data = utils.ReadJSON(dataFilePath)

@@ -122,28 +122,19 @@ function setup(data: SubmoduleData): void {
next()
})

const userFilesDir = publicDir + 'userFiles'
if (!utils.FileExists(userFilesDir)) {
utils.CreatePath(userFilesDir, true)
}

app.get('/listUserDir', (req: Request, res) => {
logger.LogReq(req)

if (!utils.FileExists(userFilesDir)) {
utils.CreatePath(userFilesDir, true)
}

const subdir: string = req.query.subdir

if (subdir) {
const result = listDir(subdir, userFilesDir)
const result = listDir(subdir, paths.userFilesDir)
res.json(result)
} else {
res.json({
success: true,
dirs: utils.ReadDir(userFilesDir).reduce((acc, file) => {
const stat = fs.lstatSync(userFilesDir + '/' + file)
dirs: utils.ReadDir(paths.userFilesDir).reduce((acc, file) => {
const stat = fs.lstatSync(paths.userFilesDir + '/' + file)

if (!stat.isDirectory()) {
return acc

@@ -152,7 +143,8 @@ function setup(data: SubmoduleData): void {
acc.push({
name: file,
date: stat.mtime.getTime(),
size: utils.ReadDir(userFilesDir + '/' + file).length,
size: utils.ReadDir(paths.userFilesDir + '/' + file)
.length,
})
return acc
}, []),

@@ -175,7 +167,8 @@ function setup(data: SubmoduleData): void {
}
const safeDir = dir.replace(/\.+/g, '').replace(/\/+/g, '')
const safeFname = fname.replace(/\.+/g, '.').replace(/\/+/g, '')
const filePath = userFilesDir + '/' + safeDir + '/' + safeFname
const filePath =
paths.userFilesDir + '/' + safeDir + '/' + safeFname

if (!utils.FileExists(filePath)) {
res.json({

@@ -185,7 +178,8 @@ function setup(data: SubmoduleData): void {
return
}
utils.deleteFile(filePath)
const usersFile = userFilesDir + '/' + safeDir + '/' + dataFileName
const usersFile =
paths.userFilesDir + '/' + safeDir + '/' + dataFileName
const users = utils.ReadJSON(usersFile)
delete users[safeFname]
utils.WriteFile(JSON.stringify(users), usersFile)

@@ -209,14 +203,14 @@ function setup(data: SubmoduleData): void {
}
const safeName = name.replace(/\.+/g, '').replace(/\/+/g, '')

if (utils.FileExists(userFilesDir + '/' + safeName)) {
if (utils.FileExists(paths.userFilesDir + '/' + safeName)) {
res.json({
success: false,
msg: `Dir ${name} already exists`,
})
return
}
utils.CreatePath(userFilesDir + '/' + safeName, true)
utils.CreatePath(paths.userFilesDir + '/' + safeName)

res.json({
success: true,

@@ -236,7 +230,7 @@ function setup(data: SubmoduleData): void {
return
}
const safeDir = dir.replace(/\.+/g, '.').replace(/\/+/g, '/')
if (!utils.FileExists(userFilesDir + '/' + safeDir)) {
if (!utils.FileExists(paths.userFilesDir + '/' + safeDir)) {
res.json({
success: false,
msg: `dir '${dir}' does not exists!`,

@@ -245,7 +239,7 @@ function setup(data: SubmoduleData): void {
}

utils
.uploadFile(req, userFilesDir + '/' + safeDir)
.uploadFile(req, paths.userFilesDir + '/' + safeDir)
.then((body) => {
logger.Log(
`Successfull upload ${body.filePath}`,

@@ -253,7 +247,7 @@ function setup(data: SubmoduleData): void {
)

const usersFile =
userFilesDir + '/' + safeDir + '/' + dataFileName
paths.userFilesDir + '/' + safeDir + '/' + dataFileName
const users = utils.ReadJSON(usersFile)
users[body.fileName] = { uid: user.id }
utils.WriteFile(JSON.stringify(users), usersFile)

@@ -276,7 +270,7 @@ function setup(data: SubmoduleData): void {
const x = safePath.split('/')
const dir = x[1]
const fname = x.pop()
const dataFilePath = userFilesDir + '/' + dir + '/' + dataFileName
const dataFilePath = paths.userFilesDir + '/' + dir + '/' + dataFileName

const data = utils.ReadJSON(dataFilePath)

@@ -314,7 +308,7 @@ function setup(data: SubmoduleData): void {
utils.WriteFile(JSON.stringify(data), dataFilePath)
}

const result = listDir(dir, userFilesDir)
const result = listDir(dir, paths.userFilesDir)
res.json(result)
})

@@ -324,17 +318,16 @@ function setup(data: SubmoduleData): void {

const safeName = name.replace(/\.+/g, '').replace(/\/+/g, '')

if (!utils.FileExists(userFilesDir + '/' + safeName)) {
if (!utils.FileExists(paths.userFilesDir + '/' + safeName)) {
res.json({
success: false,
msg: `Dir ${name} does not exist!`,
})
return
}
utils.CreatePath(userFilesDir + '/' + safeName, true)
const result = listDir(name, userFilesDir)
const result = listDir(name, paths.userFilesDir)
if (result.files.length === 0) {
utils.deleteDir(userFilesDir + '/' + safeName)
utils.deleteDir(paths.userFilesDir + '/' + safeName)
} else {
res.json({ succes: false, msg: `Dir ${name} is not empty!` })
return

@@ -49,7 +49,7 @@ interface Session {

function BackupDB(usersDbBackupPath: string, userDB: Database) {
logger.Log('Backing up auth DB ...')
utils.CreatePath(usersDbBackupPath, true)
utils.CreatePath(usersDbBackupPath)
userDB
.backup(
`${usersDbBackupPath}/users.${utils

@@ -76,7 +76,6 @@ function GetApp(): ModuleType {
}, [])

routes.forEach((route) => {
logger.DebugLog(`Added route /${route}`, 'DataEditor routes', 1)
app.get(`/${route}`, function (_req: Request, res) {
res.redirect(`${route}.html`)
})

@@ -173,11 +173,6 @@ function GetApp(): ModuleType {
if (!redirect.nolog) {
logger.LogReq(req)
}
logger.DebugLog(
`Qmining module ${redirect.from} redirect`,
'infos',
1
)

let target = redirect.to
if (!redirect.to.includes('https://')) {

@@ -211,7 +206,6 @@ function GetApp(): ModuleType {
}, [])

routes.forEach((route: string) => {
logger.DebugLog(`Added route /${route}`, 'Qmining routes', 1)
app.get(`/${route}`, function (req: Request, res) {
res.redirect(
utils.formatUrl({

@@ -181,13 +181,6 @@ if (!process.env.NS_NO_HTTPS_FORCE) {
if (req.secure) {
next()
} else {
logger.DebugLog(
`HTTPS ${req.method} redirect to: ${
'https://' + req.headers.host + req.url
}`,
'https',
1
)
if (req.method === 'POST') {
res.redirect(307, 'https://' + req.headers.host + req.url)
} else {

@@ -286,10 +279,7 @@ function setLogTimer() {
0,
1
)
logger.DebugLog(`Next daily action: ${night}`, 'daily', 1)
const msToMidnight = night.getTime() - now.getTime() + 10000
logger.DebugLog(`msToMidnight: ${msToMidnight}`, 'daily', 1)
logger.DebugLog(`Seconds To Midnight: ${msToMidnight / 1000}`, 'daily', 1)

if (msToMidnight < 0) {
logger.Log(

@@ -328,10 +318,8 @@ function rotateLog() {
}

function LogTimerAction() {
logger.DebugLog(`Running Log Timer Action`, 'daily', 1)
Object.keys(modules).forEach((key) => {
const module = modules[key]
logger.DebugLog(`Ckecking ${key}`, 'daily', 1)
if (module.dailyAction) {
try {
logger.Log(`Running daily action of ${key}`)

@@ -42,6 +42,7 @@ import {
import { countOfQdbs } from './qdbUtils'
import { WorkerResult } from '../worker/worker'
import { queueWork, msgAllWorker } from '../worker/workerPool'
import { publicDir } from './files'

// if a recievend question doesnt match at least this % to any other question in the db it gets
// added to db

@@ -112,8 +113,6 @@ export function processIncomingRequest(
dryRun: boolean,
user: User
): Promise<Array<Result>> {
logger.DebugLog('Processing incoming request', 'isadding', 1)

if (recievedData === undefined) {
logger.Log('\tRecieved data is undefined!', logger.GetColor('redbg'))
throw new Error('Recieved data is undefined!')

@@ -131,7 +130,6 @@ export function processIncomingRequest(
towrite +=
'\n------------------------------------------------------------------------------\n'
utils.AppendToFile(towrite, recDataFile)
logger.DebugLog('recDataFile written', 'isadding', 1)
} catch (err) {
logger.Log('Error writing recieved data.')
}

@@ -162,24 +160,14 @@ function processIncomingRequestUsingDb(
try {
const recievedQuestions: Question[] = []

logger.DebugLog('recievedData JSON parsed', 'isadding', 1)
logger.DebugLog(recievedData, 'isadding', 3)
const allQLength = recievedData.quiz.length
const questionSearchPromises: Promise<WorkerResult>[] = []
recievedData.quiz.forEach((question) => {
logger.DebugLog('Question:', 'isadding', 2)
logger.DebugLog(question, 'isadding', 2)
const currentQuestion = createQuestion(
question.Q,
question.A,
question.data
)
logger.DebugLog(
'Searching for question in subj ' + recievedData.subj,
'isadding',
3
)
logger.DebugLog(currentQuestion, 'isadding', 3)
if (isQuestionValid(currentQuestion)) {
recievedQuestions.push(currentQuestion)
// This here searches only in relevant subjects, and not all subjects

@@ -194,9 +182,6 @@ function processIncomingRequestUsingDb(
},
})
)
} else {
logger.DebugLog('Question isnt valid', 'isadding', 1)
logger.DebugLog(currentQuestion, 'isadding', 1)
}
})

@@ -228,18 +213,8 @@ function processIncomingRequestUsingDb(
addQuestionsToDb(newQuestions, subjName, qdb)

currWrites++
logger.DebugLog(
'currWrites for data.json: ' + currWrites,
'isadding',
1
)
if (currWrites >= writeAfter && !dryRun) {
currWrites = 0
logger.DebugLog(
'Writing data.json',
'isadding',
1
)
writeData(qdb.data, qdb.path)
}
}

@@ -251,14 +226,6 @@ function processIncomingRequestUsingDb(
allQLength
)

logger.DebugLog('New Questions:', 'isadding', 2)
logger.DebugLog(newQuestions, 'isadding', 2)

logger.DebugLog(
'ProcessIncomingRequest done',
'isadding',
1
)
resolve({
newQuestions: newQuestions,
subjName: recievedData.subj,

@@ -303,12 +270,6 @@ function addQuestionsToDb(
qdb: QuestionDb
) {
allQuestions.forEach((currentQuestion) => {
logger.DebugLog(
'Adding question with subjName: ' + subjName + ' :',
'isadding',
3
)
logger.DebugLog(currentQuestion, 'isadding', 3)
addQuestion(qdb.data, subjName, {
...currentQuestion,
data: {

@@ -447,7 +408,6 @@ export function shouldSearchDataFile(
}
}

logger.DebugLog(`no suitable dbs for ${testUrl}`, 'shouldSearchDataFile', 1)
return false
}

@@ -563,7 +523,7 @@ export function writeData(data: Array<Subject>, path: string): void {

export function backupData(questionDbs: Array<QuestionDb>): void {
questionDbs.forEach((data) => {
const path = './publicDirs/qminingPublic/backs/'
const path = publicDir + 'backs/'
utils.CreatePath(path)
try {
// logger.Log(`Backing up ${data.name}...`)

@@ -81,18 +81,12 @@ function GetSqlQuerry(
// -------------------------------------------------------------------------

function GetDB(path: string): Database {
utils.CreatePath(path)
utils.createDirsForFile(path)
const res = new Sqlite(path)
res.pragma('synchronous = OFF')
return res
}

function DebugLog(msg: string) {
if (debugLog) {
logger.DebugLog(msg, 'sql', 0)
}
}

function AddColumn(
db: Database,
table: string,

@@ -344,6 +338,8 @@ function PrepareStatement(db: Database, command: string) {
'DB is undefined in prepare statement! DB action called with undefined db'
)
}
DebugLog(command)
if (debugLog) {
logger.Log(command)
}
return db.prepare(command)
}

@@ -21,11 +21,17 @@ type FileDescriptor = {
description?: string
errorIfMissing?: boolean
warningIfMissing?: boolean
isDir?: boolean
}

export const validateAndSetupFiles = (): boolean => {
let everythingValid = true
Object.entries(files).forEach(([key, file]: [string, FileDescriptor]) => {
if (file.isDir) {
utils.CreatePath(file.path)
return
}

let fileExists = utils.FileExists(file.path)
if (file.defaultValue != null && !fileExists) {
// FIXME: create path too

@@ -313,6 +319,14 @@ export const files = {
keyFile: {
path: 'data/p2p/key',
},

// --------------------------------------------------------------------------------
// user files
// --------------------------------------------------------------------------------
userFilesDir: {
path: publicDir + 'userFiles',
isDir: true,
},
} as const satisfies Record<string, FileDescriptor>

export const paths = Object.entries(files).reduce(

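The hunk above cuts off at the reduce call that derives paths from the files registry; its body lies outside the diff context. A minimal sketch of the likely shape, assuming each entry's key is simply mapped to its path (consistent with the paths.userFilesDir call sites earlier in this commit):

    export const paths = Object.entries(files).reduce(
      (acc, [key, file]) => {
        acc[key] = file.path
        return acc
      },
      {} as Record<string, string>
    )
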
@@ -53,11 +53,6 @@ function getColoredDateString(): string {
return GetRandomColor(date.getHours().toString()) + dateString + C()
}

function DebugLog(msg: string | object, name: string, lvl: number): void {
console.warn('deprecated')
console.log(msg, name, lvl)
}

function Log(msg: string | object, color?: string): void {
if (loggingDisabled) return

@@ -397,7 +392,6 @@ function logTable(
export default {
getColoredDateString: getColoredDateString,
Log: Log,
DebugLog: DebugLog,
GetColor: GetColor,
LogReq: LogReq,
LogStat: LogStat,

@@ -269,21 +269,11 @@ function compareData(q1: Question, q2: Question): number {
return -1
} else if (dataType === 'image') {
return compareImage(q1.data, q2.data)
} else {
logger.DebugLog(
`Unhandled data type ${dataType}`,
'Compare question data',
1
)
logger.DebugLog(q1, 'Compare question data', 2)
}
} else {
return 0
}
} catch (error) {
logger.DebugLog('Error comparing data', 'Compare question data', 1)
logger.DebugLog(error.message, 'Compare question data', 1)
logger.DebugLog(error, 'Compare question data', 2)
console.error(error)
}
return 0

@@ -370,9 +360,6 @@ export function addQuestion(
subj: string,
question: Question
): void {
logger.DebugLog('Adding new question with subjName: ' + subj, 'qdb add', 1)
logger.DebugLog(question, 'qdb add', 3)

const i = data.findIndex((subject) => {
return (
subject.Name &&

@@ -383,7 +370,6 @@ export function addQuestion(
})

if (i !== -1) {
logger.DebugLog('Adding new question to existing subject', 'qdb add', 1)
data[i].Questions.push(question)
} else {
logger.Log(`Creating new subject: "${subj}"`)

@@ -22,10 +22,10 @@ export default {
ReadFile: ReadFile,
ReadJSON: ReadJSON,
WriteFile: WriteFile,
writeFileAsync: writeFileAsync,
AppendToFile: AppendToFile,
FileExists: FileExists,
CreatePath: CreatePath,
createDirsForFile: createDirsForFile,
WatchFile: WatchFile,
ReadDir: ReadDir,
CopyFile: CopyFile,

@@ -151,48 +151,26 @@ function WatchFile(file: string, callback: Function): void {
}
}

function CreatePath(path: string, onlyPath?: boolean): void {
if (FileExists(path)) {
return
function createDirsForFile(path: string) {
let pathPart = path
if (pathPart.endsWith('/')) {
pathPart = pathPart.slice(0, -1)
}
pathPart = path.split('/').slice(0, -1).join('/')
CreatePath(pathPart)
}

const spath = path.split('/')
let currDir = spath[0]
for (let i = 1; i < spath.length; i++) {
if (currDir !== '' && !fs.existsSync(currDir)) {
try {
fs.mkdirSync(currDir)
} catch (err) {
console.log('Failed to make ' + currDir + ' directory... ')
console.error(err)
}
}
currDir += '/' + spath[i]
}
if (onlyPath) {
fs.mkdirSync(path)
}
function CreatePath(path: string): void {
fs.mkdirSync(path, { recursive: true })
}

function WriteFile(content: string, path: string): void {
CreatePath(path)
createDirsForFile(path)
fs.writeFileSync(path, content)
}

function writeFileAsync(content: string, path: string): void {
CreatePath(path)
fs.writeFile(path, content, function (err) {
if (err) {
logger.Log(
'Error writing file: ' + path + ' (sync)',
logger.GetColor('redbg')
)
}
})
}

function AppendToFile(data: string, file: string): void {
CreatePath(file)
createDirsForFile(file)
try {
fs.appendFileSync(file, '\n' + data)
} catch (err) {

@@ -241,7 +219,7 @@ function uploadFile(
const file = req.files.file
// FIXME: Object.keys(req.files).forEach((file) => { saveFile() })

CreatePath(path, true)
createDirsForFile(path)

let fileName = file.name.replace(/\.+/g, '.').replace(/\/+/g, '/')
let fileDestination = path + '/' + fileName

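This utils hunk is the core of the "create path refactor" named in the commit message: the old CreatePath walked the path segment by segment and only created the final segment when the onlyPath flag was set; it is replaced by a one-line recursive mkdir plus a separate createDirsForFile helper that strips the file name and creates only the parent directories. A rough migration sketch for callers (someDir and someFile are placeholder names, not identifiers from the repository):

    utils.CreatePath(someDir)          // was: utils.CreatePath(someDir, true)
    utils.createDirsForFile(someFile)  // was: utils.CreatePath(someFile), parents only
    utils.WriteFile(content, someFile) // WriteFile and AppendToFile now call createDirsForFile themselves
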
@@ -1,7 +1,6 @@
import { parentPort } from 'node:worker_threads'
import { QuestionDb } from '../../types/basicTypes'
import { Edits, editDb } from '../../utils/actions'
import logger from '../../utils/logger'

export type DbEditTaskObject = {
type: 'dbEdit'

@@ -25,7 +24,6 @@ export const handleDbEdit = async (
}
})
)
logger.DebugLog(`Worker db edit ${workerIndex}`, 'worker update', 1)

parentPort.postMessage({
msg: `From thread #${workerIndex}: db edit`,

@@ -1,6 +1,5 @@
import { parentPort } from 'node:worker_threads'
import { QuestionDb } from '../../types/basicTypes'
import logger from '../../utils/logger'
import { createQuestion } from '../../utils/qdbUtils'
import { Result } from '../../utils/actions'

@@ -72,7 +71,6 @@ export const handleNewQuestions = async (
})
)
}
logger.DebugLog(`Worker new question ${workerIndex}`, 'worker update', 1)

parentPort.postMessage({
msg: `From thread #${workerIndex}: update done`,

@@ -48,7 +48,6 @@ export function doSearch(
.toLowerCase()
.includes(getSubjNameWithoutYear(subj.Name).toLowerCase())
) {
logger.DebugLog(`Searching in ${subj.Name} `, 'searchworker', 2)
const subjRes = searchSubject(
subj,
questionToSearch,

@@ -72,11 +71,6 @@ export function doSearch(
result.length === 0 ||
result[0].match < minMatchToNotSearchOtherSubjects
) {
logger.DebugLog(
'Reqults length is zero when comparing names, trying all subjects',
'searchworker',
1
)
data.every((subj) => {
const subjRes = searchSubject(
subj,

@@ -102,14 +102,12 @@ function handleWorkerError(worker: WorkerObj, err: Error) {

// TODO: accuire all workers here, and handle errors so they can be removed if threads exit
export function msgAllWorker(data: TaskObject): Promise<WorkerResult[]> {
logger.DebugLog('MSGING ALL WORKER', 'job', 1)
return new Promise((resolve) => {
const promises: Promise<WorkerResult>[] = []
workers.forEach((worker) => {
promises.push(queueWork(data, worker.index))
})
Promise.all(promises).then((res) => {
logger.DebugLog('MSGING ALL WORKER DONE', 'job', 1)
resolve(res)
})
})

@@ -117,7 +115,6 @@ export function msgAllWorker(data: TaskObject): Promise<WorkerResult[]> {

export function setPendingJobsAlertCount(newVal?: number): void {
const count = newVal != null ? newVal : defaultAlertOnPendingCount
logger.DebugLog('setPendingJobsAlertCount: ' + count, 'job', 1)
alertOnPendingCount = count
}

@@ -190,29 +187,16 @@ function processJob() {
if (!isNaN(pendingJobs[jobKey].targetWorkerIndex)) {
if (workers[pendingJobs[jobKey].targetWorkerIndex].free) {
freeWorker = workers[pendingJobs[jobKey].targetWorkerIndex]
logger.DebugLog(
`RESERVING WORKER ${pendingJobs[jobKey].targetWorkerIndex}`,
'job',
1
)
}
} else {
freeWorker = workers.find((worker) => {
return worker.free
})
if (freeWorker) {
logger.DebugLog(
`RESERVING FIRST AVAILABLE WORKER ${freeWorker.index}`,
'job',
1
)
}
}
i++
}

if (!freeWorker) {
logger.DebugLog('NO FREE WORKER', 'job', 1)
return
}