Mirror of https://gitlab.com/MrFry/mrfrys-node-server, synced 2025-04-01 20:24:18 +02:00
Added a bit more advanced file existence checking, removed vhosts in favor of routes
parent ba89f4a342
commit 113a114821
24 changed files with 2720 additions and 2474 deletions
@@ -526,7 +526,7 @@ function handleWorkerData() {
     } catch (e) {
       console.error(e)
       parentPort.postMessage({
-        msg: `From thread #${workerIndex}: Invalid message type (${msg.type})!`,
+        msg: `From thread #${workerIndex}: unhandled error occured!`,
         workerIndex: workerIndex,
         e: e,
       })
src/utils/files.ts (new file, 246 lines added)
@@ -0,0 +1,246 @@
+import { Schema } from 'jsonschema'
+import {
+  TestUsersSchema,
+  isJsonValidAndLogError,
+  PeersInfoSchema,
+  PeerInfoSchema,
+  ModulesSchema,
+} from '../types/typeSchemas'
+import logger from './logger'
+import utils from './utils'
+
+// FIXME: remove all file exists checks from everywhere for files that are created / checked here
+
+type FileDescriptor = {
+  path: string
+  schema?: Schema
+  defaultValue?: string
+  shouldBe?: string
+  description?: string
+}
+
+export const validateFiles = (): boolean => {
+  let everythingValid = true
+  Object.entries(files).forEach(([key, file]: [string, FileDescriptor]) => {
+    let fileExists = utils.FileExists(file.path)
+    if (file.defaultValue != null && !fileExists) {
+      utils.WriteFile(file.defaultValue, file.path)
+      fileExists = true
+    }
+
+    if (file.shouldBe && !fileExists) {
+      const errMsg = [`File "${file.path}" does not exist! (${key})`]
+      if (file.shouldBe) {
+        errMsg.push(`Should be: ${file.shouldBe}`)
+      }
+      logger.Log(errMsg.join(' '), 'redbg')
+      everythingValid = false
+      return
+    }
+
+    if (file.schema && fileExists) {
+      const val = utils.ReadJSON(file.path)
+      if (!isJsonValidAndLogError(val, file.schema, file.path)) {
+        everythingValid = false
+        return
+      }
+    }
+  })
+
+  return everythingValid
+}
+
+export const files = {
+  // --------------------------------------------------------------------------------
+  // server / modules files
+  // --------------------------------------------------------------------------------
+  serverPath: {
+    path: 'dist/server.js',
+    shouldBe:
+      'server main entry file, created after running "npm run build"',
+  },
+  qminingPageDir: {
+    path: 'submodules/qmining-page',
+    shouldBe:
+      'qmining page submodule directory, created by pulling submodules / setup script',
+  },
+  qminingIndexPath: {
+    path: 'nextStatic/qminingPagePublic/index.html',
+    shouldBe:
+      'qmining page-s build index.html, created by "npm run build" in qmining page submodule dir',
+  },
+  dataEditorPageDir: {
+    path: 'submodules/qmining-data-editor',
+    shouldBe:
+      'qmining data editor page submodule directory, created by pulling submodules / setup script',
+  },
+  dataEditorIndexPath: {
+    path: 'nextStatic/dataEditorPublic/index.html',
+    shouldBe:
+      'qmining data editor-s build index.html, created by "npm run build" in qmining data editor submodule dir',
+  },
+  moodleTestUserscriptDir: {
+    path: 'submodules/moodle-test-userscript',
+    shouldBe:
+      'moodle test userscript submodule directory, created by pulling submodules / setup script',
+  },
+  moodleTestUserscriptPath: {
+    path: 'submodules/moodle-test-userscript/stable.user.js',
+    shouldBe:
+      'moodle test userscript file, created by pulling submodules / setup script',
+  },
+
+  domainFile: {
+    path: 'data/domain',
+    shouldBe:
+      'server domain for cookies and stuff, for ex.: "frylabs.net", no "http://" and things like that, just the domain',
+  },
+  // --------------------------------------------------------------------------------
+  // stats files
+  // --------------------------------------------------------------------------------
+  registeredScriptsFile: {
+    path: 'stats/registeredScripts.json',
+    defaultValue: JSON.stringify([]),
+  },
+  askedQuestionFile: {
+    path: 'stats/askedQuestions',
+    description: 'text file of recieved data on /ask',
+  },
+  recievedQuestionFile: {
+    path: 'stats/recievedQuestions',
+    description: 'text file of recieved data on /isAdding',
+  },
+  dailyDataCountFile: {
+    path: 'stats/dailyDataCount',
+    description: 'text file of daily data count',
+  },
+  dataEditsLog: {
+    path: 'stats/dataEdits',
+    description: 'text file of data edit logs',
+  },
+
+  // --------------------------------------------------------------------------------
+  // https files
+  // --------------------------------------------------------------------------------
+  privkeyFile: {
+    path: '/etc/letsencrypt/live/frylabs.net/privkey.pem',
+    description: 'private key file for https',
+  },
+  fullchainFile: {
+    path: '/etc/letsencrypt/live/frylabs.net/fullchain.pem',
+    description: 'full chain key file for https',
+  },
+  chainFile: {
+    path: '/etc/letsencrypt/live/frylabs.net/chain.pem',
+    description: 'chain key file for https',
+  },
+
+  // --------------------------------------------------------------------------------
+  // api files
+  // --------------------------------------------------------------------------------
+  rootRedirectToFile: {
+    path: 'data/apiRootRedirectTo',
+    description: 'url to redirect users trying to acces root api path',
+  },
+  modulesFile: {
+    path: './src/modules.json',
+    shouldBe: 'module files for server',
+    schema: ModulesSchema,
+  },
+  extraModulesFile: {
+    path: './src/extraModules/extraModules.json',
+    description: 'extra private modules for server, not tracked by git',
+    schema: ModulesSchema,
+  },
+  statExcludeFile: {
+    path: './data/statExclude.json',
+    shouldBe:
+      'array of strings which if included in requests url-s then the request itself is not counted in stats',
+    defaultValue: JSON.stringify([]),
+    schema: { type: 'array', items: { type: 'string' } },
+  },
+  usersDBPath: {
+    path: './data/dbs/users.db',
+    shouldBe: 'users sqlite db file',
+  },
+
+  // --------------------------------------------------------------------------------
+  // qmining api
+  // --------------------------------------------------------------------------------
+  testUsersFile: {
+    path: 'data/testUsers.json',
+    defaultValue: JSON.stringify({ userIds: [] }),
+    schema: TestUsersSchema,
+    description: 'test users, which are excluded from stats',
+  },
+
+  // --------------------------------------------------------------------------------
+  // log files
+  // --------------------------------------------------------------------------------
+  vlogDir: {
+    path: 'stats/vlogs/',
+    description: 'verbose logs directory',
+  },
+  logDir: {
+    path: 'stats/logs/',
+    description: 'basic logs directory',
+  },
+  statFile: {
+    path: 'stats/stats',
+    defaultValue: JSON.stringify({}),
+    description: 'json of visited paths all time',
+  },
+  vStatFile: {
+    path: 'stats/vstats',
+    defaultValue: JSON.stringify({}),
+    description: 'json of visited paths by day',
+  },
+  uStatsFile: {
+    path: 'stats/ustats',
+    defaultValue: JSON.stringify({}),
+    description: 'json of visits per user',
+  },
+  uvStatsFile: {
+    path: 'stats/uvstats',
+    defaultValue: JSON.stringify({}),
+    description: 'json of visits per user by day',
+  },
+  nologFile: {
+    path: './data/nolog',
+    defaultValue: '',
+    description:
+      'text file of users seperated by new lines to ignore in logging / stats',
+  },
+
+  // --------------------------------------------------------------------------------
+  // peer files
+  // --------------------------------------------------------------------------------
+  peersPath: {
+    path: 'data/p2p/',
+    description: 'p2p files directory',
+  },
+  peersFile: {
+    path: 'data/p2p/peers.json',
+    description: 'json of list of peers',
+    defaultValue: JSON.stringify([]),
+    schema: PeersInfoSchema,
+  },
+  selfInfoFile: {
+    path: 'data/p2p/selfInfo.json',
+    description: 'json of info of this servers peer functionality',
+    defaultValue: JSON.stringify({}),
+    schema: PeerInfoSchema,
+  },
+  thirdPartyPeersFile: {
+    path: 'data/p2p/thirdPartyPeers.json',
+    description: 'json of third party peers reported by other peers',
+  },
+  keyFile: {
+    path: 'data/p2p/key',
+  },
+} as const satisfies Record<string, FileDescriptor>
+
+export const paths = Object.entries(files).reduce(
+  (acc, [key, value]) => ({ ...acc, [key]: value.path }),
+  {}
+) as Record<keyof typeof files, string>
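For orientation, here is a minimal sketch of how these new exports might be wired up at server startup. It is not part of this commit; the entry-point location and the exit-on-failure policy are assumptions.

// Hypothetical startup wiring (not in this commit): run validateFiles() once at
// boot and refuse to start if a required file is missing or fails its schema.
// The import paths assume the entry file sits in src/, next to utils/.
import { validateFiles, paths } from './utils/files'
import logger from './utils/logger'

if (!validateFiles()) {
  logger.Log('File validation failed, refusing to start', 'redbg')
  process.exit(1)
}

// After validation, other modules read locations from `paths` instead of
// hard-coding strings, e.g.:
logger.Log(`Users DB expected at ${paths.usersDBPath}`)

As the new validateFiles shows, entries with a defaultValue are created on the spot when missing, entries with shouldBe must already exist, and entries with a schema are additionally validated against it.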
@@ -26,14 +26,7 @@ const DELIM = C('green') + '|' + C()
 // import express from 'express'
 import utils from '../utils/utils'
 import { Request } from '../types/basicTypes'
-
-const vlogDir = 'stats/vlogs/'
-const logDir = 'stats/logs/'
-const statFile = 'stats/stats'
-const vStatFile = 'stats/vstats'
-const uStatsFile = 'stats/ustats'
-const uvStatsFile = 'stats/uvstats'
-const nologFile = './data/nolog'
+import { paths } from './files'
 
 const colors = ['green', 'red', 'yellow', 'blue', 'magenta', 'cyan']
 const logFileName = 'log'
@@ -93,7 +86,7 @@ function Log(msg: string | object, color?: string): void {
   }
   utils.AppendToFile(
     typeof log === 'string' ? log : JSON.stringify(log),
-    logDir + logFileName
+    paths.logDir + logFileName
   )
 }
 
@@ -156,7 +149,10 @@ function LogReq(
   }
   logEntry += GetRandomColor(uid.toString()) + uid + C() + dl
 
-  logEntry += GetRandomColor(req.url.split('?')[0]) + req.url + C()
+  logEntry +=
+    GetRandomColor(req.originalUrl.split('?')[0]) +
+    req.originalUrl +
+    C()
 
   if (statusCode !== undefined) {
     logEntry += dl + statusCode
@@ -168,7 +164,7 @@ function LogReq(
     } else {
       const defLogs = utils.GetDateString() + dl + logEntry
 
-      utils.AppendToFile(defLogs, vlogDir + logFileName)
+      utils.AppendToFile(defLogs, paths.vlogDir + logFileName)
     }
   } catch (err) {
     console.error(err)
@@ -187,17 +183,19 @@ function parseNoLogFile(newData: string) {
 }
 
 function setNoLogReadInterval() {
-  utils.WatchFile(nologFile, (newData: string) => {
+  utils.WatchFile(paths.nologFile, (newData: string) => {
     parseNoLogFile(newData)
     Log('No Log user ID-s changed: ' + noLogIds.join(', '))
   })
 
-  parseNoLogFile(utils.ReadFile(nologFile))
+  parseNoLogFile(utils.ReadFile(paths.nologFile))
 }
 
 function Load(): void {
   try {
-    uvData = JSON.parse(utils.ReadFile(uStatsFile))
+    if (utils.FileExists(paths.uStatsFile)) {
+      uvData = JSON.parse(utils.ReadFile(paths.uStatsFile))
+    }
   } catch (err) {
     Log(
       'Error at loading logs! (@ first run its normal)',
@@ -207,7 +205,9 @@ function Load(): void {
   }
 
   try {
-    udvData = JSON.parse(utils.ReadFile(uvStatsFile))
+    if (utils.FileExists(paths.uvStatsFile)) {
+      udvData = JSON.parse(utils.ReadFile(paths.uvStatsFile))
+    }
   } catch (err) {
     Log(
       'Error at loading logs! (@ first run its normal)',
@@ -217,7 +217,9 @@ function Load(): void {
   }
 
   try {
-    vData = utils.ReadJSON(statFile)
+    if (utils.FileExists(paths.statFile)) {
+      vData = utils.ReadJSON(paths.statFile)
+    }
   } catch (err) {
     Log(
      'Error at loading logs! (@ first run its normal)',
@@ -227,7 +229,9 @@ function Load(): void {
   }
 
   try {
-    dvData = utils.ReadJSON(vStatFile)
+    if (utils.FileExists(paths.vStatFile)) {
+      dvData = utils.ReadJSON(paths.vStatFile)
+    }
   } catch (err) {
     Log(
       'Error at loading visit logs! (@ first run its normal)',
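The four guarded loads above all repeat the same exists-then-parse shape. A small helper could collapse them into one-liners; this is only a sketch of that refactor, not something the commit contains, and readJsonIfExists is a made-up name.

// Hypothetical helper (not in this commit): returns a fallback instead of
// throwing when a stat file has not been created yet (e.g. on first run).
// Imports mirror the ones used in the logger file above.
import utils from '../utils/utils'
import { paths } from './files'

function readJsonIfExists<T>(path: string, fallback: T): T {
  if (!utils.FileExists(path)) return fallback
  try {
    return JSON.parse(utils.ReadFile(path)) as T
  } catch (err) {
    console.error(err)
    return fallback
  }
}

// Load() could then reduce to:
// uvData = readJsonIfExists(paths.uStatsFile, {})
// udvData = readJsonIfExists(paths.uvStatsFile, {})
// vData = readJsonIfExists(paths.statFile, {})
// dvData = readJsonIfExists(paths.vStatFile, {})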
@@ -329,26 +333,26 @@ function Save() {
   writes++
   if (writes === writeInterval) {
     try {
-      utils.WriteFile(JSON.stringify(uvData), uStatsFile)
+      utils.WriteFile(JSON.stringify(uvData), paths.uStatsFile)
     } catch (err) {
       Log('Error at writing logs! (more in stderr)', GetColor('redbg'))
       console.error(err)
     }
     try {
-      utils.WriteFile(JSON.stringify(udvData), uvStatsFile)
+      utils.WriteFile(JSON.stringify(udvData), paths.uvStatsFile)
     } catch (err) {
       Log('Error at writing logs! (more in stderr)', GetColor('redbg'))
       console.error(err)
     }
     try {
-      utils.WriteFile(JSON.stringify(vData), statFile)
+      utils.WriteFile(JSON.stringify(vData), paths.statFile)
       // Log("Stats wrote.");
     } catch (err) {
       Log('Error at writing logs! (more in stderr)', GetColor('redbg'))
       console.error(err)
     }
     try {
-      utils.WriteFile(JSON.stringify(dvData), vStatFile)
+      utils.WriteFile(JSON.stringify(dvData), paths.vStatFile)
       // Log("Stats wrote.");
     } catch (err) {
       Log(
@@ -463,8 +467,6 @@ export default {
   hr: hr,
   C: C,
   logFileName: logFileName,
-  logDir: logDir,
-  vlogDir: vlogDir,
   setLoggingDisabled: setLoggingDisabled,
   logTable: logTable,
 }
@@ -46,7 +46,7 @@ import fs from 'fs'
 import { v4 as uuidv4 } from 'uuid'
 import logger from '../utils/logger'
 
-import constants from '../constants.json'
+import { paths } from './files'
 import { Request } from '../types/basicTypes'
 
 interface URLFormatOptions {
@@ -115,7 +115,7 @@ function ReadDir(path: string, listHidden?: boolean): Array<string> {
   }
 }
 
-function ReadJSON(name: string): any {
+function ReadJSON<T = any>(name: string): T {
   try {
     return JSON.parse(ReadFile(name))
   } catch (err) {
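With the added type parameter, callers of ReadJSON can state the expected shape at the call site, while untyped calls keep the old any behaviour. A caller-side illustration (not in this commit; the Peer shape is a placeholder, not the project's real type, and the import paths assume a module next to the utils file):

// Illustrative only.
import utils from './utils'
import { paths } from './files'

type Peer = {
  host: string
  port: number
}

const peers = utils.ReadJSON<Peer[]>(paths.peersFile) // typed as Peer[]
const stats = utils.ReadJSON(paths.statFile) // defaults to any, same as before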
@@ -329,7 +329,7 @@ function getGitRevision(dir: string): string {
 }
 
 function getScriptVersion(): string {
-  const scriptContent = ReadFile(constants.moodleTestUserscriptPath)
+  const scriptContent = ReadFile(paths.moodleTestUserscriptPath)
 
   let temp: string | string[] = scriptContent.split('\n').find((x) => {
     return x.includes('@version')
@@ -186,7 +186,7 @@ export function initWorkerPool(
   if (process.env.NS_THREAD_COUNT) {
     logger.Log(
       `Setting thread count from enviroment variable NS_WORKER_COUNT: '${threadCount}'`,
-      logger.GetColor('red')
+      'yellowbg'
     )
   }
 