added a bit more advanced file existence checking, removed vhosts in favor of routes

This commit is contained in:
mrfry 2023-03-29 19:10:44 +02:00
parent ba89f4a342
commit 113a114821
24 changed files with 2720 additions and 2474 deletions

View file

@ -1,74 +1,5 @@
# Multifunkcionális Express.js szerver
# Express.js server
## Rövid leírás:
Ez egy Express.js-re épülő node.js szerver, ami egyszerűen kezelhető modul-rendszerrel és különböző alap funkciókkal lett kiegészítve.
## Setup
## Telepítés / Indítás
Bővebben a `devel/readme.md` -ben
## Eddigi modulok
### qmining
Backend és weboldal a [Moodle/Kmooc teszt megoldó kliens](https://gitlab.com/MrFry/moodle-test-userscript)hez. A `./public` mappában található `data.json` fájlból tölti be az összes kérdést, és szolgálja ki a klienseket. Beállított időnként ebbe menti az új kérdéseket is, és a `./public/backs` mappába másol biztonsági mentéseket. Főoldalán `./public/qa` fájlból jeleníti meg a felhasználók kérdéseit, és az üzemeltető válaszait, amit manuálisan kell szerkeszteni.
Fontosabb oldalak:
név | leírás
--- | ---
/legacy| Összes kérdés/válasz egy oldalon
/isAdding| Erre a címre POST-olja a kliens az új kérdéseket
/ask | Ezt a címet kéri le paraméterezve a kliens ha kérdésre keres
### stuff
Egyszerű fájlböngésző, ami a `./public/files` mappa tartalmát listázza ki böngészőben
### sio
Egyszerű oldal a szerverre való fájlfeltöltés megkönnyítésére
### main
Főoldal / minta modul
# Üzemelés
## Új modul létrehozása
Javasolt a 'main' modul másolása és átnevezése a `./modules` mappában, abban szinte minden alapvető funkció benne van. Majd a `./modules.json` fájlba egy új bejegyzést kell létrehozni a többi alapján. Ezt a `./extraModules` fájlban is meg lehet tenni, ami csak azért létezik hogy privát modulokat ne kelljen git-re feltölteni.
A szerver `vhost` csomagot használ, és több aldomainje van, amikre érdemes figyelni
## ./stats mappa
Ebben található az összes statisztika és naplófájl
név | leírás
--- | ---
./stats/logs | részletes request napló
./stats/nlogs | fontosabb request napló
./stats/stats | összes lekért oldal JSON
./stats/vstats | napokba rendezett összes lekért oldal JSON
./stats/idstats | Összes kérdés hozzáadó kliens egyedi azonosító statisztika JSON
./stats/idvstats | Összes kérdés hozzáadó kliens egyedi azonosító napokba rendezve JSON
## ./utils mappa
Különböző hasznos eszközök
név | leírás
--- | ---
logger.js | minden naplózást kezel
dbcheck.js | paraméterként kapott adatbázist ellenőrzi, hogy van-e a kérdéseknek `.Q` propertyje, ami a régi fajta módszernél volt használatos
actions.js | qmining modul beérkező kérdés feldolgozás
utils.js | alapvető eszközök, pl fájl beolvasás
motd.js | `data.json` és ./public/motd -be írja a paraméterként kapott szöveget
ids.js | egyedi felhasználó azonosítókat írja statisztika fájlba
dataUpdater.js | régifajta adatbázist, amiben még van `.Q` propertyjű kérdés alakít át
changedataversion.js | `data.json`-ban és a ./public/version ban írja át a teszt megoldó kliens aktuális verzióját
merge.sh | Biztonsági mentést készít, és egyszerűsíti az adatbázist, majd felülírja az újjal
classes.js | Összehasonlításhoz és tároláshoz szükséges osztályok
dbSetup.js | Üres / előre userrel feltöltött adatbázist hoz létre
dbtools.js | Javascript wrapper gyakran használt SQL utasításokhoz
rmDuplicates.js | Paraméterként átadott JSON kérdés adatbázisból távolítja el az ugyanolyan kérdéseket
runSqliteCmds.sh | Paraméterként átadott adatbázison futtatja a második paraméterben található Sqlite parancsokat
# Egyéb
Jelenleg sok optimalizálatlan rész található benne, cél ezek kijavítása, szépítése
# Licensz:
GPLv3
Run `./scripts/setup.sh`, then `npm run dev` for development, or `npm run start` for prod

4240
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -31,7 +31,7 @@
"@types/cookie-parser": "^1.4.2",
"@types/express-fileupload": "^1.2.2",
"@types/jest": "^27.4.1",
"@types/node": "^17.0.21",
"@types/node": "^18.15.11",
"@types/uuid": "^8.3.4",
"@types/vhost": "^3.0.4",
"@typescript-eslint/eslint-plugin": "^5.15.0",

View file

@ -8,7 +8,7 @@ hr() {
log() {
hr
echo -e "\033[0;32m${@}\033[0m"
echo -e "\033[0;34m${@}\033[0m"
hr
}
@ -33,6 +33,7 @@ makeNextSubmodule() {
npm audit fix > /dev/null 2> /dev/null
npm run export
popd || exit
// TODO: check if link exists
linkTarget="$PWD/nextStatic/${2}"
if [ ! -f "${linkTarget}" ]; then
ln -sf "$PWD/submodules/${1}/out" "${linkTarget}"
@ -45,8 +46,8 @@ checkFile "$PWD/src/server.ts"
checkFile "$PWD/package.json"
checkFile "$PWD/package-lock.json"
git pull
git submodule update --init --recursive
git pull || exit
git submodule update --init --recursive || exit
log "Making project"
@ -54,7 +55,6 @@ npm install
npm audit fix > /dev/null 2> /dev/null
npm run export
log "Seting up next.js static stuff..."
mkdir "$PWD/nextStatic"
# ------------------------------------------------------------------------------------
@ -71,11 +71,11 @@ ln -sfv "$PWD/submodules/moodle-test-userscript/stable.user.js" "$PWD/publicDirs
log "mkdir-ing/touching :3"
# TODO: make server create these itself
mkdir -p stats
mkdir -p stats/logs
mkdir -p stats/vlogs
mkdir -p data
mkdir -p data/dbs
mkdir -pv stats
mkdir -pv stats/logs
mkdir -pv stats/vlogs
mkdir -pv data
mkdir -pv data/dbs
touch data/nolog
#JSONS
@ -92,7 +92,7 @@ touch publicDirs/qminingPublic/motd
if [ -z "$(ls -A ./data/dbs)" ]; then
log "Making DB-s"
pushd src/standaloneUtils || exit
NS_LOGLEVEL=2 node dbSetup.js
NS_SQL_DEBUG_LOG=1 NS_LOGLEVEL=2 node dbSetup.js
mv ./*.db ../../data/dbs
popd || exit
fi
@ -100,4 +100,4 @@ fi
log "Disabling next telemetry"
npx --yes next telemetry disable
log "Done!"
log "Done! development mode: 'npm run dev', prod mode: 'npm run start', tests: 'npm run test'"

View file

@ -1,9 +1,3 @@
{
"serverPath": "dist/server.js",
"qminingPageDir": "submodules/qmining-page",
"qminingIndexPath": "nextStatic/qminingPagePublic/index.html",
"dataEditorPageDir": "submodules/qmining-data-editor",
"dataEditorIndexPath": "nextStatic/dataEditorPublic/index.html",
"moodleTestUserscriptDir": "submodules/moodle-test-userscript",
"moodleTestUserscriptPath": "submodules/moodle-test-userscript/stable.user.js"
"savedQuestionsFileName": "savedQuestions.json"
}

View file

@ -1,48 +1,30 @@
{
"dataEditor": {
"path": "./modules/dataEditor/dataEditor.js",
"publicdirs": [
"publicDirs/qminingPublic/"
],
"publicdirs": ["publicDirs/qminingPublic/"],
"nextdir": "nextStatic/dataEditorPublic",
"name": "dataeditor",
"urls": [
"dataeditor.frylabs.net"
],
"route": "/data-editor",
"isNextJs": true
},
"qmining": {
"path": "./modules/qmining/qmining.js",
"publicdirs": [
"publicDirs/qminingPublic/"
],
"publicdirs": ["publicDirs/qminingPublic/"],
"nextdir": "nextStatic/qminingPagePublic",
"name": "qmining",
"urls": [
"qmining.frylabs.net"
],
"route": "/",
"isNextJs": true
},
"api": {
"path": "./modules/api/api.js",
"publicdirs": [
"publicDirs/qminingPublic/"
],
"publicdirs": ["publicDirs/qminingPublic/"],
"name": "api",
"urls": [
"api.frylabs.net",
"localhost"
]
"route": "/api"
},
"main": {
"path": "./modules/main/main.js",
"publicdirs": [
"publicDirs/public/"
],
"publicdirs": ["publicDirs/public/"],
"name": "main",
"urls": [
"frylabs.net",
"www.frylabs.net"
]
"route": "/frylabs"
}
}

View file

@ -40,11 +40,9 @@ import {
} from '../../types/basicTypes'
import { loadJSON } from '../../utils/actions'
import { initWorkerPool } from '../../utils/workerPool'
import { paths } from '../../utils/files'
// files
const rootRedirectToFile = 'data/apiRootRedirectTo'
// other constants
// other paths
const moduleName = 'API'
// stuff gotten from server.js
@ -106,14 +104,14 @@ function GetApp(): ModuleType {
let rootRedirectURL = ''
function reloadRootRedirectURL() {
if (utils.FileExists(rootRedirectToFile)) {
rootRedirectURL = utils.ReadFile(rootRedirectToFile)
if (utils.FileExists(paths.rootRedirectToFile)) {
rootRedirectURL = utils.ReadFile(paths.rootRedirectToFile)
}
}
const filesToWatch = [
{
fname: rootRedirectToFile,
fname: paths.rootRedirectToFile,
logMsg: 'Root redirect URL changed',
action: reloadRootRedirectURL,
},
@ -142,7 +140,7 @@ function GetApp(): ModuleType {
app.get('/', function (req: Request, res: any) {
logger.LogReq(req)
if (reloadRootRedirectURL) {
if (rootRedirectURL) {
res.redirect(rootRedirectURL)
} else {
res.json({ msg: 'hi c:' })
@ -154,6 +152,9 @@ function GetApp(): ModuleType {
const dbsFile = publicDir + 'questionDbs.json'
// FIXME: is dataFiles only a temp variable? does this cause any problems?
if (!utils.FileExists(dbsFile)) {
utils.WriteFile('[]', dbsFile)
}
const dataFiles: Array<DataFile> = utils.ReadJSON(dbsFile)
let questionDbs: Array<QuestionDb> = loadJSON(dataFiles, publicDir)
initWorkerPool(() => questionDbs)
@ -176,14 +177,6 @@ function GetApp(): ModuleType {
// -------------------------------------------------------------------------------------------
app.get('*', function (_req: Request, res: any) {
res.status(404).render('404')
})
app.post('*', function (_req: Request, res: any) {
res.status(404).render('404')
})
function DailyAction() {
submoduleDatas.forEach((data) => {
if (data.dailyAction) {

View file

@ -51,4 +51,4 @@ const DbStruct = {
},
}
exports = DbStruct
exports.default = DbStruct

View file

@ -55,11 +55,11 @@ import {
removeCacheFromQuestion,
} from '../../../utils/qdbUtils'
import {
isJsonValidAndLogError,
PeersInfoSchema,
SelfInfoSchema,
validateJSON,
} from '../../../types/typeSchemas'
import constants from '../../../constants.json'
import { paths } from '../../../utils/files'
// TODO: remove FINALIZE-s and TOTEST-s
@ -301,7 +301,7 @@ function writeNewData(
function updateLastSync(selfInfo: PeerInfo, newDate: number) {
utils.WriteFile(
JSON.stringify({ ...selfInfo, lastSync: newDate }, null, 2),
selfInfoFile
paths.selfInfoFile
)
}
@ -321,14 +321,6 @@ function setupQuestionsForMerge(qdb: QuestionDb, peer: PeerInfo) {
}
}
// files
const peersPath = 'data/p2p/'
const peersFile = peersPath + 'peers.json'
// writes it)
const selfInfoFile = peersPath + 'selfInfo.json'
const thirdPartyPeersFile = peersPath + 'thirdPartyPeers.json'
const keyFile = peersPath + 'key' // key.pub key.priv
function setup(data: SubmoduleData): Submodule {
const {
app,
@ -344,37 +336,32 @@ function setup(data: SubmoduleData): Submodule {
// SETUP
// ---------------------------------------------------------------------------------------
if (!utils.FileExists(peersFile)) {
if (!utils.FileExists(paths.peersFile)) {
logger.Log(
`Warning: peers file was missing, so it was created`,
'yellowbg'
)
utils.CreatePath(peersPath)
utils.WriteFile('[]', peersFile)
utils.CreatePath(paths.peersPath)
utils.WriteFile('[]', paths.peersFile)
}
if (!utils.FileExists(selfInfoFile)) {
logger.Log(
'Self info file for p2p does not exist! P2P functionality will not be loaded',
'redbg'
)
logger.Log(
`File should be at: ${selfInfoFile} with the interface 'PeerInfo'`
)
throw new Error('p2p error')
if (!utils.FileExists(paths.selfInfoFile)) {
const msg = `Self info file for p2p does not exist! (${paths.selfInfoFile}) P2P functionality will not be loaded`
logger.Log(msg, 'redbg')
return {}
}
let publicKey: string
let privateKey: string
if (
!utils.FileExists(keyFile + '.priv') ||
!utils.FileExists(keyFile + '.pub')
!utils.FileExists(paths.keyFile + '.priv') ||
!utils.FileExists(paths.keyFile + '.pub')
) {
createKeyPair().then(({ publicKey: pubk, privateKey: privk }) => {
// at first start there won't be a keypair available until this finishes
utils.WriteFile(pubk, keyFile + '.pub')
utils.WriteFile(privk, keyFile + '.priv')
utils.WriteFile(pubk, paths.keyFile + '.pub')
utils.WriteFile(privk, paths.keyFile + '.priv')
publicKey = pubk
privateKey = privk
@ -384,35 +371,21 @@ function setup(data: SubmoduleData): Submodule {
'yellowbg'
)
} else {
publicKey = utils.ReadFile(keyFile + '.pub')
privateKey = utils.ReadFile(keyFile + '.priv')
publicKey = utils.ReadFile(paths.keyFile + '.pub')
privateKey = utils.ReadFile(paths.keyFile + '.priv')
// checking only here, because if it got generated in the other branch then it must be good
if (!isKeypairValid(publicKey, privateKey)) {
logger.Log('Loaded keypair is not valid!', 'redbg')
}
}
let peers: PeerInfo[] = utils.ReadJSON(peersFile)
let selfInfo: PeerInfo = utils.ReadJSON(selfInfoFile)
const { isValid: isPeersValid, errorMsg: peersErrorMsg } = validateJSON(
peers,
PeersInfoSchema
)
if (!isPeersValid) {
logger.Log(`Peers file (${peersFile}) has invalid contents!`, 'redbg')
peersErrorMsg.forEach((x) => logger.Log(x, 'red'))
let peers: PeerInfo[] = utils.ReadJSON(paths.peersFile)
let selfInfo: PeerInfo = utils.ReadJSON(paths.selfInfoFile)
if (!isJsonValidAndLogError(peers, PeersInfoSchema, paths.peersFile)) {
throw new Error('Invalid peers file')
}
const { isValid: isSelfInfoValid, errorMsg: selfInfoErrorMsg } =
validateJSON(selfInfo, SelfInfoSchema)
if (!isSelfInfoValid) {
logger.Log(
`Self info file (${selfInfoFile}) has invalid contents!`,
'redbg'
)
selfInfoErrorMsg.forEach((x) => logger.Log(x, 'red'))
if (!isJsonValidAndLogError(selfInfo, SelfInfoSchema, paths.selfInfoFile)) {
throw new Error('Invalid peers file')
}
// self info file is not required to have the publicKey, as it is always added on init
@ -420,17 +393,17 @@ function setup(data: SubmoduleData): Submodule {
const filesToWatch = [
{
fname: peersFile,
fname: paths.peersFile,
logMsg: 'Peers file updated',
action: () => {
peers = utils.ReadJSON(peersFile)
peers = utils.ReadJSON(paths.peersFile)
},
},
{
fname: selfInfoFile,
fname: paths.selfInfoFile,
logMsg: 'P2P self info file changed',
action: () => {
selfInfo = utils.ReadJSON(selfInfoFile)
selfInfo = utils.ReadJSON(paths.selfInfoFile)
selfInfo.publicKey = publicKey
},
},
@ -466,22 +439,20 @@ function setup(data: SubmoduleData): Submodule {
}
result.serverRevision = utils.getGitRevision(__dirname)
result.scriptRevision = utils.getGitRevision(
constants.moodleTestUserscriptDir
)
result.qminingPageRevision = utils.getGitRevision(
constants.qminingPageDir
paths.moodleTestUserscriptDir
)
result.qminingPageRevision = utils.getGitRevision(paths.qminingPageDir)
result.dataEditorRevision = utils.getGitRevision(
constants.dataEditorPageDir
paths.dataEditorPageDir
)
result.qminingPageBuildTime = utils
.statFile(constants.qminingIndexPath)
.statFile(paths.qminingIndexPath)
?.mtime.getTime()
result.serverBuildTime = utils
.statFile(constants.serverPath)
.statFile(paths.serverPath)
?.mtime.getTime()
result.dataEditorBuildTime = utils
.statFile(constants.dataEditorIndexPath)
.statFile(paths.dataEditorIndexPath)
?.mtime.getTime()
result.scriptVersion = utils.getScriptVersion()
@ -556,7 +527,7 @@ function setup(data: SubmoduleData): Submodule {
async function syncData() {
if (peers.length === 0) {
logger.Log(
`There are no peers specified in ${peersFile}, aborting sync`,
`There are no peers specified in ${paths.peersFile}, aborting sync`,
'yellowbg'
)
return {
@ -687,14 +658,14 @@ function setup(data: SubmoduleData): Submodule {
if (thirdPartyPeers.length > 0) {
utils.WriteFile(
JSON.stringify(thirdPartyPeers, null, 2),
thirdPartyPeersFile
paths.thirdPartyPeersFile
)
logger.Log(
`\tPeers reported ${logger.C('green')}${
thirdPartyPeers.length
}${logger.C()} third party peer(s) not connected to this server. See ${logger.C(
'blue'
)}${thirdPartyPeersFile}${logger.C()} for details`
)}${paths.thirdPartyPeersFile}${logger.C()} for details`
)
}
@ -830,7 +801,7 @@ function setup(data: SubmoduleData): Submodule {
utils.WriteFile(
JSON.stringify(updatedPeersFile, null, 2),
peersFile
paths.peersFile
)
}

View file

@ -59,7 +59,12 @@ import {
getSubjNameWithoutYear,
SearchResultQuestion,
} from '../../../utils/qdbUtils'
import { paths } from '../../../utils/files'
import constants from '../../../constants.json'
import {
isJsonValidAndLogError,
TestUsersSchema,
} from '../../../types/typeSchemas'
interface SavedQuestionData {
fname: string
@ -78,14 +83,6 @@ interface SavedQuestionData {
// }
const line = '====================================================' // lol
const registeredScriptsFile = 'stats/registeredScripts.json'
const testUsersFile = 'data/testUsers.json'
const askedQuestionFile = 'stats/askedQuestions'
const recievedQuestionFile = 'stats/recievedQuestions'
const savedQuestionsFileName = 'savedQuestions.json'
const oldMotdFile = 'publicDirs/qminingPublic/oldMotd'
const dailyDataCountFile = 'stats/dailyDataCount'
const dataEditsLog = 'stats/dataEdits'
function getSubjCount(qdbs: QuestionDb[]): number {
return qdbs.reduce((acc, qdb) => {
@ -114,7 +111,7 @@ function ExportDailyDataCount(questionDbs: QuestionDb[], userDB: Database) {
questionDbsCount: questionDbs.length,
userCount: dbtools.TableInfo(userDB, 'users').dataCount,
}),
dailyDataCountFile
paths.dailyDataCountFile
)
}
@ -157,17 +154,6 @@ function getDetailedRes(questionDbs: QuestionDb[]) {
})
}
function getMotd(version: string, motd: string) {
if (version) {
if (version.startsWith('2.0.')) {
if (utils.FileExists(oldMotdFile)) {
return utils.ReadFile(oldMotdFile)
}
}
}
return motd
}
function searchInDbs(
question: Question,
subj: string,
@ -268,7 +254,7 @@ function writeAskData(body: QuestionFromScript) {
towrite += JSON.stringify(body)
towrite +=
'\n------------------------------------------------------------------------------\n'
utils.AppendToFile(towrite, askedQuestionFile)
utils.AppendToFile(towrite, paths.askedQuestionFile)
} catch (err) {
logger.Log('Error writing revieved /ask POST data')
console.error(err)
@ -283,7 +269,7 @@ function writeIsAddingData(body: RecievedData) {
towrite += JSON.stringify(body)
towrite +=
'\n------------------------------------------------------------------------------\n'
utils.AppendToFile(towrite, recievedQuestionFile)
utils.AppendToFile(towrite, paths.recievedQuestionFile)
} catch (err) {
logger.Log('Error writing revieved /ask POST data')
console.error(err)
@ -312,7 +298,7 @@ function saveQuestion(
const fname = `${utils.GetDateString()}_${userid}_${testUrl}.json`
const subject = getSubjNameWithoutYear(subj).replace(/\//g, '-')
const subjPath = `${savedQuestionsDir}/${subject}`
const savedSubjQuestionsFilePath = `${subjPath}/${savedQuestionsFileName}`
const savedSubjQuestionsFilePath = `${subjPath}/${constants.savedQuestionsFileName}`
utils.CreatePath(subjPath, true)
if (!utils.FileExists(savedSubjQuestionsFilePath)) {
@ -358,9 +344,7 @@ function saveQuestion(
}
function loadSupportedSites() {
const script = utils
.ReadFile(constants.moodleTestUserscriptPath)
.split('\n')
const script = utils.ReadFile(paths.moodleTestUserscriptPath).split('\n')
let i = 0
let stayIn = true
@ -393,11 +377,18 @@ function LoadMOTD(motdFile: string) {
}
function LoadTestUsers() {
let testUsers = utils.ReadJSON(testUsersFile)
if (testUsers) {
testUsers = testUsers.userIds
if (!utils.FileExists(paths.testUsersFile)) {
utils.WriteFile('{}', paths.testUsersFile)
}
const testUsers = utils.ReadJSON<{ userIds: number[] }>(paths.testUsersFile)
if (
!isJsonValidAndLogError(testUsers, TestUsersSchema, paths.testUsersFile)
) {
return []
} else {
return testUsers.userIds
}
return testUsers
}
function getNewQdb(
@ -481,14 +472,14 @@ function setup(data: SubmoduleData): Submodule {
},
},
{
fname: testUsersFile,
fname: paths.testUsersFile,
logMsg: 'Test Users file changed',
action: () => {
testUsers = LoadTestUsers()
},
},
{
fname: constants.moodleTestUserscriptPath,
fname: paths.moodleTestUserscriptPath,
logMsg: 'User script file changed',
action: () => {
version = utils.getScriptVersion()
@ -754,7 +745,7 @@ function setup(data: SubmoduleData): Submodule {
result.version = version
}
if (req.query.motd) {
result.motd = getMotd(req.query.cversion, motd)
result.motd = motd
}
res.json(result)
})
@ -762,13 +753,13 @@ function setup(data: SubmoduleData): Submodule {
app.post('/registerscript', function (req: Request, res) {
logger.LogReq(req)
if (!utils.FileExists(registeredScriptsFile)) {
utils.WriteFile('[]', registeredScriptsFile)
if (!utils.FileExists(paths.registeredScriptsFile)) {
utils.WriteFile('[]', paths.registeredScriptsFile)
}
const ua: string = req.headers['user-agent']
const registeredScripts: RegisteredUserEntry[] = utils.ReadJSON(
registeredScriptsFile
paths.registeredScriptsFile
)
const { cid, uid, version, installSource, date } = req.body
@ -810,7 +801,7 @@ function setup(data: SubmoduleData): Submodule {
utils.WriteFile(
JSON.stringify(registeredScripts, null, 2),
registeredScriptsFile
paths.registeredScriptsFile
)
res.json({ msg: 'done' })
@ -828,7 +819,7 @@ function setup(data: SubmoduleData): Submodule {
})
res.json({
savedQuestionsFileName: savedQuestionsFileName,
savedQuestionsFileName: constants.savedQuestionsFileName,
subjects: files.map((subj) => {
return {
name: subj,
@ -844,7 +835,7 @@ function setup(data: SubmoduleData): Submodule {
const subj = req.body.subj
const file = req.body.file
const savedQuestionsPath = `${savedQuestionsDir}/${subj}/${savedQuestionsFileName}`
const savedQuestionsPath = `${savedQuestionsDir}/${subj}/${constants.savedQuestionsFileName}`
const savedQuestions: SavedQuestionData[] =
utils.ReadJSON(savedQuestionsPath)
let path = `${savedQuestionsDir}/${subj}/${file}`
@ -943,11 +934,11 @@ function setup(data: SubmoduleData): Submodule {
)
utils.AppendToFile(
`${date}: User ${user.id} deleted a question from '${subjName}' (index: ${index})`,
dataEditsLog
paths.dataEditsLog
)
utils.AppendToFile(
JSON.stringify(deletedQuestion, null, 2),
dataEditsLog
paths.dataEditsLog
)
}
@ -959,7 +950,7 @@ function setup(data: SubmoduleData): Submodule {
)
utils.AppendToFile(
`${date}: User ${user.id} edited a question in '${subjName}' (index: ${index})`,
dataEditsLog
paths.dataEditsLog
)
utils.AppendToFile(
JSON.stringify(
@ -970,7 +961,7 @@ function setup(data: SubmoduleData): Submodule {
null,
2
),
dataEditsLog
paths.dataEditsLog
)
}
@ -982,7 +973,7 @@ function setup(data: SubmoduleData): Submodule {
)
utils.AppendToFile(
`${date} User #${user.id} modified '${subjName}'. Edited: ${deletedQuestions.length}, deleted: ${deletedQuestions.length}`,
dataEditsLog
paths.dataEditsLog
)
utils.AppendToFile(
JSON.stringify(
@ -993,7 +984,7 @@ function setup(data: SubmoduleData): Submodule {
null,
2
),
dataEditsLog
paths.dataEditsLog
)
}
// ------------------

View file

@ -63,6 +63,19 @@ function BackupDB(usersDbBackupPath: string, userDB: Database) {
})
}
function createDefaultUser(userDb: Database) {
logger.Log('The user DB is empty, creating user #1', 'yellowbg')
const pw = uuidv4()
const insertRes = dbtools.Insert(userDb, 'users', {
pw: pw,
avaiblePWRequests: 0,
created: utils.GetDateString(),
})
logger.Log('ID and PW for user #1: ', 'yellowbg')
console.log(`ID: #${insertRes.lastInsertRowid}, PW: "${pw}"`)
logger.Log('It can be also viewed from the user db file.')
}
// TODO: figure out if this is needed
// const validationTokenNameFile = 'data/validationTokenName'
// function readValidationTokenName() {
@ -83,12 +96,13 @@ function setup(data: SubmoduleData): Submodule {
domain = domain.join('.') // "frylabs.net"
logger.DebugLog(`Cookie domain: ${domain}`, 'cookie', 1)
logger.Log(
`User count: ${dbtools
const userCount = dbtools
.TableInfo(userDB, 'users')
.dataCount.toLocaleString()} users`,
'blue'
)
.dataCount.toLocaleString()
logger.Log(`User count: ${userCount} users`, 'blue')
if (+userCount === 0) {
createDefaultUser(userDB)
}
app.get('/avaiblePWS', (req: Request, res: any) => {
logger.LogReq(req)

View file

@ -97,4 +97,4 @@ const DbStruct = {
},
}
exports = DbStruct
exports.default = DbStruct

View file

@ -95,14 +95,6 @@ function GetApp(): ModuleType {
logger.LogReq(req)
})
app.get('*', function (_req: Request, res) {
res.status(404).render('404')
})
app.post('*', function (_req: Request, res) {
res.status(404).render('404')
})
return {
app: app,
}

View file

@ -53,14 +53,6 @@ function GetApp(): ModuleType {
})
})
app.get('*', function (_req, res) {
res.status(404).render('404')
})
app.post('*', function (_req, res) {
res.status(404).render('404')
})
return {
app: app,
}

View file

@ -29,6 +29,7 @@ import logger from '../../utils/logger'
import auth from '../../middlewares/auth.middleware'
import { SetupData } from '../../server'
import { ModuleType, Request } from '../../types/basicTypes'
import { LinksSchema, validateJSON } from '../../types/typeSchemas'
// stuff gotten from server.js
let publicdirs: string[] = []
@ -75,7 +76,17 @@ function GetApp(): ModuleType {
function loadDonateURL() {
try {
if (utils.FileExists(linksFile)) {
links = utils.ReadJSON(linksFile)
const { isValid, errorMsg } = validateJSON(links, LinksSchema)
if (!isValid) {
logger.Log(
`Peers file (${linksFile}) has invalid contents!`,
'redbg'
)
errorMsg.forEach((x) => logger.Log(x, 'red'))
}
}
} catch (err) {
logger.Log('Couldnt read donate URL file!', logger.GetColor('red'))
console.error(err)
@ -90,7 +101,10 @@ function GetApp(): ModuleType {
loadDonateURL()
})
} else {
logger.Log('Couldnt read donate URL file!', logger.GetColor('red'))
logger.Log(
`Couldnt read links file! (${linksFile})`,
logger.GetColor('redbg')
)
}
// --------------------------------------------------------------
@ -229,14 +243,6 @@ function GetApp(): ModuleType {
logger.LogReq(req)
})
app.get('*', function (_req: Request, res) {
res.status(404).render('404')
})
app.post('*', function (_req: Request, res) {
res.status(404).render('404')
})
return {
app: app,
}

View file

@ -21,9 +21,6 @@
console.log('Node version: ' + process.version)
console.log('Current working directory: ' + process.cwd())
const startHTTPS = true
const isRoot = process.getuid && process.getuid() === 0
const port = process.env.PORT || 8080
const httpsport = 5001
@ -32,7 +29,6 @@ const httpsport = 5001
// console.log(`Process priority set to ${os.getPriority()}`)
import express from 'express'
import vhost from 'vhost'
import http from 'http'
import https from 'https'
import cors from 'cors'
@ -45,12 +41,10 @@ import utils from './utils/utils'
import dbtools from './utils/dbtools'
import reqlogger from './middlewares/reqlogger.middleware'
import idStats from './utils/ids'
const extraModulesFile = './src/extraModules/extraModules.json'
const statExcludeFile = './data/statExclude.json'
const modulesFile = './src/modules.json'
const usersDBPath = './data/dbs/users.db'
const logFile = logger.logDir + logger.logFileName
const vlogFile = logger.vlogDir + logger.logFileName
import { paths, validateFiles } from './utils/files'
const logFile = paths.logDir + logger.logFileName
const vlogFile = paths.vlogDir + logger.logFileName
function moveLogIfNotFromToday(path: string, to: string) {
if (utils.FileExists(path)) {
@ -65,8 +59,8 @@ function moveLogIfNotFromToday(path: string, to: string) {
}
}
}
moveLogIfNotFromToday(logFile, logger.logDir)
moveLogIfNotFromToday(vlogFile, logger.vlogDir)
moveLogIfNotFromToday(logFile, paths.logDir)
moveLogIfNotFromToday(vlogFile, paths.vlogDir)
idStats.Load()
logger.Load()
@ -79,7 +73,7 @@ interface Module {
path: string
publicdirs: Array<string>
name: string
urls: Array<string>
route: string
nextdir?: string
isNextJs?: boolean
app: express.Application
@ -96,11 +90,16 @@ export interface SetupData {
httpsServer: https.Server
}
if (!utils.FileExists(usersDBPath)) {
throw new Error('No user DB exists yet! please run utils/dbSetup.js first!')
const filesValid = validateFiles()
if (!filesValid) {
const msg =
'Not all files are valid which are needed to run the server! Please resolve the above issues, and start again.'
logger.Log(msg, 'red')
throw new Error(msg)
}
const userDB = dbtools.GetDB(usersDBPath)
let modules: Modules = utils.ReadJSON(modulesFile)
const userDB = dbtools.GetDB(paths.usersDBPath)
let modules: Modules = utils.ReadJSON(paths.modulesFile)
const debugLevel = parseInt(process.env.NS_LOGLEVEL) || 0
logger.Log('Loglevel is: ' + debugLevel)
@ -108,8 +107,8 @@ logger.Log(`Log path: ${logFile}`)
logger.Log(`vLog path: ${vlogFile}`)
try {
if (utils.FileExists(extraModulesFile)) {
const extraModules = JSON.parse(utils.ReadFile(extraModulesFile))
if (utils.FileExists(paths.extraModulesFile)) {
const extraModules = JSON.parse(utils.ReadFile(paths.extraModulesFile))
modules = {
...extraModules,
...modules,
@ -147,28 +146,20 @@ function exit(reason: string) {
process.exit()
}
// https://certbot.eff.org/
const privkeyFile = '/etc/letsencrypt/live/frylabs.net/privkey.pem'
const fullchainFile = '/etc/letsencrypt/live/frylabs.net/fullchain.pem'
const chainFile = '/etc/letsencrypt/live/frylabs.net/chain.pem'
let certsLoaded = false
let certs: { key: string; cert: string; ca: string }
// https://certbot.eff.org/
if (
startHTTPS &&
utils.FileExists(privkeyFile) &&
utils.FileExists(fullchainFile) &&
utils.FileExists(chainFile)
utils.FileExists(paths.privkeyFile) &&
utils.FileExists(paths.fullchainFile) &&
utils.FileExists(paths.chainFile)
) {
try {
const key = utils.ReadFile(privkeyFile)
const cert = utils.ReadFile(fullchainFile)
const ca = utils.ReadFile(chainFile)
certs = {
key: key,
cert: cert,
ca: ca,
key: utils.ReadFile(paths.privkeyFile),
cert: utils.ReadFile(paths.fullchainFile),
ca: utils.ReadFile(paths.chainFile),
}
certsLoaded = true
} catch (err) {
@ -218,11 +209,13 @@ app.use(
const cookieSecret = uuidv4()
app.use(cookieParser(cookieSecret))
app.set('view engine', 'ejs')
app.set('views', ['./src/modules/api/views', './src/sharedViews'])
if (!utils.FileExists(statExcludeFile)) {
utils.WriteFile('[]', statExcludeFile)
if (!utils.FileExists(paths.statExcludeFile)) {
utils.WriteFile('[]', paths.statExcludeFile)
}
const excludeFromStats = utils.ReadJSON(statExcludeFile)
const excludeFromStats = utils.ReadJSON(paths.statExcludeFile)
app.use(
reqlogger({
@ -233,12 +226,13 @@ app.use(
})
)
const domain = utils.ReadFile(paths.domainFile)
Object.keys(modules).forEach(function (key) {
const module = modules[key]
try {
const mod = require(module.path).default // eslint-disable-line
// const mod = require(module.path)
logger.Log(`Loading ${mod.name} module`, logger.GetColor('yellow'))
module.publicdirs.forEach((pdir) => {
utils.CreatePath(pdir)
@ -246,7 +240,7 @@ Object.keys(modules).forEach(function (key) {
if (mod.setup) {
mod.setup({
url: 'https://' + module.urls[0],
url: domain,
userDB: userDB,
publicdirs: module.publicdirs,
nextdir: module.nextdir,
@ -259,14 +253,32 @@ Object.keys(modules).forEach(function (key) {
module.app = modApp.app
module.dailyAction = modApp.dailyAction
module.cleanup = modApp.cleanup
module.urls.forEach((url) => {
app.use(vhost(url, module.app))
})
logger.Log(
`Module "${mod.name}" loaded at "${module.route}"`,
logger.GetColor('yellow')
)
app.use(module.route, module.app)
} catch (err) {
console.error(err)
}
})
app.get('*', (req, res) => {
if (req.is('application/json')) {
res.status(404).end()
} else {
res.status(404).render('404')
}
})
app.post('*', (req, res) => {
if (req.is('application/json')) {
res.status(404).end()
} else {
res.status(404).render('404')
}
})
setLogTimer()
function setLogTimer() {
const now = new Date()
@ -309,10 +321,10 @@ function rotateLog() {
('0' + date.getDate()).slice(-2)
if (utils.FileExists(logFile)) {
utils.CopyFile(logFile, logger.logDir + fname)
utils.CopyFile(logFile, paths.logDir + fname)
}
if (utils.FileExists(vlogFile)) {
utils.CopyFile(vlogFile, logger.vlogDir + fname)
utils.CopyFile(vlogFile, paths.vlogDir + fname)
}
utils.WriteFile(fname, logFile)
@ -345,9 +357,9 @@ function LogTimerAction() {
logger.Log('Node version: ' + process.version)
logger.Log('Current working directory: ' + process.cwd())
logger.Log('Listening on port: ' + port)
if (isRoot) {
logger.Log('Running as root', logger.GetColor('red'))
logger.Log('Listening on port: ' + logger.C('blue') + port + logger.C())
if (process.getuid && process.getuid() === 0) {
logger.Log('Running as root', 'redbg')
}
httpServer.listen(port)

View file

@ -1,11 +1,12 @@
const utils = require('../../dist/utils/utils.js').default // eslint-disable-line
// TODO: logger creates stat dir in pwd
const logger = require('../../dist/utils/logger.js').default // eslint-disable-line
const dbtools = require('../../dist/utils/dbtools.js').default // eslint-disable-line
const { v4: uuidv4 } = require('uuid') // eslint-disable-line
const dbStructPaths = [
{ structPath: '../modules/api/usersDBStruct.js', name: 'users.db' },
{ structPath: '../modules/api/msgsDbStruct.js', name: 'msgs.db' },
{ structPath: '../../src/modules/api/usersDBStruct.js', name: 'users.db' },
{ structPath: '../../src/modules/api/msgsDbStruct.js', name: 'msgs.db' },
]
dbStructPaths.forEach((data) => {
@ -14,13 +15,15 @@ dbStructPaths.forEach((data) => {
})
function createDB(path, name) {
console.log(path, name)
// eslint-disable-next-line @typescript-eslint/no-var-requires
const dbStruct = require(path)
const dbStruct = require(path).default
const db = dbtools.GetDB(`./${name}`)
db.pragma('synchronous = OFF')
Object.keys(dbStruct).forEach((tableName) => {
const tableData = dbStruct[tableName]
logger.Log(`Creating table ${tableName} ...`)
dbtools.CreateTable(
db,
tableName,
@ -28,10 +31,11 @@ function createDB(path, name) {
tableData.foreignKey
)
})
printDb(db, dbStruct)
// logger.Log(`${name} db info:`)
// printDb(db, dbStruct)
db.close()
logger.Log('Done')
logger.Log(`Created db ${name} at ${path}`)
}
function printDb(db, dbStruct) {

View file

@ -1,4 +1,5 @@
import { Schema, Validator } from 'jsonschema'
import logger from '../utils/logger'
// https://json-schema.org/learn/getting-started-step-by-step
const validator = new Validator()
@ -18,21 +19,21 @@ export const validateJSON = (
errorMsg: errorMsg,
}
}
export class InvalidJSONError extends Error {
constructor(errorDetails: {
msg: string
expected: Schema
actual: unknown
}) {
const { msg, expected, actual } = errorDetails
super(
msg +
'\nExpected:\n' +
JSON.stringify(expected, null, 2) +
'\nActual:\n' +
JSON.stringify(actual, null, 2)
)
/**
 * Validates `object` against `schema` and, on failure, logs the file path
 * plus every validation error in red. Thin logging wrapper around
 * `validateJSON` for startup file checks.
 *
 * @param object   parsed JSON value to validate
 * @param schema   jsonschema Schema to validate against
 * @param filePath path of the originating file, used only in the log message
 * @returns true when `object` conforms to `schema`
 */
export const isJsonValidAndLogError = (
  object: unknown,
  schema: Schema,
  filePath: string
): boolean => {
  // Renamed from isSelfInfoValid / selfInfoErrorMsg: this helper is generic,
  // not specific to the selfInfo file it was first written for.
  const { isValid, errorMsg } = validateJSON(object, schema)
  if (!isValid) {
    logger.Log(`File (${filePath}) has invalid contents!`, 'redbg')
    errorMsg.forEach((x) => logger.Log(x, 'red'))
    return false
  }
  return true
}
const PeerInfoSchemaBase = {
@ -108,3 +109,44 @@ export const QuestoinDbFileSchema: Schema = {
type: 'array',
items: SubjectSchema,
}
// Schema for the links JSON file: donation-related URLs shown to users.
export const LinksSchema: Schema = {
  type: 'object',
  properties: {
    donate: { type: 'string' },
    patreon: { type: 'string' },
  },
  required: ['donate', 'patreon'],
}

// Schema for data/testUsers.json: ids of test users excluded from stats.
export const TestUsersSchema: Schema = {
  type: 'object',
  properties: {
    userIds: { type: 'array', items: { type: 'number' } },
  },
  required: ['userIds'],
}

// Schema for a single module entry in modules.json / extraModules.json.
export const ModuleSchema: Schema = {
  type: 'object',
  properties: {
    path: { type: 'string' },
    publicdirs: {
      type: 'array',
      minItems: 1, // FIXME: make this a single string, no need for array
      items: { type: 'string' },
    },
    // NOTE(review): module setup code elsewhere reads `module.nextdir`
    // (all lowercase) — confirm whether this key should be `nextDir` or
    // `nextdir` so validation matches what the server actually consumes.
    nextDir: { type: 'string' },
    name: { type: 'string' },
    route: { type: 'string' },
    isNextJs: { type: 'boolean' },
  },
  required: ['path', 'publicdirs', 'name', 'route'],
}

// Schema for the whole modules JSON file: an object whose every value
// (any key name) must be a valid ModuleSchema entry.
export const ModulesSchema: Schema = {
  type: 'object',
  patternProperties: {
    '.*': ModuleSchema,
  },
}

View file

@ -526,7 +526,7 @@ function handleWorkerData() {
} catch (e) {
console.error(e)
parentPort.postMessage({
msg: `From thread #${workerIndex}: Invalid message type (${msg.type})!`,
msg: `From thread #${workerIndex}: unhandled error occured!`,
workerIndex: workerIndex,
e: e,
})

246
src/utils/files.ts Normal file
View file

@ -0,0 +1,246 @@
import { Schema } from 'jsonschema'
import {
TestUsersSchema,
isJsonValidAndLogError,
PeersInfoSchema,
PeerInfoSchema,
ModulesSchema,
} from '../types/typeSchemas'
import logger from './logger'
import utils from './utils'
// FIXME: remove all file exists checks from everywhere for files that are created / checked here
// Describes one file or directory the server depends on, and how startup
// validation should treat it.
type FileDescriptor = {
  path: string // location relative to cwd (or absolute, e.g. letsencrypt keys)
  schema?: Schema // when set, the file's JSON contents are validated against it
  defaultValue?: string // when set, written to `path` if the file is missing
  shouldBe?: string // human explanation; when set, a missing file is an error
  description?: string // informational only; not read by validateFiles
}
/**
 * Walks every entry in `files` and validates it: missing files that declare a
 * `defaultValue` are created on the fly; missing files that declare `shouldBe`
 * are reported as errors; existing files with a `schema` have their JSON
 * contents validated.
 *
 * @returns true when every described file exists (or was created) and all
 *          schema-checked files have valid contents
 */
export const validateFiles = (): boolean => {
  let everythingValid = true

  Object.entries(files).forEach(([key, file]: [string, FileDescriptor]) => {
    let fileExists = utils.FileExists(file.path)

    // Files with a default value are created when absent and count as valid.
    if (file.defaultValue != null && !fileExists) {
      utils.WriteFile(file.defaultValue, file.path)
      fileExists = true
    }

    // A missing file is only an error when the entry says what it should be;
    // purely informational entries (description only) are not required.
    // The original re-checked `file.shouldBe` inside this branch, but the
    // guard already guarantees it — the message is built in one step instead.
    if (file.shouldBe && !fileExists) {
      logger.Log(
        `File "${file.path}" does not exist! (${key}) Should be: ${file.shouldBe}`,
        'redbg'
      )
      everythingValid = false
      return
    }

    if (file.schema && fileExists) {
      const val = utils.ReadJSON(file.path)
      if (!isJsonValidAndLogError(val, file.schema, file.path)) {
        everythingValid = false
        return
      }
    }
  })

  return everythingValid
}
// Registry of every file and directory the server reads or writes.
// `validateFiles()` walks this map on startup: entries with `defaultValue`
// are created when missing, entries with `shouldBe` are required to exist,
// and entries with `schema` have their JSON contents validated.
// `as const satisfies` keeps the literal keys (needed for the derived
// `paths` map) while still type-checking every entry as a FileDescriptor.
export const files = {
  // --------------------------------------------------------------------------------
  // server / modules files
  // --------------------------------------------------------------------------------
  serverPath: {
    path: 'dist/server.js',
    shouldBe:
      'server main entry file, created after running "npm run build"',
  },
  qminingPageDir: {
    path: 'submodules/qmining-page',
    shouldBe:
      'qmining page submodule directory, created by pulling submodules / setup script',
  },
  qminingIndexPath: {
    path: 'nextStatic/qminingPagePublic/index.html',
    shouldBe:
      'qmining page-s build index.html, created by "npm run build" in qmining page submodule dir',
  },
  dataEditorPageDir: {
    path: 'submodules/qmining-data-editor',
    shouldBe:
      'qmining data editor page submodule directory, created by pulling submodules / setup script',
  },
  dataEditorIndexPath: {
    path: 'nextStatic/dataEditorPublic/index.html',
    shouldBe:
      'qmining data editor-s build index.html, created by "npm run build" in qmining data editor submodule dir',
  },
  moodleTestUserscriptDir: {
    path: 'submodules/moodle-test-userscript',
    shouldBe:
      'moodle test userscript submodule directory, created by pulling submodules / setup script',
  },
  moodleTestUserscriptPath: {
    path: 'submodules/moodle-test-userscript/stable.user.js',
    shouldBe:
      'moodle test userscript file, created by pulling submodules / setup script',
  },
  domainFile: {
    path: 'data/domain',
    shouldBe:
      'server domain for cookies and stuff, for ex.: "frylabs.net", no "http://" and things like that, just the domain',
  },
  // --------------------------------------------------------------------------------
  // stats files
  // --------------------------------------------------------------------------------
  registeredScriptsFile: {
    path: 'stats/registeredScripts.json',
    defaultValue: JSON.stringify([]),
  },
  askedQuestionFile: {
    path: 'stats/askedQuestions',
    description: 'text file of recieved data on /ask',
  },
  // NOTE(review): the 'recieved' misspelling below is kept on purpose —
  // these are on-disk file names / stored values; renaming would orphan
  // existing stats files.
  recievedQuestionFile: {
    path: 'stats/recievedQuestions',
    description: 'text file of recieved data on /isAdding',
  },
  dailyDataCountFile: {
    path: 'stats/dailyDataCount',
    description: 'text file of daily data count',
  },
  dataEditsLog: {
    path: 'stats/dataEdits',
    description: 'text file of data edit logs',
  },
  // --------------------------------------------------------------------------------
  // https files
  // --------------------------------------------------------------------------------
  privkeyFile: {
    path: '/etc/letsencrypt/live/frylabs.net/privkey.pem',
    description: 'private key file for https',
  },
  fullchainFile: {
    path: '/etc/letsencrypt/live/frylabs.net/fullchain.pem',
    description: 'full chain key file for https',
  },
  chainFile: {
    path: '/etc/letsencrypt/live/frylabs.net/chain.pem',
    description: 'chain key file for https',
  },
  // --------------------------------------------------------------------------------
  // api files
  // --------------------------------------------------------------------------------
  rootRedirectToFile: {
    path: 'data/apiRootRedirectTo',
    description: 'url to redirect users trying to acces root api path',
  },
  modulesFile: {
    path: './src/modules.json',
    shouldBe: 'module files for server',
    schema: ModulesSchema,
  },
  extraModulesFile: {
    path: './src/extraModules/extraModules.json',
    description: 'extra private modules for server, not tracked by git',
    schema: ModulesSchema,
  },
  statExcludeFile: {
    path: './data/statExclude.json',
    shouldBe:
      'array of strings which if included in requests url-s then the request itself is not counted in stats',
    defaultValue: JSON.stringify([]),
    schema: { type: 'array', items: { type: 'string' } },
  },
  usersDBPath: {
    path: './data/dbs/users.db',
    shouldBe: 'users sqlite db file',
  },
  // --------------------------------------------------------------------------------
  // qmining api
  // --------------------------------------------------------------------------------
  testUsersFile: {
    path: 'data/testUsers.json',
    defaultValue: JSON.stringify({ userIds: [] }),
    schema: TestUsersSchema,
    description: 'test users, which are excluded from stats',
  },
  // --------------------------------------------------------------------------------
  // log files
  // --------------------------------------------------------------------------------
  vlogDir: {
    path: 'stats/vlogs/',
    description: 'verbose logs directory',
  },
  logDir: {
    path: 'stats/logs/',
    description: 'basic logs directory',
  },
  statFile: {
    path: 'stats/stats',
    defaultValue: JSON.stringify({}),
    description: 'json of visited paths all time',
  },
  vStatFile: {
    path: 'stats/vstats',
    defaultValue: JSON.stringify({}),
    description: 'json of visited paths by day',
  },
  uStatsFile: {
    path: 'stats/ustats',
    defaultValue: JSON.stringify({}),
    description: 'json of visits per user',
  },
  uvStatsFile: {
    path: 'stats/uvstats',
    defaultValue: JSON.stringify({}),
    description: 'json of visits per user by day',
  },
  nologFile: {
    path: './data/nolog',
    defaultValue: '',
    description:
      'text file of users seperated by new lines to ignore in logging / stats',
  },
  // --------------------------------------------------------------------------------
  // peer files
  // --------------------------------------------------------------------------------
  peersPath: {
    path: 'data/p2p/',
    description: 'p2p files directory',
  },
  peersFile: {
    path: 'data/p2p/peers.json',
    description: 'json of list of peers',
    defaultValue: JSON.stringify([]),
    schema: PeersInfoSchema,
  },
  selfInfoFile: {
    path: 'data/p2p/selfInfo.json',
    description: 'json of info of this servers peer functionality',
    defaultValue: JSON.stringify({}),
    schema: PeerInfoSchema,
  },
  thirdPartyPeersFile: {
    path: 'data/p2p/thirdPartyPeers.json',
    description: 'json of third party peers reported by other peers',
  },
  keyFile: {
    // p2p key file; no schema/default — presumably generated elsewhere,
    // TODO confirm
    path: 'data/p2p/key',
  },
} as const satisfies Record<string, FileDescriptor>
/**
 * Map of descriptor key -> file path, derived from `files` so the two can
 * never drift apart. Prefer `paths.logDir` etc. over repeating path literals.
 *
 * Uses Object.fromEntries instead of a spread-accumulator reduce: same
 * result, but O(n) instead of O(n^2) object copies and more direct.
 */
export const paths = Object.fromEntries(
  Object.entries(files).map(([key, value]) => [key, value.path])
) as Record<keyof typeof files, string>

View file

@ -26,14 +26,7 @@ const DELIM = C('green') + '|' + C()
// import express from 'express'
import utils from '../utils/utils'
import { Request } from '../types/basicTypes'
const vlogDir = 'stats/vlogs/'
const logDir = 'stats/logs/'
const statFile = 'stats/stats'
const vStatFile = 'stats/vstats'
const uStatsFile = 'stats/ustats'
const uvStatsFile = 'stats/uvstats'
const nologFile = './data/nolog'
import { paths } from './files'
const colors = ['green', 'red', 'yellow', 'blue', 'magenta', 'cyan']
const logFileName = 'log'
@ -93,7 +86,7 @@ function Log(msg: string | object, color?: string): void {
}
utils.AppendToFile(
typeof log === 'string' ? log : JSON.stringify(log),
logDir + logFileName
paths.logDir + logFileName
)
}
@ -156,7 +149,10 @@ function LogReq(
}
logEntry += GetRandomColor(uid.toString()) + uid + C() + dl
logEntry += GetRandomColor(req.url.split('?')[0]) + req.url + C()
logEntry +=
GetRandomColor(req.originalUrl.split('?')[0]) +
req.originalUrl +
C()
if (statusCode !== undefined) {
logEntry += dl + statusCode
@ -168,7 +164,7 @@ function LogReq(
} else {
const defLogs = utils.GetDateString() + dl + logEntry
utils.AppendToFile(defLogs, vlogDir + logFileName)
utils.AppendToFile(defLogs, paths.vlogDir + logFileName)
}
} catch (err) {
console.error(err)
@ -187,17 +183,19 @@ function parseNoLogFile(newData: string) {
}
function setNoLogReadInterval() {
utils.WatchFile(nologFile, (newData: string) => {
utils.WatchFile(paths.nologFile, (newData: string) => {
parseNoLogFile(newData)
Log('No Log user ID-s changed: ' + noLogIds.join(', '))
})
parseNoLogFile(utils.ReadFile(nologFile))
parseNoLogFile(utils.ReadFile(paths.nologFile))
}
function Load(): void {
try {
uvData = JSON.parse(utils.ReadFile(uStatsFile))
if (utils.FileExists(paths.uStatsFile)) {
uvData = JSON.parse(utils.ReadFile(paths.uStatsFile))
}
} catch (err) {
Log(
'Error at loading logs! (@ first run its normal)',
@ -207,7 +205,9 @@ function Load(): void {
}
try {
udvData = JSON.parse(utils.ReadFile(uvStatsFile))
if (utils.FileExists(paths.uvStatsFile)) {
udvData = JSON.parse(utils.ReadFile(paths.uvStatsFile))
}
} catch (err) {
Log(
'Error at loading logs! (@ first run its normal)',
@ -217,7 +217,9 @@ function Load(): void {
}
try {
vData = utils.ReadJSON(statFile)
if (utils.FileExists(paths.statFile)) {
vData = utils.ReadJSON(paths.statFile)
}
} catch (err) {
Log(
'Error at loading logs! (@ first run its normal)',
@ -227,7 +229,9 @@ function Load(): void {
}
try {
dvData = utils.ReadJSON(vStatFile)
if (utils.FileExists(paths.vStatFile)) {
dvData = utils.ReadJSON(paths.vStatFile)
}
} catch (err) {
Log(
'Error at loading visit logs! (@ first run its normal)',
@ -329,26 +333,26 @@ function Save() {
writes++
if (writes === writeInterval) {
try {
utils.WriteFile(JSON.stringify(uvData), uStatsFile)
utils.WriteFile(JSON.stringify(uvData), paths.uStatsFile)
} catch (err) {
Log('Error at writing logs! (more in stderr)', GetColor('redbg'))
console.error(err)
}
try {
utils.WriteFile(JSON.stringify(udvData), uvStatsFile)
utils.WriteFile(JSON.stringify(udvData), paths.uvStatsFile)
} catch (err) {
Log('Error at writing logs! (more in stderr)', GetColor('redbg'))
console.error(err)
}
try {
utils.WriteFile(JSON.stringify(vData), statFile)
utils.WriteFile(JSON.stringify(vData), paths.statFile)
// Log("Stats wrote.");
} catch (err) {
Log('Error at writing logs! (more in stderr)', GetColor('redbg'))
console.error(err)
}
try {
utils.WriteFile(JSON.stringify(dvData), vStatFile)
utils.WriteFile(JSON.stringify(dvData), paths.vStatFile)
// Log("Stats wrote.");
} catch (err) {
Log(
@ -463,8 +467,6 @@ export default {
hr: hr,
C: C,
logFileName: logFileName,
logDir: logDir,
vlogDir: vlogDir,
setLoggingDisabled: setLoggingDisabled,
logTable: logTable,
}

View file

@ -46,7 +46,7 @@ import fs from 'fs'
import { v4 as uuidv4 } from 'uuid'
import logger from '../utils/logger'
import constants from '../constants.json'
import { paths } from './files'
import { Request } from '../types/basicTypes'
interface URLFormatOptions {
@ -115,7 +115,7 @@ function ReadDir(path: string, listHidden?: boolean): Array<string> {
}
}
function ReadJSON(name: string): any {
function ReadJSON<T = any>(name: string): T {
try {
return JSON.parse(ReadFile(name))
} catch (err) {
@ -329,7 +329,7 @@ function getGitRevision(dir: string): string {
}
function getScriptVersion(): string {
const scriptContent = ReadFile(constants.moodleTestUserscriptPath)
const scriptContent = ReadFile(paths.moodleTestUserscriptPath)
let temp: string | string[] = scriptContent.split('\n').find((x) => {
return x.includes('@version')

View file

@ -186,7 +186,7 @@ export function initWorkerPool(
if (process.env.NS_THREAD_COUNT) {
logger.Log(
`Setting thread count from enviroment variable NS_WORKER_COUNT: '${threadCount}'`,
logger.GetColor('red')
'yellowbg'
)
}

@ -1 +1 @@
Subproject commit 7f5c1ae7a7a0dd4016bc57f2b26e606e1553c79d
Subproject commit 755891710a291494c244529cd54d59ec99d80079