This commit is contained in:
2026-03-22 22:31:29 +01:00
parent 97d4a990a8
commit 6f87742882
16 changed files with 371 additions and 107 deletions
-64
View File
@@ -1,64 +0,0 @@
name: Docker
on:
push:
branches: [ "main" ]
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: 'v2.1.1'
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
platforms: linux/amd64,linux/arm64
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v5
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/amd64,linux/arm64
- name: Sign the published Docker image
if: ${{ github.event_name != 'pull_request' }}
env:
DIGEST: ${{ steps.build-and-push.outputs.digest }}
run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
+48
View File
@@ -0,0 +1,48 @@
# Release workflow: builds binaries and multi-arch container images via
# GoReleaser whenever a version tag (v*) is pushed, or on manual dispatch.
# NOTE(review): indentation reconstructed — the source was whitespace-mangled.
name: release

on:
  push:
    tags:
      - "v*"
  workflow_dispatch:

permissions:
  contents: write # needed to create the GitHub release
  packages: write # needed to push images to GHCR

jobs:
  goreleaser:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history so GoReleaser can diff against the previous tag
          # when generating the changelog.
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
          cache: true

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v6
        with:
          distribution: goreleaser
          version: latest
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+1 -1
View File
@@ -16,7 +16,7 @@ jobs:
- name: Set up Go
uses: actions/setup-go@v4
with:
go-version: '1.22.3'
go-version: '1.26.1'
- name: Build
run: go build -v ./...
+3 -2
View File
@@ -1,2 +1,3 @@
.env**
data/**
.env
data/
dist/
+67
View File
@@ -0,0 +1,67 @@
# GoReleaser v2 configuration: static Linux binaries (amd64/arm64), tar.gz
# archives, per-arch Docker images and combined multi-arch manifests on GHCR.
# NOTE(review): indentation reconstructed — the source was whitespace-mangled.
version: 2

before:
  hooks:
    - go mod tidy

snapshot:
  version_template: "{{ .Version }}"

builds:
  - env:
      - CGO_ENABLED=0 # static binary, required for the scratch runtime image
    goos:
      - linux
    goarch:
      - amd64
      - arm64
    ldflags:
      - -s -w # strip symbol/debug tables to shrink the binary
    flags:
      - -trimpath

archives:
  - format: tar.gz
    name_template: >-
      {{ .ProjectName }}_
      {{- .Version }}_
      {{- .Os }}_
      {{- .Arch }}

dockers:
  - image_templates:
      - "ghcr.io/skidoodle/{{ .ProjectName }}:{{ .Tag }}-amd64"
      - "ghcr.io/skidoodle/{{ .ProjectName }}:latest-amd64"
    dockerfile: Dockerfile.release
    use: buildx
    goos: linux
    goarch: amd64
    # BUG FIX: without an explicit --platform, buildx builds the image for
    # the runner's host platform regardless of goarch.
    build_flag_templates:
      - "--platform=linux/amd64"
    extra_files:
      - web/
  - image_templates:
      - "ghcr.io/skidoodle/{{ .ProjectName }}:{{ .Tag }}-arm64"
      - "ghcr.io/skidoodle/{{ .ProjectName }}:latest-arm64"
    dockerfile: Dockerfile.release
    use: buildx
    goos: linux
    goarch: arm64
    build_flag_templates:
      - "--platform=linux/arm64"
    extra_files:
      - web/

docker_manifests:
  - name_template: "ghcr.io/skidoodle/{{ .ProjectName }}:{{ .Tag }}"
    image_templates:
      - "ghcr.io/skidoodle/{{ .ProjectName }}:{{ .Tag }}-amd64"
      - "ghcr.io/skidoodle/{{ .ProjectName }}:{{ .Tag }}-arm64"
  - name_template: "ghcr.io/skidoodle/{{ .ProjectName }}:latest"
    image_templates:
      - "ghcr.io/skidoodle/{{ .ProjectName }}:latest-amd64"
      - "ghcr.io/skidoodle/{{ .ProjectName }}:latest-arm64"

changelog:
  sort: asc
  filters:
    exclude:
      - "^docs:"
      - "^test:"
+21 -6
View File
@@ -1,16 +1,31 @@
FROM golang:alpine AS builder
FROM golang:1.26.1-alpine AS builder
RUN apk update && apk add --no-cache git ca-certificates tzdata
RUN addgroup -S -g 10001 appgroup && \
adduser -S -u 10001 -G appgroup appuser
WORKDIR /build
COPY go.mod go.sum ./
RUN go mod download
COPY . .
RUN CGO_ENABLED=0 go build -trimpath -ldflags="-s -w" -o /out/ncore-stats .
RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o ncore-stats .
RUN mkdir -p /app/data && chown -R 10001:10001 /app/data
FROM scratch
COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=builder /usr/share/zoneinfo /usr/share/zoneinfo
COPY --from=builder /etc/passwd /etc/passwd
COPY --from=builder /etc/group /etc/group
COPY --from=builder --chown=10001:10001 /build/ncore-stats /app/ncore-stats
COPY --from=builder --chown=10001:10001 /build/web /app/web
COPY --from=builder --chown=10001:10001 /app/data /app/data
FROM alpine:3
RUN apk add --no-cache ca-certificates
WORKDIR /app
COPY --from=builder /out/ncore-stats .
COPY web ./web
USER 10001
EXPOSE 3000
CMD ["./ncore-stats"]
+26
View File
@@ -0,0 +1,26 @@
# Runtime image assembled by the GoReleaser docker pipeline: the Go binary is
# built outside Docker and copied in, so this file only prepares a minimal
# scratch-based filesystem.

# Stage 1: borrow system files (CA certs, tzdata, passwd/group entries) from
# Alpine, since scratch contains none of them.
FROM alpine:latest AS sys-context
RUN apk add --no-cache ca-certificates tzdata
# Synthesize passwd/group files containing only the unprivileged app user
# (uid/gid 10001), written to _app paths so they don't clash with Alpine's own.
RUN echo "appuser:x:10001:10001:appuser:/:/sbin/nologin" > /etc/passwd_app \
&& echo "appuser:x:10001:appuser" > /etc/group_app
# Pre-create the writable data directory, owned by the app user.
RUN mkdir -p /app/data && chown -R 10001:10001 /app

# Stage 2: minimal runtime image built from scratch.
FROM scratch
COPY --from=sys-context /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=sys-context /usr/share/zoneinfo /usr/share/zoneinfo
COPY --from=sys-context /etc/passwd_app /etc/passwd
COPY --from=sys-context /etc/group_app /etc/group
COPY --from=sys-context --chown=10001:10001 /app /app
# Binaries and static files provided by goreleaser
COPY --chown=10001:10001 ncore-stats /app/ncore-stats
COPY --chown=10001:10001 web /app/web
WORKDIR /app
# Run as the unprivileged user created in stage 1.
USER 10001
EXPOSE 3000
ENTRYPOINT ["/app/ncore-stats"]
+2 -1
View File
@@ -1,6 +1,6 @@
services:
ncore-stats:
image: ghcr.io/skidoodle/ncore-stats:main
image: ghcr.io/skidoodle/ncore-stats:latest
container_name: ncore-stats
restart: unless-stopped
ports:
@@ -10,5 +10,6 @@ services:
environment:
- NICK=${NICK}
- PASSWORD=${PASS}
volumes:
data:
+67 -1
View File
@@ -19,16 +19,82 @@ func initDB(cfg *Configuration) *sql.DB {
schemas := []string{
`CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, display_name TEXT UNIQUE, profile_id TEXT);`,
`CREATE TABLE IF NOT EXISTS profile_history (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER, timestamp DATETIME, rank INTEGER, upload TEXT, current_upload TEXT, current_download TEXT, points INTEGER, seeding_count INTEGER, FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE);`,
`CREATE TABLE IF NOT EXISTS profile_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id INTEGER,
timestamp DATETIME,
rank INTEGER,
upload TEXT,
upload_bytes INTEGER,
current_upload TEXT,
current_download TEXT,
points INTEGER,
seeding_count INTEGER,
FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE
);`,
`CREATE INDEX IF NOT EXISTS idx_history_user_ts ON profile_history(user_id, timestamp);`,
}
for _, s := range schemas {
if _, err := db.Exec(s); err != nil {
logrus.Fatalf("Schema error: %v", err)
}
}
migrate(db)
return db
}
// migrate performs in-place schema upgrades on an existing database.
// Currently: adds the upload_bytes column to profile_history (if missing)
// and backfills it by parsing the stored human-readable upload strings.
func migrate(db *sql.DB) {
	// pragma_table_info yields one row per column, so COUNT(*) is 0 or 1
	// here; database/sql converts that integer to a bool on Scan.
	var columnExists bool
	_ = db.QueryRow("SELECT COUNT(*) FROM pragma_table_info('profile_history') WHERE name='upload_bytes'").Scan(&columnExists)
	if columnExists {
		return // already migrated
	}

	logrus.Info("Migrating: Adding upload_bytes column...")
	if _, err := db.Exec("ALTER TABLE profile_history ADD COLUMN upload_bytes INTEGER"); err != nil {
		logrus.Errorf("Migration failed (add column): %v", err)
		return
	}

	logrus.Info("Migrating: Backfilling upload_bytes from existing strings...")
	type updateRow struct {
		id    int64
		bytes int64
	}
	// Collect all rows first so the backfill UPDATEs don't run while the
	// SELECT cursor is still open.
	var updates []updateRow
	rows, err := db.Query("SELECT id, upload FROM profile_history WHERE upload_bytes IS NULL")
	if err != nil {
		logrus.Errorf("Migration backfill query failed: %v", err)
	} else {
		for rows.Next() {
			var id int64
			var upload string
			if err := rows.Scan(&id, &upload); err == nil {
				updates = append(updates, updateRow{id: id, bytes: parseToBytes(upload)})
			}
		}
		// Surface iteration errors instead of silently truncating the backfill.
		if err := rows.Err(); err != nil {
			logrus.Errorf("Migration row iteration failed: %v", err)
		}
		rows.Close()
	}

	if len(updates) > 0 {
		tx, err := db.Begin()
		if err != nil {
			logrus.Errorf("Transaction start failed: %v", err)
			return
		}
		stmt, err := tx.Prepare("UPDATE profile_history SET upload_bytes = ? WHERE id = ?")
		if err != nil {
			// BUG FIX: this error was previously discarded (stmt, _ := ...),
			// which would dereference a nil statement below.
			logrus.Errorf("Prepare failed: %v", err)
			_ = tx.Rollback()
			return
		}
		for _, up := range updates {
			if _, err := stmt.Exec(up.bytes, up.id); err != nil {
				// Log per-row failures instead of swallowing them silently.
				logrus.Errorf("Backfill update failed for id %d: %v", up.id, err)
			}
		}
		stmt.Close()
		if err := tx.Commit(); err != nil {
			logrus.Errorf("Transaction commit failed: %v", err)
		}
	}
	logrus.Info("Migration complete.")
}
func (s *State) getLatest() ([]ProfileData, error) {
query := `
SELECT u.display_name, ph.timestamp, ph.rank, ph.upload, ph.current_upload, ph.current_download, ph.points, ph.seeding_count
+43 -5
View File
@@ -5,6 +5,7 @@ import (
"fmt"
"html/template"
"net/http"
"time"
"github.com/sirupsen/logrus"
)
@@ -27,7 +28,7 @@ func (s *State) historyHandler(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Owner required", http.StatusBadRequest)
return
}
rows, err := s.db.Query(`SELECT u.display_name, ph.timestamp, ph.rank, ph.upload, ph.current_upload, ph.current_download, ph.points, ph.seeding_count FROM profile_history ph JOIN users u ON ph.user_id = u.id WHERE u.display_name = ? ORDER BY ph.timestamp ASC`, owner)
rows, err := s.db.Query(`SELECT ph.timestamp, ph.rank, ph.upload, ph.points, ph.seeding_count FROM profile_history ph JOIN users u ON ph.user_id = u.id WHERE u.display_name = ? ORDER BY ph.timestamp ASC`, owner)
if err != nil {
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
return
@@ -37,8 +38,7 @@ func (s *State) historyHandler(w http.ResponseWriter, r *http.Request) {
var history []ProfileData
for rows.Next() {
var p ProfileData
if err := rows.Scan(&p.Owner, &p.Timestamp, &p.Rank, &p.Upload, &p.CurrentUpload, &p.CurrentDownload, &p.Points, &p.SeedingCount); err != nil {
logrus.Errorf("Scan history failed: %v", err)
if err := rows.Scan(&p.Timestamp, &p.Rank, &p.Upload, &p.Points, &p.SeedingCount); err != nil {
continue
}
history = append(history, p)
@@ -53,9 +53,47 @@ func (s *State) historyModalHandler(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Owner required", http.StatusBadRequest)
return
}
fmt.Fprintf(w, `<div id="chart-data-container" data-owner="%s" x-init="renderChart('%s')"></div>`, owner, owner)
}
rows, err := s.db.Query(`SELECT ph.timestamp, ph.rank, ph.upload_bytes, ph.points, ph.seeding_count
FROM profile_history ph
JOIN users u ON ph.user_id = u.id
WHERE u.display_name = ?
ORDER BY ph.timestamp ASC`, owner)
if err != nil {
http.Error(w, "DB Error", http.StatusInternalServerError)
return
}
defer rows.Close()
res := CompactHistory{Owner: owner}
for rows.Next() {
var (
ts time.Time
rank int
uploadBytes int64
points int
seeding int
)
if err := rows.Scan(&ts, &rank, &uploadBytes, &points, &seeding); err == nil {
res.Timestamp = append(res.Timestamp, ts.Unix()*1000)
res.Rank = append(res.Rank, rank)
tib := float64(uploadBytes) / (1024 * 1024 * 1024 * 1024)
res.Upload = append(res.Upload, tib)
res.Points = append(res.Points, points)
res.Seeding = append(res.Seeding, seeding)
}
}
dataJSON, _ := json.Marshal(res)
fmt.Fprintf(w, `
<div id="chart-mount"
style="height: 100%%; width: 100%%;"
x-init='renderChart(%s)'>
</div>`, string(dataJSON))
}
func (s *State) rootHandler(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/" {
http.NotFound(w, r)
+11
View File
@@ -25,6 +25,7 @@ type ProfileData struct {
Timestamp time.Time `json:"timestamp"`
Rank int `json:"rank"`
Upload string `json:"upload"`
UploadBytes int64 `json:"upload_bytes"`
CurrentUpload string `json:"current_upload"`
CurrentDownload string `json:"current_download"`
Points int `json:"points"`
@@ -43,3 +44,13 @@ type State struct {
db *sql.DB
client *http.Client
}
// CompactHistory represents an optimized, columnar history format.
// The slices are parallel: element i of each slice belongs to the same
// snapshot. The single-letter JSON keys keep the payload embedded in the
// chart markup small.
type CompactHistory struct {
	Owner     string    `json:"owner"` // display name of the tracked user
	Timestamp []int64   `json:"t"`     // snapshot times, Unix milliseconds
	Upload    []float64 `json:"u"`     // total upload per snapshot, in TiB
	Rank      []int     `json:"r"`     // site rank per snapshot
	Points    []int     `json:"p"`     // points per snapshot
	Seeding   []int     `json:"s"`     // seeding torrent count per snapshot
}
+1 -1
View File
@@ -58,7 +58,7 @@ A simple Go project to scrape and track profile statistics (rank, upload, downlo
```yaml
services:
ncore-stats:
image: ghcr.io/skidoodle/ncore-stats:main
image: ghcr.io/skidoodle/ncore-stats:latest
container_name: ncore-stats
restart: unless-stopped
ports:
+28 -2
View File
@@ -76,8 +76,8 @@ func (s *State) scrapeAll(ctx context.Context) {
return
}
_, err = s.db.Exec(`INSERT INTO profile_history(user_id, timestamp, rank, upload, current_upload, current_download, points, seeding_count) VALUES(?, ?, ?, ?, ?, ?, ?, ?)`,
user.ID, profile.Timestamp, profile.Rank, profile.Upload, profile.CurrentUpload, profile.CurrentDownload, profile.Points, profile.SeedingCount)
_, err = s.db.Exec(`INSERT INTO profile_history(user_id, timestamp, rank, upload, upload_bytes, current_upload, current_download, points, seeding_count) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)`,
user.ID, profile.Timestamp, profile.Rank, profile.Upload, profile.UploadBytes, profile.CurrentUpload, profile.CurrentDownload, profile.Points, profile.SeedingCount)
if err != nil {
logrus.Errorf("[%s] DB log failed: %v", user.DisplayName, err)
} else {
@@ -123,6 +123,7 @@ func (s *State) fetchProfile(ctx context.Context, user User) (*ProfileData, erro
p.Rank, _ = strconv.Atoi(strings.TrimSuffix(value, "."))
} else if strings.Contains(label, "feltöltés") { // Upload
p.Upload = value
p.UploadBytes = parseToBytes(value)
} else if strings.Contains(label, "pontok") { // Points
p.Points, _ = strconv.Atoi(strings.ReplaceAll(value, " ", ""))
}
@@ -146,3 +147,28 @@ func (s *State) fetchProfile(ctx context.Context, user User) (*ProfileData, erro
return p, nil
}
// parseToBytes converts a human-readable size string such as "1.21 TiB"
// into a byte count. Thousands separators (commas) are tolerated. Unknown
// or absent units fall back to a multiplier of 1 (raw bytes); input that
// does not contain a number plus a unit yields 0.
func parseToBytes(value string) int64 {
	fields := strings.Fields(strings.ReplaceAll(value, ",", ""))
	if len(fields) < 2 {
		return 0
	}
	// A parse failure leaves amount at 0, which propagates to a 0 result.
	amount, _ := strconv.ParseFloat(fields[0], 64)

	// Binary (IEC) unit multipliers; anything unrecognized counts as bytes.
	multipliers := map[string]float64{
		"kib": 1 << 10,
		"mib": 1 << 20,
		"gib": 1 << 30,
		"tib": 1 << 40,
	}
	scale, ok := multipliers[strings.ToLower(fields[1])]
	if !ok {
		scale = 1
	}
	return int64(amount * scale)
}
+9 -24
View File
@@ -6,51 +6,35 @@ const config = {
let currentChart = null;
async function renderChart(owner) {
async function renderChart(data) {
const root = document.getElementById('modal-stats-root');
if (!root) return;
if (!root || !data) return;
try {
const response = await fetch(`${config.api.history}${encodeURIComponent(owner)}`); if (!response.ok) throw new Error('Network error');
const historyData = await response.json();
if (!historyData || historyData.length === 0) {
if (!data.t || data.t.length === 0) {
root.innerHTML = '<p class="stat-label" style="text-align: center; padding: 2rem; color: var(--muted);">No history available.</p>';
return;
}
const parseUploadValue = (value) => {
if (typeof value !== 'string') return 0;
const num = parseFloat(value.replace(/,/g, '').replace(/TiB|GiB|MiB|KiB|B/i, '').trim());
if (isNaN(num)) return 0;
const lowerVal = value.toLowerCase();
if (lowerVal.includes('tib')) return num;
if (lowerVal.includes('gib')) return num / 1024;
if (lowerVal.includes('mib')) return num / 1024 / 1024;
if (lowerVal.includes('kib')) return num / 1024 / 1024 / 1024;
return num / 1024 / 1024 / 1024 / 1024;
};
const series = [
{
name: 'Upload',
data: historyData.map(r => ({ x: new Date(r.timestamp).getTime(), y: parseUploadValue(r.upload) }))
data: data.t.map((ts, i) => ({ x: ts, y: data.u[i] }))
},
{
name: 'Rank',
data: historyData.map(r => ({ x: new Date(r.timestamp).getTime(), y: r.rank }))
data: data.t.map((ts, i) => ({ x: ts, y: data.r[i] }))
},
{
name: 'Points',
data: historyData.map(r => ({ x: new Date(r.timestamp).getTime(), y: r.points }))
data: data.t.map((ts, i) => ({ x: ts, y: data.p[i] }))
},
{
name: 'Seeding',
data: historyData.map(r => ({ x: new Date(r.timestamp).getTime(), y: r.seeding_count }))
data: data.t.map((ts, i) => ({ x: ts, y: data.s[i] }))
}
];
root.innerHTML = '<div id="chart-mount" style="height: 100%; width: 100%;"></div>';
const options = {
series: series,
chart: {
@@ -61,7 +45,7 @@ async function renderChart(owner) {
foreColor: '#71717a',
fontFamily: 'Inter, system-ui, sans-serif',
toolbar: { show: false },
animations: { enabled: true, easing: 'easeinout', speed: 800 }
animations: { enabled: false }
},
responsive: [
{
@@ -166,6 +150,7 @@ async function renderChart(owner) {
currentChart.render();
} catch (e) {
console.error(e);
root.innerHTML = `<div class="spinner-container"><p class="stat-label" style="color: #ef4444;">Error: ${e.message}</p></div>`;
}
}
+44
View File
@@ -179,6 +179,50 @@ main {
border-color: var(--accent);
}
/* Pill-style buttons for selecting the history chart's time range. */
.btn-range {
  background: var(--bg);
  border: 1px solid var(--border);
  color: var(--muted);
  padding: 0.4rem 0.8rem;
  font-size: 0.7rem;
  font-weight: 700;
  border-radius: 6px;
  cursor: pointer;
  transition: all 0.2s ease;
}

/* Brighten text and border on hover without shifting layout. */
.btn-range:hover {
  color: #fff;
  border-color: var(--muted);
}

/* Fills its parent and centers content (the spinner, or an error
   message rendered in its place). */
.spinner-container {
  display: flex;
  justify-content: center;
  align-items: center;
  height: 100%;
  width: 100%;
}

/* Circular loading indicator: the accent-colored top border produces a
   rotating arc via the spin animation below. */
.spinner {
  width: 30px;
  height: 30px;
  border: 3px solid var(--border);
  border-top: 3px solid var(--accent);
  border-radius: 50%;
  animation: spin 1s linear infinite;
}

/* One full continuous rotation, driving the .spinner animation. */
@keyframes spin {
  0% {
    transform: rotate(0deg);
  }
  100% {
    transform: rotate(360deg);
  }
}
.modal-overlay {
position: fixed;
inset: 0;