mirror of
https://github.com/skidoodle/safebin.git
synced 2026-04-28 11:17:42 +02:00
feat: initial release
This commit is contained in:
@@ -0,0 +1,48 @@
|
||||
name: release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
goreleaser:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
cache: true
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v6
|
||||
with:
|
||||
distribution: goreleaser
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -0,0 +1,3 @@
|
||||
storage/*
|
||||
# Added by goreleaser init:
|
||||
dist/
|
||||
@@ -0,0 +1,77 @@
|
||||
version: 2
|
||||
|
||||
before:
|
||||
hooks:
|
||||
- go mod tidy
|
||||
|
||||
builds:
|
||||
- env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- amd64
|
||||
- arm64
|
||||
ldflags:
|
||||
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
|
||||
flags:
|
||||
- -trimpath
|
||||
|
||||
archives:
|
||||
- name_template: >-
|
||||
{{ .ProjectName }}_
|
||||
{{- title .Os }}_
|
||||
{{- if eq .Arch "amd64" }}x86_64
|
||||
{{- else if eq .Arch "386" }}i386
|
||||
{{- else }}{{ .Arch }}{{ end }}
|
||||
formats: ["tar.gz"]
|
||||
files:
|
||||
- web/**/*
|
||||
- README.md
|
||||
- CHANGELOG.md
|
||||
|
||||
dockers:
|
||||
- image_templates:
|
||||
- "ghcr.io/skidoodle/safebin:{{ .Version }}-amd64"
|
||||
- "ghcr.io/skidoodle/safebin:latest-amd64"
|
||||
use: buildx
|
||||
goos: linux
|
||||
goarch: amd64
|
||||
dockerfile: Dockerfile.release
|
||||
extra_files:
|
||||
- web
|
||||
build_flag_templates:
|
||||
- "--platform=linux/amd64"
|
||||
- "--label=org.opencontainers.image.title={{ .ProjectName }}"
|
||||
- "--label=org.opencontainers.image.version={{ .Version }}"
|
||||
|
||||
- image_templates:
|
||||
- "ghcr.io/skidoodle/safebin:{{ .Version }}-arm64"
|
||||
- "ghcr.io/skidoodle/safebin:latest-arm64"
|
||||
use: buildx
|
||||
goos: linux
|
||||
goarch: arm64
|
||||
dockerfile: Dockerfile.release
|
||||
extra_files:
|
||||
- web
|
||||
build_flag_templates:
|
||||
- "--platform=linux/arm64"
|
||||
- "--label=org.opencontainers.image.title={{ .ProjectName }}"
|
||||
- "--label=org.opencontainers.image.version={{ .Version }}"
|
||||
|
||||
docker_manifests:
|
||||
- name_template: "ghcr.io/skidoodle/safebin:{{ .Version }}"
|
||||
image_templates:
|
||||
- "ghcr.io/skidoodle/safebin:{{ .Version }}-amd64"
|
||||
- "ghcr.io/skidoodle/safebin:{{ .Version }}-arm64"
|
||||
- name_template: "ghcr.io/skidoodle/safebin:latest"
|
||||
image_templates:
|
||||
- "ghcr.io/skidoodle/safebin:latest-amd64"
|
||||
- "ghcr.io/skidoodle/safebin:latest-arm64"
|
||||
|
||||
changelog:
|
||||
sort: asc
|
||||
filters:
|
||||
exclude:
|
||||
- "^docs:"
|
||||
- "^test:"
|
||||
@@ -0,0 +1,44 @@
|
||||
# Changelog
|
||||
|
||||
## [3.0.0](https://github.com/skidoodle/safebin/compare/v2.0.0...v3.0.0) (2026-01-16)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* Docker volume paths and environment variables have been updated. The internal storage path in the container has changed from `/home/appuser/storage` to `/app/storage`. Existing deployments must update their volume mappings and environment variable names to maintain persistence.
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
* relocate core logic to internal package and modernize project structure ([43be383](https://github.com/skidoodle/safebin/commit/43be383fdbfb0263036284b8beb0ce3c646db87c))
|
||||
|
||||
## [2.0.0](https://github.com/skidoodle/safebin/compare/v1.1.0...v2.0.0) (2026-01-16)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* The encryption scheme and URL structure have been completely redesigned. Links generated with previous versions of safebin are no longer compatible and cannot be decrypted by this version.
|
||||
|
||||
### Features
|
||||
|
||||
* overhaul encryption to zero-knowledge at rest and modernize UI ([599347e](https://github.com/skidoodle/safebin/commit/599347e867444288fa58f8e358269121c5d32e36))
|
||||
|
||||
## [1.1.0](https://github.com/skidoodle/safebin/compare/v1.0.1...v1.1.0) (2026-01-14)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* implement chunked uploads and environment-based configuration ([1ccc80a](https://github.com/skidoodle/safebin/commit/1ccc80ad4e5b949a8f1d1f3a8b3b4e8c4d2e1353))
|
||||
|
||||
## [1.0.1](https://github.com/skidoodle/safebin/compare/v1.0.0...v1.0.1) (2026-01-14)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* better dockerfile ([c1ecbe5](https://github.com/skidoodle/safebin/commit/c1ecbe567a24eb4e755f19fee68422025f3b15b2))
|
||||
|
||||
## 1.0.0 (2026-01-13)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add automated release and docker workflow ([e40e6d0](https://github.com/skidoodle/safebin/commit/e40e6d01afd0067bba5d0cf4a9b1ff3d7122259f))
|
||||
+38
@@ -0,0 +1,38 @@
|
||||
FROM --platform=$BUILDPLATFORM golang:1.25.5 AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY . .
|
||||
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
|
||||
-ldflags="-s -w" \
|
||||
-trimpath \
|
||||
-o /app/safebin .
|
||||
|
||||
FROM debian:trixie-slim
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/skidoodle/safebin"
|
||||
LABEL org.opencontainers.image.description="Minimalist, self-hosted file storage with Zero-Knowledge at Rest encryption."
|
||||
LABEL org.opencontainers.image.licenses="GPL-2.0-only"
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN useradd -m -u 10001 -s /bin/bash appuser
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/safebin .
|
||||
COPY --from=builder /app/web ./web
|
||||
|
||||
RUN mkdir -p /app/storage && chown 10001:10001 /app/storage
|
||||
VOLUME ["/app/storage"]
|
||||
|
||||
USER 10001
|
||||
EXPOSE 8080
|
||||
|
||||
ENTRYPOINT ["/app/safebin"]
|
||||
@@ -0,0 +1,19 @@
|
||||
FROM debian:trixie-slim
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN useradd -m -u 10001 -s /bin/bash appuser
|
||||
WORKDIR /app
|
||||
|
||||
COPY safebin .
|
||||
COPY web ./web
|
||||
|
||||
RUN mkdir -p /app/storage && chown 10001:10001 /app/storage
|
||||
VOLUME ["/app/storage"]
|
||||
|
||||
USER 10001
|
||||
EXPOSE 8080
|
||||
|
||||
ENTRYPOINT ["/app/safebin"]
|
||||
@@ -0,0 +1,97 @@
|
||||
# safebin
|
||||
|
||||
`safebin` is a minimalist, self-hosted file storage service with **Zero-Knowledge at Rest** encryption.
|
||||
|
||||
## Features
|
||||
|
||||
- **Server-Side Encryption**: Files are encrypted using AES-256-GCM before touching the disk.
|
||||
- **Log-Safe Keys**: The decryption key is stored in the URL fragment (`#`). Since fragments are never sent to the server, the key never appears in your HTTP access logs.
|
||||
- **Integrity**: Uses GCM (Galois/Counter Mode) to ensure files cannot be tampered with while stored.
|
||||
- **Deterministic**: Identical files result in the same ID, allowing for storage deduplication.
|
||||
|
||||
## Usage
|
||||
|
||||
You can interact with the service via the web interface or through the command line.
|
||||
|
||||
### Uploading a file
|
||||
|
||||
```bash
|
||||
curl -F 'file=@archive.zip' https://bin.example.com
|
||||
```
|
||||
|
||||
The server will return a URL containing the file ID and the decryption key:
|
||||
`https://bin.example.com/vS6_1_8pS-Y_8-8_...`
|
||||
|
||||
### Downloading a file
|
||||
|
||||
Simply open the link in a browser or use `curl`:
|
||||
|
||||
```bash
|
||||
curl https://bin.example.com/vS6_1_8pS-Y_8-8_... > archive.zip
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
`safebin` is configured via command-line flags:
|
||||
|
||||
| Flag | Description | Default |
|
||||
| :--- | :--- | :--- |
|
||||
| `-h` | Bind address for the server. | `0.0.0.0` |
|
||||
| `-p` | Port to listen on. | `8080` |
|
||||
| `-s` | Directory where encrypted files are stored. | `./storage` |
|
||||
| `-m` | Maximum file size in MB. | `512` |
|
||||
|
||||
## Running Locally
|
||||
|
||||
### With Docker
|
||||
|
||||
```bash
|
||||
git clone https://github.com/skidoodle/safebin
|
||||
cd safebin
|
||||
docker compose -f compose.dev.yaml up --build
|
||||
```
|
||||
|
||||
### Without Docker
|
||||
|
||||
Requires Go 1.25 or higher.
|
||||
|
||||
```bash
|
||||
git clone https://github.com/skidoodle/safebin
|
||||
cd safebin
|
||||
go build -o safebin .
|
||||
./safebin -p 8080 -s ./data
|
||||
```
|
||||
|
||||
## Deploying
|
||||
|
||||
### Docker Compose
|
||||
|
||||
The easiest way to deploy is using the provided `compose.yaml`.
|
||||
|
||||
```yaml
|
||||
services:
|
||||
safebin:
|
||||
image: ghcr.io/skidoodle/safebin:latest
|
||||
container_name: safebin
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- 8080:8080
|
||||
environment:
|
||||
- SAFEBIN_HOST=0.0.0.0
|
||||
- SAFEBIN_PORT=8080
|
||||
- SAFEBIN_STORAGE=/app/storage
|
||||
- SAFEBIN_MAX_MB=512
|
||||
volumes:
|
||||
- data:/app/storage
|
||||
|
||||
volumes:
|
||||
data:
|
||||
```
|
||||
|
||||
## Retention Policy
|
||||
|
||||
The server runs a cleanup task every hour. Retention is calculated using a cubic scaling formula to balance disk usage:
|
||||
- **Small files (< 1MB)**: Up to 365 days.
|
||||
- **Large files (512MB)**: 24 hours.
|
||||
|
||||
This ensures that the server doesn't run out of disk space due to large binary blobs while allowing small text files or images to persist for longer periods.
|
||||
@@ -0,0 +1,8 @@
|
||||
services:
|
||||
safebin:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
container_name: safebin-dev
|
||||
ports:
|
||||
- 8080:8080
|
||||
@@ -0,0 +1,17 @@
|
||||
services:
|
||||
safebin:
|
||||
image: ghcr.io/skidoodle/safebin:latest
|
||||
container_name: safebin
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- 8080:8080
|
||||
environment:
|
||||
- SAFEBIN_HOST=0.0.0.0
|
||||
- SAFEBIN_PORT=8080
|
||||
- SAFEBIN_STORAGE=/app/storage
|
||||
- SAFEBIN_MAX_MB=512
|
||||
volumes:
|
||||
- data:/app/storage
|
||||
|
||||
volumes:
|
||||
data:
|
||||
@@ -0,0 +1,58 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"log/slog"
|
||||
"os"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Config holds the runtime settings resolved from environment
// variables and command-line flags (see LoadConfig).
type Config struct {
	Addr       string // host:port the HTTP server binds to
	StorageDir string // directory for encrypted files and the tmp/ scratch area
	MaxMB      int64  // maximum accepted upload size, in megabytes
}
|
||||
|
||||
// App bundles the shared dependencies handed to every HTTP handler.
type App struct {
	Conf   Config             // resolved runtime configuration
	Tmpl   *template.Template // parsed HTML templates (see ParseTemplates)
	Logger *slog.Logger       // structured logger
}
|
||||
|
||||
func LoadConfig() Config {
|
||||
h := getEnv("SAFEBIN_HOST", "0.0.0.0")
|
||||
p := getEnvInt("SAFEBIN_PORT", 8080)
|
||||
s := getEnv("SAFEBIN_STORAGE", "./storage")
|
||||
mDefault := int64(getEnvInt("SAFEBIN_MAX_MB", 512))
|
||||
|
||||
var m int64
|
||||
flag.StringVar(&h, "h", h, "Bind address")
|
||||
flag.IntVar(&p, "p", p, "Port")
|
||||
flag.StringVar(&s, "s", s, "Storage directory")
|
||||
flag.Int64Var(&m, "m", mDefault, "Max file size in MB")
|
||||
flag.Parse()
|
||||
|
||||
return Config{Addr: fmt.Sprintf("%s:%d", h, p), StorageDir: s, MaxMB: m}
|
||||
}
|
||||
|
||||
func getEnv(k, f string) string {
|
||||
if v, ok := os.LookupEnv(k); ok {
|
||||
return v
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func getEnvInt(k string, f int) int {
|
||||
if v, ok := os.LookupEnv(k); ok {
|
||||
if i, err := strconv.Atoi(v); err == nil {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
// ParseTemplates loads every HTML template under ./web/templates.
// It panics (via template.Must) when the glob matches no files or any
// template fails to parse, so a broken deployment fails fast at startup.
func ParseTemplates() *template.Template {
	return template.Must(template.ParseGlob("./web/templates/*.html"))
}
|
||||
@@ -0,0 +1,174 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
"mime"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
"github.com/skidoodle/safebin/internal/crypto"
|
||||
)
|
||||
|
||||
// reUploadID validates client-supplied chunk-upload IDs: 10-50
// alphanumerics, so they are safe to embed in filesystem paths.
var reUploadID = regexp.MustCompile(`^[a-zA-Z0-9]{10,50}$`)
|
||||
|
||||
func (app *App) HandleHome(w http.ResponseWriter, r *http.Request) {
|
||||
err := app.Tmpl.ExecuteTemplate(w, "base", map[string]any{
|
||||
"MaxMB": app.Conf.MaxMB,
|
||||
"Host": r.Host,
|
||||
})
|
||||
if err != nil {
|
||||
app.Logger.Error("Template error", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (app *App) HandleUpload(w http.ResponseWriter, r *http.Request) {
|
||||
limit := (app.Conf.MaxMB << 20) + (1 << 20)
|
||||
r.Body = http.MaxBytesReader(w, r.Body, limit)
|
||||
|
||||
file, header, err := r.FormFile("file")
|
||||
if err != nil {
|
||||
app.SendError(w, r, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
tmpPath := filepath.Join(app.Conf.StorageDir, "tmp", fmt.Sprintf("up_%d", os.Getpid()))
|
||||
tmp, _ := os.Create(tmpPath)
|
||||
defer os.Remove(tmpPath)
|
||||
defer tmp.Close()
|
||||
|
||||
if _, err := io.Copy(tmp, file); err != nil {
|
||||
app.SendError(w, r, http.StatusRequestEntityTooLarge)
|
||||
return
|
||||
}
|
||||
|
||||
app.FinalizeFile(w, r, tmp, header.Filename)
|
||||
}
|
||||
|
||||
func (app *App) HandleChunk(w http.ResponseWriter, r *http.Request) {
|
||||
uid := r.FormValue("upload_id")
|
||||
idx, _ := strconv.Atoi(r.FormValue("index"))
|
||||
|
||||
if !reUploadID.MatchString(uid) || idx > 1000 {
|
||||
app.SendError(w, r, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
file, _, err := r.FormFile("chunk")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
dir := filepath.Join(app.Conf.StorageDir, "tmp", uid)
|
||||
os.MkdirAll(dir, 0700)
|
||||
|
||||
dest, _ := os.Create(filepath.Join(dir, strconv.Itoa(idx)))
|
||||
defer dest.Close()
|
||||
io.Copy(dest, file)
|
||||
}
|
||||
|
||||
func (app *App) HandleFinish(w http.ResponseWriter, r *http.Request) {
|
||||
uid := r.FormValue("upload_id")
|
||||
total, _ := strconv.Atoi(r.FormValue("total"))
|
||||
|
||||
if !reUploadID.MatchString(uid) || total > 1000 {
|
||||
app.SendError(w, r, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
tmpPath := filepath.Join(app.Conf.StorageDir, "tmp", "m_"+uid)
|
||||
merged, _ := os.Create(tmpPath)
|
||||
defer os.Remove(tmpPath)
|
||||
defer merged.Close()
|
||||
|
||||
for i := range total {
|
||||
partPath := filepath.Join(app.Conf.StorageDir, "tmp", uid, strconv.Itoa(i))
|
||||
part, err := os.Open(partPath)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
io.Copy(merged, part)
|
||||
part.Close()
|
||||
}
|
||||
|
||||
app.FinalizeFile(w, r, merged, r.FormValue("filename"))
|
||||
os.RemoveAll(filepath.Join(app.Conf.StorageDir, "tmp", uid))
|
||||
}
|
||||
|
||||
// HandleGetFile serves a stored file. The URL slug is
// <22-char base64url key><original extension>; the first 22 characters
// decode to the 16-byte AES key, which together with the extension
// also determines the storage ID, so a wrong key simply maps to a
// nonexistent path.
func (app *App) HandleGetFile(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	// 22 base64url characters encode exactly 16 raw bytes; anything
	// shorter cannot contain a key.
	if len(slug) < 22 {
		app.SendError(w, r, http.StatusBadRequest)
		return
	}

	keyBase64 := slug[:22]
	ext := slug[22:]

	key, err := base64.RawURLEncoding.DecodeString(keyBase64)
	if err != nil || len(key) != 16 {
		app.SendError(w, r, http.StatusUnauthorized)
		return
	}

	// Recompute the content-addressed ID from key+ext (see crypto.GetID).
	id := crypto.GetID(key, ext)
	path := filepath.Join(app.Conf.StorageDir, id)

	info, err := os.Stat(path)
	if err != nil {
		app.SendError(w, r, http.StatusNotFound)
		return
	}

	f, _ := os.Open(path)
	defer f.Close()

	// The Decryptor turns the chunked ciphertext into a seekable
	// plaintext stream, which lets ServeContent honor Range requests.
	streamer, _ := crypto.NewGCMStreamer(key)
	decryptor := crypto.NewDecryptor(f, streamer.AEAD, info.Size())

	contentType := mime.TypeByExtension(ext)
	if contentType == "" {
		contentType = "application/octet-stream"
	}

	w.Header().Set("Content-Type", contentType)
	// Restrictive, sandboxed CSP for user-supplied content served inline.
	w.Header().Set("Content-Security-Policy", "default-src 'none'; img-src 'self' data:; media-src 'self' data:; style-src 'unsafe-inline'; sandbox allow-forms allow-scripts allow-downloads allow-same-origin")
	w.Header().Set("X-Content-Type-Options", "nosniff")
	w.Header().Set("Content-Disposition", fmt.Sprintf("inline; filename=%q", slug))

	http.ServeContent(w, r, slug, info.ModTime(), decryptor)
}
|
||||
|
||||
func (app *App) FinalizeFile(w http.ResponseWriter, r *http.Request, src *os.File, filename string) {
|
||||
src.Seek(0, 0)
|
||||
key, _ := crypto.DeriveKey(src)
|
||||
|
||||
ext := filepath.Ext(filename)
|
||||
id := crypto.GetID(key, ext)
|
||||
|
||||
src.Seek(0, 0)
|
||||
finalPath := filepath.Join(app.Conf.StorageDir, id)
|
||||
|
||||
if _, err := os.Stat(finalPath); err == nil {
|
||||
app.RespondWithLink(w, r, key, filename)
|
||||
return
|
||||
}
|
||||
|
||||
out, _ := os.Create(finalPath + ".tmp")
|
||||
streamer, _ := crypto.NewGCMStreamer(key)
|
||||
if err := streamer.EncryptStream(out, src); err != nil {
|
||||
out.Close()
|
||||
os.Remove(finalPath + ".tmp")
|
||||
app.SendError(w, r, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
out.Close()
|
||||
os.Rename(finalPath+".tmp", finalPath)
|
||||
app.RespondWithLink(w, r, key, filename)
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
func (app *App) Routes() *http.ServeMux {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
fs := http.FileServer(http.Dir("./web/static"))
|
||||
mux.Handle("GET /static/", http.StripPrefix("/static/", fs))
|
||||
|
||||
mux.HandleFunc("GET /{$}", app.HandleHome)
|
||||
mux.HandleFunc("POST /{$}", app.HandleUpload)
|
||||
mux.HandleFunc("POST /upload/chunk", app.HandleChunk)
|
||||
mux.HandleFunc("POST /upload/finish", app.HandleFinish)
|
||||
mux.HandleFunc("GET /{slug}", app.HandleGetFile)
|
||||
|
||||
return mux
|
||||
}
|
||||
|
||||
// RespondWithLink reports a successful upload back to the client.
// AJAX requests (X-Requested-With: XMLHttpRequest) receive an HTML
// fragment with a copyable link; everything else (e.g. curl) gets the
// plain URL terminated by a newline.
//
// The link embeds the base64url-encoded key plus the original file
// extension — the slug format expected by HandleGetFile.
func (app *App) RespondWithLink(w http.ResponseWriter, r *http.Request, key []byte, originalName string) {
	keySlug := base64.RawURLEncoding.EncodeToString(key)
	ext := filepath.Ext(originalName)

	// Link without scheme; the AJAX UI prepends the page's protocol,
	// the plain-text path adds it below.
	link := fmt.Sprintf("%s/%s%s", r.Host, keySlug, ext)

	if r.Header.Get("X-Requested-With") == "XMLHttpRequest" {
		fmt.Fprintf(w, `
<div style="text-align: left;">
<div class="dim" style="margin-bottom: 8px;">Upload Complete:</div>
<div class="copy-box">
<input type="text" value="%s" id="share-url" readonly onclick="this.select()">
<button onclick="copyToClipboard(this)">Copy</button>
</div>
<button class="reset-btn" onclick="resetUI()">Upload another</button>
</div>`, link)
		return
	}

	scheme := "https"
	// r.TLS is nil when the request arrived over plain HTTP.
	// NOTE(review): behind a TLS-terminating proxy this still reports
	// "http" — confirm whether X-Forwarded-Proto should be honored.
	if r.TLS == nil {
		scheme = "http"
	}
	fmt.Fprintf(w, "%s://%s\n", scheme, link)
}
|
||||
|
||||
func (app *App) SendError(w http.ResponseWriter, r *http.Request, code int) {
|
||||
if r.Header.Get("X-Requested-With") == "XMLHttpRequest" {
|
||||
w.WriteHeader(code)
|
||||
fmt.Fprintf(w, `<div class="error-text">Error %d</div><button class="reset-btn" onclick="resetUI()">Try again</button>`, code)
|
||||
return
|
||||
}
|
||||
http.Error(w, http.StatusText(code), code)
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
)
|
||||
|
||||
func (app *App) StartCleanupTask(ctx context.Context) {
|
||||
ticker := time.NewTicker(1 * time.Hour)
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-ticker.C:
|
||||
app.CleanDir(app.Conf.StorageDir, false)
|
||||
app.CleanDir(filepath.Join(app.Conf.StorageDir, "tmp"), true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (app *App) CleanDir(path string, isTmp bool) {
|
||||
entries, _ := os.ReadDir(path)
|
||||
for _, entry := range entries {
|
||||
info, _ := entry.Info()
|
||||
expiry := 4 * time.Hour
|
||||
if !isTmp {
|
||||
expiry = CalculateRetention(info.Size(), app.Conf.MaxMB)
|
||||
}
|
||||
|
||||
if time.Since(info.ModTime()) > expiry {
|
||||
os.RemoveAll(filepath.Join(path, entry.Name()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func CalculateRetention(fileSize int64, maxMB int64) time.Duration {
|
||||
const (
|
||||
minAge = 24 * time.Hour
|
||||
maxAge = 365 * 24 * time.Hour
|
||||
)
|
||||
ratio := math.Max(0, math.Min(1, float64(fileSize)/float64(maxMB<<20)))
|
||||
retention := float64(maxAge) * math.Pow(1.0-ratio, 3)
|
||||
if retention < float64(minAge) {
|
||||
return minAge
|
||||
}
|
||||
return time.Duration(retention)
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
package crypto
|
||||
|
||||
import (
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/binary"
|
||||
"io"
|
||||
)
|
||||
|
||||
const (
	// GCMChunkSize is the plaintext chunk size; each stored chunk is
	// GCMChunkSize + AEAD overhead bytes of ciphertext.
	GCMChunkSize = 64 * 1024
	// NonceSize is the AES-GCM nonce length in bytes.
	NonceSize = 12
)
|
||||
|
||||
func DeriveKey(r io.Reader) ([]byte, error) {
|
||||
h := sha256.New()
|
||||
if _, err := io.Copy(h, r); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return h.Sum(nil)[:16], nil
|
||||
}
|
||||
|
||||
// GetID derives the public storage ID from the encryption key and the
// file extension: base64url(SHA-256(key || ext)[:9]), 12 URL-safe
// characters. Binding ext into the hash means the same key stored
// under different extensions gets distinct IDs.
func GetID(key []byte, ext string) string {
	material := append(append([]byte{}, key...), ext...)
	digest := sha256.Sum256(material)
	return base64.RawURLEncoding.EncodeToString(digest[:9])
}
|
||||
|
||||
// GCMStreamer encrypts data in fixed-size chunks with AES-GCM, using
// the chunk index as the nonce counter so each chunk is independently
// sealed (and independently seekable on decryption — see Decryptor).
type GCMStreamer struct {
	AEAD cipher.AEAD // AES-GCM instance shared by the encrypt and decrypt paths
}
|
||||
|
||||
func NewGCMStreamer(key []byte) (*GCMStreamer, error) {
|
||||
b, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
g, err := cipher.NewGCM(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &GCMStreamer{AEAD: g}, nil
|
||||
}
|
||||
|
||||
// EncryptStream reads src in GCMChunkSize pieces and writes each piece
// to dst sealed with AES-GCM. The nonce for chunk i is 12 bytes with
// the big-endian chunk index in bytes 4..11 (bytes 0..3 zero); indices
// never repeat within one stream, and each file is encrypted under its
// own content-derived key, so nonces are not reused under a key.
//
// Each output chunk is n + AEAD.Overhead() bytes — the layout
// NewDecryptor relies on to compute the plaintext size.
func (g *GCMStreamer) EncryptStream(dst io.Writer, src io.Reader) error {
	buf := make([]byte, GCMChunkSize)
	var chunkIdx uint64 = 0
	for {
		// ReadFull yields a full chunk, the final short chunk
		// (io.ErrUnexpectedEOF), or a clean io.EOF with n == 0.
		n, err := io.ReadFull(src, buf)
		if n > 0 {
			nonce := make([]byte, NonceSize)
			binary.BigEndian.PutUint64(nonce[4:], chunkIdx)
			ciphertext := g.AEAD.Seal(nil, nonce, buf[:n], nil)
			if _, werr := dst.Write(ciphertext); werr != nil {
				return werr
			}
			chunkIdx++
		}
		if err == io.EOF || err == io.ErrUnexpectedEOF {
			break
		}
		if err != nil {
			return err
		}
	}
	return nil
}
|
||||
@@ -0,0 +1,92 @@
|
||||
package crypto
|
||||
|
||||
import (
|
||||
"crypto/cipher"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"io"
|
||||
)
|
||||
|
||||
// Decryptor is an io.ReadSeeker that decrypts a chunked AES-GCM
// stream produced by GCMStreamer.EncryptStream, decrypting one chunk
// per Read and supporting random access (e.g. HTTP Range requests).
type Decryptor struct {
	rs     io.ReadSeeker // underlying ciphertext source
	aead   cipher.AEAD   // same AES-GCM parameters used at encrypt time
	size   int64         // total plaintext size, derived in NewDecryptor
	offset int64         // current plaintext read position
}
|
||||
|
||||
// NewDecryptor wraps rs (the ciphertext) in a Decryptor, deriving the
// plaintext size from the encrypted size: every full chunk occupies
// GCMChunkSize + Overhead bytes on disk, and a trailing partial chunk
// carries the same per-chunk overhead.
func NewDecryptor(rs io.ReadSeeker, aead cipher.AEAD, encryptedSize int64) *Decryptor {
	overhead := int64(aead.Overhead())
	fullBlocks := encryptedSize / (GCMChunkSize + overhead)
	remainder := encryptedSize % (GCMChunkSize + overhead)

	plainSize := (fullBlocks * GCMChunkSize)
	if remainder > overhead {
		// Partial final chunk: strip its GCM tag overhead.
		plainSize += (remainder - overhead)
	}

	return &Decryptor{
		rs:   rs,
		aead: aead,
		size: plainSize,
	}
}
|
||||
|
||||
// Read decrypts and returns plaintext starting at the current offset.
// Each call handles at most one chunk: it seeks to the ciphertext
// chunk containing the offset, decrypts that whole chunk, and copies
// the portion past the intra-chunk offset into p. GCM authentication
// failures (tampered or truncated data) surface as errors.
func (d *Decryptor) Read(p []byte) (int, error) {
	if d.offset >= d.size {
		return 0, io.EOF
	}

	// Locate the chunk containing the current plaintext offset and
	// the position within it.
	chunkIdx := d.offset / GCMChunkSize
	overhang := d.offset % GCMChunkSize

	overhead := int64(d.aead.Overhead())
	actualChunkSize := int64(GCMChunkSize + overhead)

	_, err := d.rs.Seek(chunkIdx*actualChunkSize, io.SeekStart)
	if err != nil {
		return 0, err
	}

	// The final chunk may be short; ErrUnexpectedEOF is expected there.
	encrypted := make([]byte, actualChunkSize)
	n, err := io.ReadFull(d.rs, encrypted)
	if err != nil && err != io.ErrUnexpectedEOF {
		return 0, err
	}

	// Reconstruct the nonce exactly as EncryptStream built it:
	// big-endian chunk index in bytes 4..11.
	nonce := make([]byte, NonceSize)
	binary.BigEndian.PutUint64(nonce[4:], uint64(chunkIdx))

	plaintext, err := d.aead.Open(nil, nonce, encrypted[:n], nil)
	if err != nil {
		return 0, err
	}

	if overhang >= int64(len(plaintext)) {
		return 0, io.EOF
	}

	// Copy the remainder of this chunk; callers loop for more.
	available := plaintext[overhang:]
	nCopied := copy(p, available)
	d.offset += int64(nCopied)

	return nCopied, nil
}
|
||||
|
||||
func (d *Decryptor) Seek(offset int64, whence int) (int64, error) {
|
||||
var abs int64
|
||||
switch whence {
|
||||
case io.SeekStart:
|
||||
abs = offset
|
||||
case io.SeekCurrent:
|
||||
abs = d.offset + offset
|
||||
case io.SeekEnd:
|
||||
abs = d.size + offset
|
||||
default:
|
||||
return 0, errors.New("invalid whence")
|
||||
}
|
||||
if abs < 0 {
|
||||
return 0, errors.New("negative bias")
|
||||
}
|
||||
d.offset = abs
|
||||
return abs, nil
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/skidoodle/safebin/internal/app"
|
||||
)
|
||||
|
||||
// main wires configuration, logging, storage setup, the hourly cleanup
// task, and an HTTP server with graceful shutdown on SIGINT/SIGTERM.
func main() {
	cfg := app.LoadConfig()
	logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug}))

	logger.Info("Initializing Safebin Server",
		"storage_dir", cfg.StorageDir,
		"max_file_size", fmt.Sprintf("%dMB", cfg.MaxMB),
	)

	// Storage (including its tmp/ scratch area) must exist before any
	// upload handler runs; fail fast if it cannot be created.
	if err := os.MkdirAll(fmt.Sprintf("%s/tmp", cfg.StorageDir), 0700); err != nil {
		logger.Error("Failed to initialize storage directory", "err", err)
		os.Exit(1)
	}

	application := &app.App{
		Conf:   cfg,
		Logger: logger,
		Tmpl:   app.ParseTemplates(),
	}

	// ctx is cancelled on SIGINT/SIGTERM; it drives both the cleanup
	// task and the shutdown sequence below.
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	go application.StartCleanupTask(ctx)

	// Long read/write timeouts accommodate large uploads/downloads
	// (up to MaxMB, 512MB by default).
	srv := &http.Server{
		Addr:         cfg.Addr,
		Handler:      application.Routes(),
		ReadTimeout:  10 * time.Minute,
		WriteTimeout: 10 * time.Minute,
	}

	go func() {
		application.Logger.Info("Server is ready and listening", "addr", cfg.Addr)
		if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
			application.Logger.Error("Server failed to start", "err", err)
			os.Exit(1)
		}
	}()

	// Block until a shutdown signal arrives.
	<-ctx.Done()
	application.Logger.Info("Shutting down gracefully...")

	// Give in-flight requests up to 10 seconds to complete.
	shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	if err := srv.Shutdown(shutdownCtx); err != nil {
		application.Logger.Error("Forced shutdown", "err", err)
	}
	application.Logger.Info("Server stopped")
}
|
||||
@@ -0,0 +1,110 @@
|
||||
:root {
|
||||
--bg: #0d1117;
|
||||
--fg: #adbac7;
|
||||
--accent: #4493f8;
|
||||
--border: #30363d;
|
||||
--success: #3fb950;
|
||||
--header-white: #f0f6fc;
|
||||
}
|
||||
|
||||
body {
|
||||
background: var(--bg);
|
||||
color: var(--fg);
|
||||
font-family: -apple-system, system-ui, sans-serif;
|
||||
margin: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 100%;
|
||||
max-width: 600px;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.header {
|
||||
margin-bottom: 30px;
|
||||
border-left: 3px solid var(--accent);
|
||||
padding-left: 16px;
|
||||
}
|
||||
|
||||
.upload-area {
|
||||
border: 2px dashed var(--border);
|
||||
border-radius: 12px;
|
||||
padding: 60px 20px;
|
||||
text-align: center;
|
||||
cursor: pointer;
|
||||
background: #161b22;
|
||||
transition: 0.2s;
|
||||
}
|
||||
|
||||
.upload-area:hover,
|
||||
.upload-area.dragover {
|
||||
border-color: var(--accent);
|
||||
background: #1c2128;
|
||||
}
|
||||
|
||||
.progress-bar {
|
||||
height: 6px;
|
||||
background: var(--border);
|
||||
border-radius: 10px;
|
||||
margin: 25px 0;
|
||||
overflow: hidden;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.progress-fill {
|
||||
height: 100%;
|
||||
background: var(--accent);
|
||||
width: 0%;
|
||||
transition: width 0.3s;
|
||||
}
|
||||
|
||||
.copy-box {
|
||||
display: flex;
|
||||
margin-top: 20px;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
input[type="text"] {
|
||||
flex: 1;
|
||||
background: #0d1117;
|
||||
border: 1px solid var(--border);
|
||||
color: var(--success);
|
||||
padding: 12px;
|
||||
border-radius: 6px;
|
||||
font-family: monospace;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
button {
|
||||
background: var(--accent);
|
||||
color: white;
|
||||
border: none;
|
||||
padding: 10px 20px;
|
||||
border-radius: 6px;
|
||||
cursor: pointer;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.reset-btn {
|
||||
background: transparent;
|
||||
color: var(--fg);
|
||||
text-decoration: underline;
|
||||
margin-top: 20px;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.dim {
|
||||
color: #768390;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.error-text {
|
||||
color: #f85149;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
@@ -0,0 +1,83 @@
|
||||
// Tiny DOM lookup helper.
const $ = (id) => document.getElementById(id);
const dropZone = $("drop-zone");
const fileInput = $("file-input");

// Wire up click-to-browse and drag & drop. Skipped entirely on pages
// without the drop zone.
if (dropZone) {
  dropZone.onclick = () => {
    // Only open the file picker while idle (not mid-upload).
    if ($("idle-state").style.display !== "none") fileInput.click();
  };

  fileInput.onchange = () => {
    if (fileInput.files[0]) handleUpload(fileInput.files[0]);
  };

  // Highlight the drop zone while a file is dragged over it; the
  // preventDefault calls are required so the browser allows dropping.
  ["dragenter", "dragover"].forEach((n) =>
    dropZone.addEventListener(n, (e) => {
      e.preventDefault();
      dropZone.classList.add("dragover");
    }),
  );

  ["dragleave", "drop"].forEach((n) =>
    dropZone.addEventListener(n, (e) => {
      e.preventDefault();
      dropZone.classList.remove("dragover");
    }),
  );

  // Start the upload with the first dropped file.
  dropZone.addEventListener("drop", (e) => {
    e.preventDefault();
    if (e.dataTransfer.files.length) handleUpload(e.dataTransfer.files[0]);
  });
}
|
||||
|
||||
// Uploads a file in 8MB chunks, then asks the server to merge them.
// Shows progress in the UI and swaps in the server's HTML response
// (share link or error fragment) when done.
async function handleUpload(file) {
  $("idle-state").style.display = "none";
  $("busy-state").style.display = "block";

  // The server validates upload IDs against /^[a-zA-Z0-9]{10,50}$/.
  // Math.random().toString(36).substring(2, 15) can produce fewer
  // than 10 characters (e.g. 0.5 -> "0.i" -> "i"), which made the
  // server reject the upload with a 400. Generate a fixed-length,
  // CSPRNG-backed alphanumeric ID instead.
  const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
  const uploadID = Array.from(
    crypto.getRandomValues(new Uint8Array(16)),
    (b) => alphabet[b % alphabet.length],
  ).join("");
  const chunkSize = 1024 * 1024 * 8;
  const total = Math.ceil(file.size / chunkSize);

  try {
    for (let i = 0; i < total; i++) {
      const fd = new FormData();
      fd.append("upload_id", uploadID);
      fd.append("index", i);
      fd.append("chunk", file.slice(i * chunkSize, (i + 1) * chunkSize));
      const res = await fetch("/upload/chunk", { method: "POST", body: fd });
      if (!res.ok) throw new Error();
      $("p-fill").style.width = ((i + 1) / total) * 100 + "%";
    }

    const finalFd = new FormData();
    finalFd.append("upload_id", uploadID);
    finalFd.append("filename", file.name);
    finalFd.append("total", total);

    // The XMLHttpRequest header makes the server respond with an HTML
    // fragment (link box or error) rather than plain text.
    const res = await fetch("/upload/finish", {
      method: "POST",
      body: finalFd,
      headers: { "X-Requested-With": "XMLHttpRequest" },
    });

    $("busy-state").style.display = "none";
    $("result-state").innerHTML = await res.text();
  } catch (e) {
    $("busy-state").style.display = "none";
    $("result-state").innerHTML = `<div class="error-text">Upload Failed</div><button class="reset-btn" onclick="resetUI()">Try again</button>`;
  }
}
|
||||
|
||||
// Copies the full share URL (page protocol + host/path from the input)
// to the clipboard and gives brief visual feedback on the button.
function copyToClipboard(btn) {
  const input = $("share-url");
  input.select();
  navigator.clipboard.writeText(`${window.location.protocol}//${input.value}`);
  btn.innerText = "Copied!";
  setTimeout(() => (btn.innerText = "Copy"), 2000);
}
|
||||
|
||||
// Resets the page to its initial upload state via a full reload.
function resetUI() {
  window.location.reload();
}
|
||||
@@ -0,0 +1,31 @@
|
||||
{{define "base"}}
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>safebin</title>
|
||||
<link rel="stylesheet" href="/static/css/style.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<header class="header">
|
||||
<h2 style="margin: 0; color: var(--header-white)">safebin</h2>
|
||||
<div class="dim">Encrypted Temporary File Storage</div>
|
||||
</header>
|
||||
|
||||
{{template "content" .}}
|
||||
|
||||
<section style="margin-top: 40px; padding-top: 24px; border-top: 1px solid var(--border)">
|
||||
<div class="dim" style="text-transform: uppercase; font-size: 11px; font-weight: 700; letter-spacing: 1px">CLI Usage</div>
|
||||
<pre style="background: #161b22; padding: 16px; border-radius: 8px; font-size: 13px; overflow-x: auto; border: 1px solid var(--border)">
|
||||
curl -F file=@yourfile {{.Host}}</pre
|
||||
>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<input type="file" id="file-input" style="display: none" />
|
||||
<script src="/static/js/app.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
{{end}}
|
||||
@@ -0,0 +1,18 @@
|
||||
{{define "content"}}
|
||||
<main class="upload-area" id="drop-zone">
|
||||
<div id="idle-state">
|
||||
<div style="font-size: 32px; color: var(--accent)">↑</div>
|
||||
<div style="font-weight: 500; color: var(--header-white)">Click or drag to upload</div>
|
||||
<div class="dim">Max size: {{.MaxMB}}MB</div>
|
||||
</div>
|
||||
|
||||
<div id="busy-state" style="display: none">
|
||||
<div id="status-msg" style="font-weight: 500">Uploading...</div>
|
||||
<div class="progress-bar" id="p-bar-container" style="display: block">
|
||||
<div class="progress-fill" id="p-fill"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="result-state"></div>
|
||||
</main>
|
||||
{{end}}
|
||||
Reference in New Issue
Block a user