Compare commits

...

34 Commits

Author SHA1 Message Date
yusing
f79a15bac6 update license 2025-05-01 07:29:48 +08:00
yusing
2b4a70a550 fix(docker): fixed retry mechanism 2025-05-01 06:48:38 +08:00
yusing
f06741428c fix(idlewatcher): log error and retry instead of stopping 2025-05-01 06:46:24 +08:00
yusing
16e6e72454 feat(access_log): dynamic buffer size 2025-05-01 05:57:02 +08:00
yusing
100d2c392f chore: memory optimization for access log 2025-04-30 18:30:46 +08:00
yusing
829eb08e37 feat: tunable rotate interval 2025-04-30 18:19:00 +08:00
yusing
53d54a09b0 fix: rotate result file size, add "saved" and omit empty values 2025-04-30 18:17:09 +08:00
yusing
62c551c7fe fix: tests 2025-04-30 17:42:51 +08:00
yusing
80e59bb481 fix: nil panic on unmarshaling zero value 2025-04-30 12:06:49 +08:00
yusing
7a5afc3612 fix: compose example 2025-04-30 04:03:11 +08:00
yusing
2c0349c11c chore: remove debug statement 2025-04-30 00:14:53 +08:00
yusing
8e3c2cc8d4 fix: issues when using socket-proxy 2025-04-29 23:56:15 +08:00
yusing
d35afdb3c9 security: exclude socket-proxy from proxying 2025-04-29 16:23:30 +08:00
yusing
ae093ebf40 docs: update wiki URL, add website URL 2025-04-29 15:22:31 +08:00
yusing
aa8af4185b chore: update schema url 2025-04-29 14:45:38 +08:00
yusing
0029cf69d6 fix: setup script and compose 2025-04-29 09:24:22 +08:00
Yuzerion
33e400a17e security: run in rootless by default and drop unnecessary caps (#101)
Co-authored-by: yusing <yusing@6uo.me>
2025-04-29 08:42:30 +08:00
yusing
1d22bcfed9 fix(access_log): file size calculation 2025-04-29 07:33:51 +08:00
yusing
978d82060e docs: move schema to frontend 2025-04-29 07:26:14 +08:00
yusing
7aa1215491 refactor: rename Deserialize to MapUnmarshalValidate 2025-04-29 07:26:14 +08:00
yusing
0b69589586 chore: disable unused last version parsing 2025-04-29 00:47:13 +08:00
yusing
bca3cd84d1 fix(accesslog): os: invalid use of WriteAt on file opened with O_APPEND 2025-04-29 00:46:30 +08:00
yusing
ce4bf2f646 fix(idlewatcher): not started for docker containers 2025-04-28 23:54:13 +08:00
yusing
c49016f22c fix: go.mod and deps upgrade 2025-04-28 11:32:01 +08:00
yusing
8da63daf02 refactor: simplify and remove duplicated code for icon caching 2025-04-28 11:22:49 +08:00
yusing
c5fd21552e fix(oidc): token not being refreshed when receiving simultaneous requests from the same session 2025-04-28 11:19:57 +08:00
yusing
27409abc24 fix: missing proxmox initialization 2025-04-28 05:08:14 +08:00
yusing
21c9e46274 fix: remove redundant event logging 2025-04-28 05:03:17 +08:00
yusing
22a12d3116 chore: remove redundant loadbalancer debug message 2025-04-28 04:57:26 +08:00
yusing
89d93dd878 chore: better error message 2025-04-28 00:48:20 +08:00
yusing
66853dfc52 fix: cloudflare realIP should default to recursive 2025-04-27 23:53:04 +08:00
yusing
c72f66d64b feat(acl): add FORCE_RESOLVE_COUNTRY option to resolve country 2025-04-26 09:48:43 +08:00
yusing
59bc342a40 fix: notifications not being sent 2025-04-26 09:20:03 +08:00
yusing
e11579df10 chore(maxm): improved database update mechanism, fixed db being downloaded twice on first run 2025-04-26 09:08:03 +08:00
122 changed files with 924 additions and 3305 deletions

View File

@@ -4,6 +4,10 @@ TAG=latest
# set timezone to get correct log timestamp
TZ=ETC/UTC
# container uid and gid (must match the owner of mounted directories)
GODOXY_UID=1000
GODOXY_GID=1000
# API JWT Configuration (common)
# generate secret with `openssl rand -base64 32`
GODOXY_API_JWT_SECRET=
@@ -44,9 +48,19 @@ GODOXY_API_PASSWORD=password
GODOXY_HTTP_ADDR=:80
GODOXY_HTTPS_ADDR=:443
# Enable HTTP3
GODOXY_HTTP3_ENABLED=true
# API listening address
GODOXY_API_ADDR=127.0.0.1:8888
# Metrics
GODOXY_METRICS_DISABLE_CPU=false
GODOXY_METRICS_DISABLE_MEMORY=false
GODOXY_METRICS_DISABLE_DISK=false
GODOXY_METRICS_DISABLE_NETWORK=false
GODOXY_METRICS_DISABLE_SENSORS=false
# Frontend listening port
GODOXY_FRONTEND_PORT=3000
@@ -56,6 +70,7 @@ GODOXY_FRONTEND_ALIASES=godoxy
# Docker socket
# /var/run/podman/podman.sock for podman
DOCKER_SOCKET=/var/run/docker.sock
SOCKET_PROXY_LISTEN_ADDR=127.0.0.1:2375
# Debug mode
GODOXY_DEBUG=false
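
For reference, a minimal Go equivalent of the `openssl rand -base64 32` command suggested above for GODOXY_API_JWT_SECRET (illustrative only, not part of this diff):

```go
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

// Prints a 32-byte, base64-encoded random secret suitable for
// GODOXY_API_JWT_SECRET, mirroring `openssl rand -base64 32`.
func main() {
	b := make([]byte, 32)
	if _, err := rand.Read(b); err != nil {
		panic(err)
	}
	fmt.Println(base64.StdEncoding.EncodeToString(b))
}
```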

View File

@@ -1,10 +1,10 @@
{
"yaml.schemas": {
"https://github.com/yusing/go-proxy/raw/main/schemas/config.schema.json": [
"https://github.com/yusing/godoxy-webui/raw/refs/heads/main/src/types/godoxy/config.schema.json": [
"config.example.yml",
"config.yml"
],
"https://github.com/yusing/go-proxy/raw/main/schemas/routes.schema.json": [
"https://github.com/yusing/godoxy-webui/raw/refs/heads/main/src/types/godoxy/routes.schema.json": [
"providers.example.yml"
]
}

View File

@@ -6,6 +6,16 @@ HEALTHCHECK NONE
# trunk-ignore(hadolint/DL3018)
RUN apk add --no-cache tzdata make libcap-setcap
ENV GOPATH=/root/go
WORKDIR /src
COPY go.mod go.sum ./
COPY agent ./agent
COPY internal/dnsproviders ./internal/dnsproviders
RUN go mod download -x
# Stage 2: builder
FROM deps AS builder
@@ -17,12 +27,6 @@ COPY internal ./internal
COPY pkg ./pkg
COPY agent ./agent
# Only copy go.mod and go.sum initially for better caching
COPY go.mod go.sum /src/
ENV GOPATH=/root/go
RUN go mod download -x
ARG VERSION
ENV VERSION=${VERSION}
@@ -31,9 +35,8 @@ ENV MAKE_ARGS=${MAKE_ARGS}
ENV GOCACHE=/root/.cache/go-build
ENV GOPATH=/root/go
RUN make ${MAKE_ARGS} build link-binary && \
mv bin /app/ && \
mkdir -p /app/error_pages /app/certs
RUN make ${MAKE_ARGS} docker=1 build
# Stage 3: Final image
FROM scratch
@@ -45,10 +48,7 @@ LABEL proxy.exclude=1
COPY --from=builder /usr/share/zoneinfo /usr/share/zoneinfo
# copy binary
COPY --from=builder /app /app
# copy example config
COPY config.example.yml /app/config/config.yml
COPY --from=builder /app/run /app/run
# copy certs
COPY --from=builder /etc/ssl/certs /etc/ssl/certs

LICENSE (26 changed lines)
View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2024 [fullname]
Copyright (c) 2024 - present Yusing
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -19,3 +19,27 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
---
internal/net/gphttp/reverseproxy/reverse_proxy_mod.go is copied from net/http/httputil/reverseproxy.go with modifications to adapt to this project.
Copyright 2011 The Go Authors. All rights reserved.
Use of this source code is governed by a BSD-style
license that can be found in the LICENSE file.
---
internal/utils/io.go has a modified version of io.Copy with context and HTTP flusher handling.
Copyright 2009 The Go Authors. All rights reserved.
Use of this source code is governed by a BSD-style
license that can be found in the LICENSE file.
---
internal/utils/strutils/split_join.go is copied from strings.Split and strings.Join with modifications to adapt to this project.
Copyright 2009 The Go Authors. All rights reserved.
Use of this source code is governed by a BSD-style
license that can be found in the LICENSE file.

View File

@@ -1,3 +1,4 @@
shell := /bin/sh
export VERSION ?= $(shell git describe --tags --abbrev=0)
export BUILD_DATE ?= $(shell date -u +'%Y%m%d-%H%M')
export GOOS = linux
@@ -59,20 +60,29 @@ else
SETCAP_CMD = sudo setcap
endif
# CAP_NET_BIND_SERVICE: permission for binding to :80 and :443
POST_BUILD = $(SETCAP_CMD) CAP_NET_BIND_SERVICE=+ep ${BIN_PATH};
ifeq ($(docker), 1)
POST_BUILD += mkdir -p /app && mv ${BIN_PATH} /app/run;
endif
.PHONY: debug
test:
GODOXY_TEST=1 go test ./internal/...
docker-build-test:
docker build -t godoxy .
docker build --build-arg=MAKE_ARGS=agent=1 -t godoxy-agent .
get:
for dir in ${PWD} ${PWD}/agent; do cd $$dir && go get -u ./... && go mod tidy; done
build:
mkdir -p bin
mkdir -p $(shell dirname ${BIN_PATH})
cd ${PWD} && go build ${BUILD_FLAGS} -o ${BIN_PATH} ${CMD_PATH}
# CAP_NET_BIND_SERVICE: permission for binding to :80 and :443
$(SETCAP_CMD) CAP_NET_BIND_SERVICE=+ep ${BIN_PATH}
${POST_BUILD}
run:
[ -f .env ] && godotenv -f .env go run ${BUILD_FLAGS} ${CMD_PATH}
@@ -82,7 +92,7 @@ debug:
sh -c 'HTTP_ADDR=:81 HTTPS_ADDR=:8443 API_ADDR=:8899 DEBUG=1 bin/godoxy-test'
mtrace:
bin/godoxy debug-ls-mtrace > mtrace.json
${BIN_PATH} debug-ls-mtrace > mtrace.json
rapid-crash:
docker run --restart=always --name test_crash -p 80 debian:bookworm-slim /bin/cat &&\
@@ -99,8 +109,5 @@ ci-test:
cloc:
cloc --not-match-f '_test.go$$' cmd internal pkg
link-binary:
ln -s /app/${NAME} bin/run
push-github:
git push origin $(shell git rev-parse --abbrev-ref HEAD)

View File

@@ -10,9 +10,11 @@
A lightweight, simple, and [performant](https://github.com/yusing/godoxy/wiki/Benchmarks) reverse proxy with WebUI.
For full documentation, check out **[Wiki](https://github.com/yusing/godoxy/wiki)**
<h5>
<a href="https://docs.godoxy.dev">Website</a> | <a href="https://docs.godoxy.dev/Home.html">Wiki</a> | <a href="https://discord.gg/umReR62nRd">Discord</a>
</h5>
**EN** | <a href="README_CHT.md">中文</a>
<h5>EN | <a href="README_CHT.md">中文</a></h5>
<img src="screenshots/webui.jpg" style="max-width: 650">

View File

@@ -10,9 +10,11 @@
輕量、易用、 [高效能](https://github.com/yusing/godoxy/wiki/Benchmarks),且帶有主頁和配置面板的反向代理
完整文檔請查閱 **[Wiki](https://github.com/yusing/godoxy/wiki)**(暫未有中文翻譯)
<h5>
<a href="https://docs.godoxy.dev">網站</a> | <a href="https://docs.godoxy.dev/Home.html">文檔</a> | <a href="https://discord.gg/umReR62nRd">Discord</a>
</h5>
<a href="README.md">EN</a> | **中文**
<h5><a href="README.md">EN</a> | 中文</h5>
<img src="https://github.com/user-attachments/assets/4bb371f4-6e4c-425c-89b2-b9e962bdd46f" style="max-width: 650">

View File

@@ -9,7 +9,7 @@ require (
github.com/docker/docker v28.1.1+incompatible
github.com/rs/zerolog v1.34.0
github.com/stretchr/testify v1.10.0
github.com/yusing/go-proxy v0.11.1
github.com/yusing/go-proxy v0.11.5
)
replace github.com/docker/docker => github.com/godoxy-app/docker v0.0.0-20250418000134-7af8fd7b079e
@@ -18,12 +18,15 @@ require (
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/buger/goterm v1.0.4 // indirect
github.com/bytedance/sonic v1.13.2 // indirect
github.com/bytedance/sonic/loader v0.2.4 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/diskfs/go-diskfs v1.6.0 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/djherbis/times v1.6.0 // indirect
github.com/docker/cli v28.1.1+incompatible // indirect
github.com/docker/go-connections v0.5.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
@@ -40,11 +43,15 @@ require (
github.com/goccy/go-yaml v1.17.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/google/pprof v0.0.0-20250423184734-337e5dd93bb4 // indirect
github.com/gotify/server/v2 v2.6.1 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/gotify/server/v2 v2.6.3 // indirect
github.com/jinzhu/copier v0.4.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/lithammer/fuzzysearch v1.1.8 // indirect
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
github.com/luthermonson/go-proxmox v0.2.2 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/miekg/dns v1.1.65 // indirect
@@ -59,7 +66,7 @@ require (
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
github.com/quic-go/qpack v0.5.1 // indirect
github.com/quic-go/quic-go v0.51.0 // indirect
github.com/samber/lo v1.49.1 // indirect
github.com/samber/lo v1.50.0 // indirect
github.com/samber/slog-common v0.18.1 // indirect
github.com/samber/slog-zerolog/v2 v2.7.3 // indirect
github.com/shirou/gopsutil/v4 v4.25.3 // indirect

View File

@@ -43,6 +43,8 @@ github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/ebitengine/purego v0.8.2 h1:jPPGWs2sZ1UgOSgD2bClL0MJIqu58nOmIcBuXr62z1I=
github.com/ebitengine/purego v0.8.2/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
github.com/elliotwutingfeng/asciiset v0.0.0-20230602022725-51bbb787efab h1:h1UgjJdAAhj+uPL68n7XASS6bU+07ZX1WJvVS2eyoeY=
github.com/elliotwutingfeng/asciiset v0.0.0-20230602022725-51bbb787efab/go.mod h1:GLo/8fDswSAniFG+BFIaiSPcK610jyzgEhWYPQwuQdw=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
@@ -68,6 +70,8 @@ github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-yaml v1.17.1 h1:LI34wktB2xEE3ONG/2Ar54+/HJVBriAGJ55PHls4YuY=
@@ -88,14 +92,20 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gotify/server/v2 v2.6.1 h1:Kf7v5fzBxzELzZa/jonWfwJMkqYqh1LBzBpCmt5QIAI=
github.com/gotify/server/v2 v2.6.1/go.mod h1:Dk8HLyTVDqmXM8YEg6tjROBen6mxyHZFRggJFHTwZLc=
github.com/gotify/server/v2 v2.6.3 h1:2sLDRsQ/No1+hcFwFDvjNtwKepfCSIR8L3BkXl/Vz1I=
github.com/gotify/server/v2 v2.6.3/go.mod h1:IyeQ/iL3vetcuqUAzkCMVObIMGGJx4zb13/mVatIwE8=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 h1:e9Rjr40Z98/clHv5Yg79Is0NtosR5LXRvdr7o/6NwbA=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1/go.mod h1:tIxuGz/9mpox++sgp9fJjHO0+q1X9/UOWd798aAm22M=
github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE=
github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/BQk=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
@@ -143,8 +153,12 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
github.com/oschwald/maxminddb-golang v1.13.1 h1:G3wwjdN9JmIK2o/ermkHM+98oX5fS+k5MbwsmL4MRQE=
github.com/oschwald/maxminddb-golang v1.13.1/go.mod h1:K4pgV9N/GcK694KSTmVSDTODk4IsCNThNdTmnaBZ/F8=
github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc=
github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/xattr v0.4.9 h1:5883YPCtkSd8LFbs13nXplj9g9tlrwoJRjgpgMu1/fE=
github.com/pkg/xattr v0.4.9/go.mod h1:di8WF84zAKk8jzR1UBTEWh9AUlIZZ7M/JNt8e9B6ktU=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -161,8 +175,8 @@ github.com/quic-go/quic-go v0.51.0/go.mod h1:MFlGGpcpJqRAfmYi6NC2cptDPSxRWTOGNuP
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
github.com/samber/lo v1.50.0 h1:XrG0xOeHs+4FQ8gJR97zDz5uOFMW7OwFWiFVzqopKgY=
github.com/samber/lo v1.50.0/go.mod h1:RjZyNk6WSnUFRKK6EyOhsRJMqft3G+pg7dCWHQCWvsc=
github.com/samber/slog-common v0.18.1 h1:c0EipD/nVY9HG5shgm/XAs67mgpWDMF+MmtptdJNCkQ=
github.com/samber/slog-common v0.18.1/go.mod h1:QNZiNGKakvrfbJ2YglQXLCZauzkI9xZBjOhWFKS3IKk=
github.com/samber/slog-zerolog/v2 v2.7.3 h1:/MkPDl/tJhijN2GvB1MWwBn2FU8RiL3rQ8gpXkQm2EY=
@@ -188,6 +202,8 @@ github.com/tklauser/numcpus v0.10.0 h1:18njr6LDBk1zuna922MgdjQuJFjrdppsZG60sHGfj
github.com/tklauser/numcpus v0.10.0/go.mod h1:BiTKazU708GQTYF4mB+cmlpT2Is1gLk7XVuEeem8LsQ=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8=
github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/vincent-petithory/dataurl v1.0.0 h1:cXw+kPto8NLuJtlMsI152irrVw9fRDX8AbShPRpg2CI=
github.com/vincent-petithory/dataurl v1.0.0/go.mod h1:FHafX5vmDzyP+1CQATJn7WFKc9CvnvxyvZy6I1MrG/U=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -271,8 +287,10 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210331175145-43e1dd70ce54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=

View File

@@ -6,13 +6,13 @@ import (
"os"
"sync"
"github.com/yusing/go-proxy/internal"
"github.com/yusing/go-proxy/internal/api/v1/query"
"github.com/yusing/go-proxy/internal/auth"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/config"
"github.com/yusing/go-proxy/internal/dnsproviders"
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/homepage"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/logging/memlogger"
"github.com/yusing/go-proxy/internal/metrics/systeminfo"
@@ -50,7 +50,7 @@ func main() {
rawLogger.Println("ok")
return
case common.CommandListIcons:
icons, err := internal.ListAvailableIcons()
icons, err := homepage.ListAvailableIcons()
if err != nil {
rawLogger.Fatal(err)
}
@@ -79,7 +79,7 @@ func main() {
logging.Info().Msgf("GoDoxy version %s", pkg.GetVersion())
logging.Trace().Msg("trace enabled")
parallel(
internal.InitIconListCache,
homepage.InitIconListCache,
systeminfo.Poller.Start,
)

View File

@@ -1,21 +1,48 @@
---
services:
socket-proxy:
container_name: socket-proxy
image: lscr.io/linuxserver/socket-proxy:latest
environment:
- ALLOW_START=1
- ALLOW_STOP=1
- ALLOW_RESTARTS=1
- CONTAINERS=1
- EVENTS=1
- INFO=1
- PING=1
- POST=1
- VERSION=1
volumes:
- ${DOCKER_SOCKET:-/var/run/docker.sock}:/var/run/docker.sock
restart: unless-stopped
tmpfs:
- /run
ports:
- ${SOCKET_PROXY_LISTEN_ADDR:-127.0.0.1:2375}:2375
labels:
proxy.exclude: true
frontend:
image: ghcr.io/yusing/godoxy-frontend:${TAG:-latest}
container_name: godoxy-frontend
restart: unless-stopped
network_mode: host # do not change this
env_file: .env
user: ${GODOXY_UID:-1000}:${GODOXY_GID:-1000}
read_only: true
security_opt:
- no-new-privileges:true
cap_drop:
- all
depends_on:
- app
environment:
HOSTNAME: 127.0.0.1
PORT: ${GODOXY_FRONTEND_PORT:-3000}
# modify below to fit your needs
labels:
proxy.aliases: ${GODOXY_FRONTEND_ALIASES:-godoxy}
proxy.godoxy.port: ${GODOXY_FRONTEND_PORT:-3000}
# proxy.godoxy.middlewares.cidr_whitelist: |
proxy.#1.port: ${GODOXY_FRONTEND_PORT:-3000}
# proxy.#1.middlewares.cidr_whitelist: |
# status: 403
# message: IP not allowed
# allow:
@@ -29,11 +56,22 @@ services:
restart: always
network_mode: host # do not change this
env_file: .env
user: ${GODOXY_UID:-1000}:${GODOXY_GID:-1000}
depends_on:
socket-proxy:
condition: service_started
security_opt:
- no-new-privileges:true
cap_drop:
- all
cap_add:
- NET_BIND_SERVICE
environment:
- DOCKER_HOST=tcp://${SOCKET_PROXY_LISTEN_ADDR:-127.0.0.1:2375}
volumes:
- ${DOCKER_SOCKET:-/var/run/docker.sock}:/var/run/docker.sock
- ./config:/app/config
- ./logs:/app/logs
- ./error_pages:/app/error_pages
- ./error_pages:/app/error_pages:ro
- ./data:/app/data
# To use autocert, certs will be stored in "./certs".
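
The compose changes above point GoDoxy at the socket proxy via `DOCKER_HOST=tcp://${SOCKET_PROXY_LISTEN_ADDR:-127.0.0.1:2375}` instead of granting direct access to the Docker socket. A minimal sketch of how a Go Docker client picks that variable up (illustrative; not the project's actual wiring):

```go
package main

import (
	"context"
	"fmt"

	"github.com/docker/docker/client"
)

// With DOCKER_HOST=tcp://127.0.0.1:2375 exported (as in the compose file),
// a client built from the environment talks to socket-proxy rather than
// the raw Docker socket.
func main() {
	cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
	if err != nil {
		panic(err)
	}
	defer cli.Close()

	ping, err := cli.Ping(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println("connected, API version:", ping.APIVersion)
}
```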

go.mod (14 changed lines)
View File

@@ -15,7 +15,7 @@ require (
github.com/go-acme/lego/v4 v4.23.1 // acme client
github.com/go-playground/validator/v10 v10.26.0 // validator
github.com/gobwas/glob v0.2.3 // glob matcher for route rules
github.com/gotify/server/v2 v2.6.1 // reference the Message struct for json response
github.com/gotify/server/v2 v2.6.3 // reference the Message struct for json response
github.com/lithammer/fuzzysearch v1.1.8 // fuzzy search for searching icons and filtering metrics
github.com/puzpuzpuz/xsync/v3 v3.5.1 // lock free map for concurrent operations
github.com/rs/zerolog v1.34.0 // logging
@@ -41,13 +41,13 @@ require (
github.com/samber/slog-zerolog/v2 v2.7.3
github.com/spf13/afero v1.14.0
github.com/stretchr/testify v1.10.0
github.com/yusing/go-proxy/agent v0.0.0-00010101000000-000000000000
github.com/yusing/go-proxy/internal/dnsproviders v0.0.0-00010101000000-000000000000
github.com/yusing/go-proxy/agent v0.0.0-20250428032249-8da63daf0202
github.com/yusing/go-proxy/internal/dnsproviders v0.0.0-20250428032249-8da63daf0202
go.uber.org/atomic v1.11.0
)
require (
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 // indirect
go.opentelemetry.io/proto/otlp v1.5.0 // indirect
)
@@ -131,7 +131,7 @@ require (
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.146 // indirect
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.147 // indirect
github.com/iij/doapi v0.0.0-20190504054126-0bbf12d6d7df // indirect
github.com/infobloxopen/infoblox-go-client/v2 v2.10.0 // indirect
github.com/jinzhu/copier v0.4.0 // indirect
@@ -191,7 +191,7 @@ require (
github.com/sacloud/iaas-api-go v1.14.0 // indirect
github.com/sacloud/packages-go v0.0.11 // indirect
github.com/sagikazarmark/locafero v0.9.0 // indirect
github.com/samber/lo v1.49.1 // indirect
github.com/samber/lo v1.50.0 // indirect
github.com/samber/slog-common v0.18.1 // indirect
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33 // indirect
github.com/selectel/domains-go v1.1.0 // indirect
@@ -207,7 +207,7 @@ require (
github.com/spf13/pflag v1.0.6 // indirect
github.com/spf13/viper v1.20.1 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1151 // indirect
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1154 // indirect
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dnspod v1.0.1136 // indirect
github.com/tjfoc/gmsm v1.4.1 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect

go.sum (19 changed lines)
View File

@@ -1082,16 +1082,19 @@ github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/z
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gotify/server/v2 v2.6.1 h1:Kf7v5fzBxzELzZa/jonWfwJMkqYqh1LBzBpCmt5QIAI=
github.com/gotify/server/v2 v2.6.1/go.mod h1:Dk8HLyTVDqmXM8YEg6tjROBen6mxyHZFRggJFHTwZLc=
github.com/gotify/server/v2 v2.6.3 h1:2sLDRsQ/No1+hcFwFDvjNtwKepfCSIR8L3BkXl/Vz1I=
github.com/gotify/server/v2 v2.6.3/go.mod h1:IyeQ/iL3vetcuqUAzkCMVObIMGGJx4zb13/mVatIwE8=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 h1:e9Rjr40Z98/clHv5Yg79Is0NtosR5LXRvdr7o/6NwbA=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1/go.mod h1:tIxuGz/9mpox++sgp9fJjHO0+q1X9/UOWd798aAm22M=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI=
github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE=
github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/BQk=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
@@ -1147,8 +1150,8 @@ github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/J
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.146 h1:ld5s5UeA9zgyFsZskVD2Tr6k6VnJWkvaLm5nqvfOEf4=
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.146/go.mod h1:Y/+YLCFCJtS29i2MbYPTUlNNfwXvkzEsZKR0imY/2aY=
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.147 h1:ip9+1n9+THhYgChlQpgDLVDVTv4LVJ7AoyPBJBaX2MY=
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.147/go.mod h1:Y/+YLCFCJtS29i2MbYPTUlNNfwXvkzEsZKR0imY/2aY=
github.com/hudl/fargo v1.4.0/go.mod h1:9Ai6uvFy5fQNq6VPKtg+Ceq1+eTY4nKUlR2JElEOcDo=
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
@@ -1517,8 +1520,8 @@ github.com/sacloud/packages-go v0.0.11/go.mod h1:XNF5MCTWcHo9NiqWnYctVbASSSZR3ZO
github.com/sagikazarmark/crypt v0.10.0/go.mod h1:gwTNHQVoOS3xp9Xvz5LLR+1AauC5M6880z5NWzdhOyQ=
github.com/sagikazarmark/locafero v0.9.0 h1:GbgQGNtTrEmddYDSAH9QLRyfAHY12md+8YFTqyMTC9k=
github.com/sagikazarmark/locafero v0.9.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk=
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
github.com/samber/lo v1.50.0 h1:XrG0xOeHs+4FQ8gJR97zDz5uOFMW7OwFWiFVzqopKgY=
github.com/samber/lo v1.50.0/go.mod h1:RjZyNk6WSnUFRKK6EyOhsRJMqft3G+pg7dCWHQCWvsc=
github.com/samber/slog-common v0.18.1 h1:c0EipD/nVY9HG5shgm/XAs67mgpWDMF+MmtptdJNCkQ=
github.com/samber/slog-common v0.18.1/go.mod h1:QNZiNGKakvrfbJ2YglQXLCZauzkI9xZBjOhWFKS3IKk=
github.com/samber/slog-zerolog/v2 v2.7.3 h1:/MkPDl/tJhijN2GvB1MWwBn2FU8RiL3rQ8gpXkQm2EY=
@@ -1615,8 +1618,8 @@ github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNG
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1136/go.mod h1:r5r4xbfxSaeR04b166HGsBa/R4U3SueirEUpXGuw+Q0=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1151 h1:SBbEaeCwhqmyAEEF5ubpg/2vv3RO6SdBsOSYhpnJaL4=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1151/go.mod h1:r5r4xbfxSaeR04b166HGsBa/R4U3SueirEUpXGuw+Q0=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1154 h1:tc2GXLGwpjaZdapd7pEpUjoeWU5gl3XUuZzDEyes7fg=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1154/go.mod h1:r5r4xbfxSaeR04b166HGsBa/R4U3SueirEUpXGuw+Q0=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dnspod v1.0.1136 h1:kMIdSU5IvpOROh27ToVQ3hlm6ym3lCRs9tnGCOBoZqk=
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dnspod v1.0.1136/go.mod h1:FpyIz3mymKaExVs6Fz27kxDBS42jqZn7vbACtxdeEH4=
github.com/tjfoc/gmsm v1.4.1 h1:aMe1GlZb+0bLjn+cKTPEvvn9oUEBlJitaZiiBwsbgho=

View File

@@ -9,6 +9,7 @@ import (
"github.com/puzpuzpuz/xsync/v3"
"github.com/rs/zerolog"
acl "github.com/yusing/go-proxy/internal/acl/types"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/logging/accesslog"
@@ -153,7 +154,10 @@ func (c *Config) Start(parent *task.Task) gperr.Error {
return nil
}
func (c *config) cacheRecord(info *acl.IPInfo, allow bool) {
func (c *Config) cacheRecord(info *acl.IPInfo, allow bool) {
if common.ForceResolveCountry && info.City == nil {
c.MaxMind.lookupCity(info)
}
c.ipCache.Store(info.Str, &checkCache{
IPInfo: info,
allow: allow,
@@ -175,7 +179,7 @@ func (c *Config) IPAllowed(ip net.IP) bool {
return false
}
// always allow private and loopback
// always allow loopback
// loopback is not logged
if ip.IsLoopback() {
return true

View File

@@ -54,7 +54,7 @@ func (cfg *MaxMindConfig) LoadMaxMindDB(parent task.Parent) gperr.Error {
path := dbPath(cfg.Database)
reader, err := maxmindDBOpen(path)
exists := true
valid := true
if err != nil {
switch {
case errors.Is(err, os.ErrNotExist):
@@ -65,20 +65,19 @@ func (cfg *MaxMindConfig) LoadMaxMindDB(parent task.Parent) gperr.Error {
return gperr.Wrap(err)
}
}
exists = false
valid = false
}
if !exists {
if !valid {
cfg.logger.Info().Msg("MaxMind DB not found/invalid, downloading...")
reader, err = cfg.download()
if err != nil {
if err = cfg.download(); err != nil {
return ErrDownloadFailure.With(err)
}
} else {
cfg.logger.Info().Msg("MaxMind DB loaded")
cfg.db.Reader = reader
go cfg.scheduleUpdate(parent)
}
cfg.logger.Info().Msg("MaxMind DB loaded")
cfg.db.Reader = reader
go cfg.scheduleUpdate(parent)
return nil
}
@@ -137,17 +136,10 @@ func (cfg *MaxMindConfig) update() {
Time("latest", remoteLastModified.Local()).
Time("current", cfg.lastUpdate).
Msg("MaxMind DB update available")
reader, err := cfg.download()
if err != nil {
if err = cfg.download(); err != nil {
cfg.logger.Err(err).Msg("failed to update MaxMind DB")
return
}
cfg.db.Lock()
cfg.db.Close()
cfg.db.Reader = reader
cfg.setLastUpdate(*remoteLastModified)
cfg.db.Unlock()
cfg.logger.Info().Msg("MaxMind DB updated")
}
@@ -190,57 +182,87 @@ func (cfg *MaxMindConfig) checkLastest() (lastModifiedT *time.Time, err error) {
return &lastModifiedTime, nil
}
func (cfg *MaxMindConfig) download() (*maxminddb.Reader, error) {
func (cfg *MaxMindConfig) download() error {
resp, err := newReq(cfg, http.MethodGet)
if err != nil {
return nil, err
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("%w: %d", ErrResponseNotOK, resp.StatusCode)
return fmt.Errorf("%w: %d", ErrResponseNotOK, resp.StatusCode)
}
path := dbPath(cfg.Database)
tmpPath := path + "-tmp.tar.gz"
file, err := os.OpenFile(tmpPath, os.O_CREATE|os.O_WRONLY, 0o644)
if err != nil {
return nil, err
}
dbFile := dbPath(cfg.Database)
tmpGZPath := dbFile + "-tmp.tar.gz"
tmpDBPath := dbFile + "-tmp"
cfg.logger.Info().Msg("MaxMind DB downloading...")
_, err = io.Copy(file, resp.Body)
if err != nil {
file.Close()
return nil, err
}
file.Close()
// extract .tar.gz and move only the dbFilename to path
err = extractFileFromTarGz(tmpPath, dbFilename(cfg.Database), path)
if err != nil {
return nil, gperr.New("failed to extract database from archive").With(err)
}
// cleanup the tar.gz file
_ = os.Remove(tmpPath)
db, err := maxmindDBOpen(path)
if err != nil {
return nil, err
}
return db, nil
}
func extractFileFromTarGz(tarGzPath, targetFilename, destPath string) error {
f, err := os.Open(tarGzPath)
tmpGZFile, err := os.OpenFile(tmpGZPath, os.O_CREATE|os.O_RDWR, 0o644)
if err != nil {
return err
}
defer f.Close()
gzr, err := gzip.NewReader(f)
// cleanup the tar.gz file
defer func() {
_ = tmpGZFile.Close()
_ = os.Remove(tmpGZPath)
}()
cfg.logger.Info().Msg("MaxMind DB downloading...")
_, err = io.Copy(tmpGZFile, resp.Body)
if err != nil {
return err
}
if _, err := tmpGZFile.Seek(0, io.SeekStart); err != nil {
return err
}
// extract the database file from the .tar.gz archive
err = extractFileFromTarGz(tmpGZFile, dbFilename(cfg.Database), tmpDBPath)
if err != nil {
return gperr.New("failed to extract database from archive").With(err)
}
// test if the downloaded database is valid
db, err := maxmindDBOpen(tmpDBPath)
if err != nil {
_ = os.Remove(tmpDBPath)
return err
}
db.Close()
err = os.Rename(tmpDBPath, dbFile)
if err != nil {
return err
}
cfg.db.Lock()
defer cfg.db.Unlock()
if cfg.db.Reader != nil {
cfg.db.Reader.Close()
}
cfg.db.Reader, err = maxmindDBOpen(dbFile)
if err != nil {
return err
}
lastModifiedStr := resp.Header.Get("Last-Modified")
lastModifiedTime, err := time.Parse(http.TimeFormat, lastModifiedStr)
if err == nil {
cfg.setLastUpdate(lastModifiedTime)
}
cfg.logger.Info().Msg("MaxMind DB downloaded")
return nil
}
func extractFileFromTarGz(tarGzFile *os.File, targetFilename, destPath string) error {
defer tarGzFile.Close()
gzr, err := gzip.NewReader(tarGzFile)
if err != nil {
return err
}
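
The rewritten download path above avoids replacing a working database with a broken one: it streams into a temporary file, validates the result by opening it, then renames it over the live file and swaps the reader under the lock. A generic sketch of that pattern (hypothetical helper, not the project's code):

```go
package example

import (
	"fmt"
	"io"
	"os"
)

// replaceFileAtomically streams src into a temporary file next to dst,
// runs a caller-supplied validation, and only then renames it into place,
// so a failed or corrupt download never clobbers the existing file.
func replaceFileAtomically(dst string, src io.Reader, validate func(path string) error) error {
	tmp := dst + "-tmp"
	f, err := os.Create(tmp)
	if err != nil {
		return err
	}
	if _, err := io.Copy(f, src); err != nil {
		f.Close()
		os.Remove(tmp)
		return err
	}
	if err := f.Close(); err != nil {
		os.Remove(tmp)
		return err
	}
	if err := validate(tmp); err != nil {
		os.Remove(tmp)
		return fmt.Errorf("validation failed: %w", err)
	}
	// os.Rename is atomic when tmp and dst share a filesystem.
	return os.Rename(tmp, dst)
}
```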

View File

@@ -1,6 +1,8 @@
package acl
import (
"archive/tar"
"compress/gzip"
"io"
"net/http"
"net/http/httptest"
@@ -144,9 +146,17 @@ func Test_MaxMindConfig_download(t *testing.T) {
logger: zerolog.Nop(),
}
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
io.Copy(w, strings.NewReader("FAKEMMDB"))
gz := gzip.NewWriter(w)
t := tar.NewWriter(gz)
t.WriteHeader(&tar.Header{
Name: dbFilename(MaxMindGeoLite),
})
t.Write([]byte("1234"))
t.Close()
gz.Close()
}))
defer server.Close()
oldURL := dbURL
dbURL = func(MaxMindDatabaseType) string { return server.URL }
defer func() { dbURL = oldURL }()
@@ -163,26 +173,26 @@ func Test_MaxMindConfig_download(t *testing.T) {
}
defer func() { maxmindDBOpen = origOpen }()
rw := &fakeReadCloser{}
req, err := http.NewRequest(http.MethodGet, server.URL, nil)
if err != nil {
t.Fatalf("newReq() error = %v", err)
}
rw := httptest.NewRecorder()
oldNewReq := newReq
newReq = func(cfg *MaxMindConfig, method string) (*http.Response, error) {
return &http.Response{
StatusCode: http.StatusOK,
Body: rw,
}, nil
server.Config.Handler.ServeHTTP(rw, req)
return rw.Result(), nil
}
defer func() { newReq = oldNewReq }()
db, err := cfg.download()
err = cfg.download()
if err != nil {
t.Fatalf("download() error = %v", err)
}
if db == nil {
if cfg.db.Reader == nil {
t.Error("expected db instance")
}
if !rw.closed {
t.Error("expected rw to be closed")
}
}
func Test_MaxMindConfig_loadMaxMindDB(t *testing.T) {

View File

@@ -6,9 +6,9 @@ import (
"strconv"
"strings"
"github.com/yusing/go-proxy/internal"
"github.com/yusing/go-proxy/internal/common"
config "github.com/yusing/go-proxy/internal/config/types"
"github.com/yusing/go-proxy/internal/homepage"
"github.com/yusing/go-proxy/internal/net/gphttp"
"github.com/yusing/go-proxy/internal/net/gphttp/middleware"
"github.com/yusing/go-proxy/internal/route/routes"
@@ -67,7 +67,7 @@ func List(cfg config.ConfigInstance, w http.ResponseWriter, r *http.Request) {
if err != nil {
limit = 0
}
icons, err := internal.SearchIcons(r.FormValue("keyword"), limit)
icons, err := homepage.SearchIcons(r.FormValue("keyword"), limit)
if err != nil {
gphttp.ClientError(w, err)
return

View File

@@ -1,6 +1,7 @@
package auth
import (
"context"
"net/http"
"github.com/yusing/go-proxy/internal/common"
@@ -38,17 +39,35 @@ func IsOIDCEnabled() bool {
return common.OIDCIssuerURL != ""
}
type nextHandler struct{}
var nextHandlerContextKey = nextHandler{}
func RequireAuth(next http.HandlerFunc) http.HandlerFunc {
if IsEnabled() {
return func(w http.ResponseWriter, r *http.Request) {
if err := defaultAuth.CheckToken(r); err != nil {
gphttp.ClientError(w, err, http.StatusUnauthorized)
} else {
next(w, r)
}
}
if !IsEnabled() {
return next
}
return func(w http.ResponseWriter, r *http.Request) {
if err := defaultAuth.CheckToken(r); err != nil {
if IsFrontend(r) {
r = r.WithContext(context.WithValue(r.Context(), nextHandlerContextKey, next))
defaultAuth.LoginHandler(w, r)
} else {
gphttp.ClientError(w, err, http.StatusUnauthorized)
}
return
}
next(w, r)
}
}
func ProceedNext(w http.ResponseWriter, r *http.Request) {
next, ok := r.Context().Value(nextHandlerContextKey).(http.HandlerFunc)
if ok {
next(w, r)
} else {
w.WriteHeader(http.StatusOK)
}
return next
}
func AuthCheckHandler(w http.ResponseWriter, r *http.Request) {
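
The RequireAuth change above stops treating an expired token on a frontend request as a hard 401: the original handler is stashed in the request context so LoginHandler can resume it via ProceedNext after a silent refresh. A stripped-down sketch of that pattern:

```go
package example

import (
	"context"
	"net/http"
)

type nextHandlerKey struct{}

// withNext stores the handler that should run once authentication succeeds.
func withNext(r *http.Request, next http.HandlerFunc) *http.Request {
	return r.WithContext(context.WithValue(r.Context(), nextHandlerKey{}, next))
}

// proceedNext resumes the stored handler, or returns 200 if none was stored.
func proceedNext(w http.ResponseWriter, r *http.Request) {
	if next, ok := r.Context().Value(nextHandlerKey{}).(http.HandlerFunc); ok {
		next(w, r)
		return
	}
	w.WriteHeader(http.StatusOK)
}
```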

View File

@@ -1,11 +1,13 @@
package auth
import (
"context"
"crypto/rand"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"net/http"
"sync"
"time"
"github.com/golang-jwt/jwt/v5"
@@ -19,6 +21,10 @@ type oauthRefreshToken struct {
Username string `json:"username"`
RefreshToken string `json:"refresh_token"`
Expiry time.Time `json:"expiry"`
result *refreshResult
err error
mu sync.Mutex
}
type Session struct {
@@ -27,6 +33,12 @@ type Session struct {
Groups []string `json:"groups"`
}
type refreshResult struct {
newSession Session
jwt string
jwtExpiry time.Time
}
type sessionClaims struct {
Session
jwt.RegisteredClaims
@@ -34,11 +46,12 @@ type sessionClaims struct {
type sessionID string
var oauthRefreshTokens jsonstore.MapStore[oauthRefreshToken]
var oauthRefreshTokens jsonstore.MapStore[*oauthRefreshToken]
var (
defaultRefreshTokenExpiry = 30 * 24 * time.Hour // 1 month
refreshBefore = 30 * time.Second
sessionInvalidateDelay = 3 * time.Second
)
var (
@@ -50,7 +63,7 @@ const sessionTokenIssuer = "GoDoxy"
func init() {
if IsOIDCEnabled() {
oauthRefreshTokens = jsonstore.Store[oauthRefreshToken]("oauth_refresh_tokens")
oauthRefreshTokens = jsonstore.Store[*oauthRefreshToken]("oauth_refresh_tokens")
}
}
@@ -61,7 +74,7 @@ func (token *oauthRefreshToken) expired() bool {
func newSessionID() sessionID {
b := make([]byte, 32)
_, _ = rand.Read(b)
return sessionID(base64.StdEncoding.EncodeToString(b))
return sessionID(hex.EncodeToString(b))
}
func newSession(username string, groups []string) Session {
@@ -72,26 +85,28 @@ func newSession(username string, groups []string) Session {
}
}
// getOnceOAuthRefreshToken returns the refresh token for the given session.
// getOAuthRefreshToken returns the refresh token for the given session.
//
// The token is removed from the store after retrieval.
func getOnceOAuthRefreshToken(claims *Session) (*oauthRefreshToken, bool) {
func getOAuthRefreshToken(claims *Session) (*oauthRefreshToken, bool) {
token, ok := oauthRefreshTokens.Load(string(claims.SessionID))
if !ok {
return nil, false
}
invalidateOAuthRefreshToken(claims.SessionID)
if token.expired() {
invalidateOAuthRefreshToken(claims.SessionID)
return nil, false
}
if claims.Username != token.Username {
return nil, false
}
return &token, true
return token, true
}
func storeOAuthRefreshToken(sessionID sessionID, username, token string) {
oauthRefreshTokens.Store(string(sessionID), oauthRefreshToken{
oauthRefreshTokens.Store(string(sessionID), &oauthRefreshToken{
Username: username,
RefreshToken: token,
Expiry: time.Now().Add(defaultRefreshTokenExpiry),
@@ -135,51 +150,75 @@ func (auth *OIDCProvider) parseSessionJWT(sessionJWT string) (claims *sessionCla
return claims, sessionToken.Valid && claims.Issuer == sessionTokenIssuer, nil
}
func (auth *OIDCProvider) TryRefreshToken(w http.ResponseWriter, r *http.Request, sessionJWT string) error {
func (auth *OIDCProvider) TryRefreshToken(ctx context.Context, sessionJWT string) (*refreshResult, error) {
// verify the session cookie
claims, valid, err := auth.parseSessionJWT(sessionJWT)
if err != nil {
return fmt.Errorf("%w: %w", ErrInvalidSessionToken, err)
return nil, fmt.Errorf("session: %s - %w: %w", claims.SessionID, ErrInvalidSessionToken, err)
}
if !valid {
return ErrInvalidSessionToken
return nil, ErrInvalidSessionToken
}
// check if refresh is possible
refreshToken, ok := getOnceOAuthRefreshToken(&claims.Session)
refreshToken, ok := getOAuthRefreshToken(&claims.Session)
if !ok {
return errNoRefreshToken
return nil, errNoRefreshToken
}
if !auth.checkAllowed(claims.Username, claims.Groups) {
return ErrUserNotAllowed
return nil, ErrUserNotAllowed
}
return auth.doRefreshToken(ctx, refreshToken, &claims.Session)
}
func (auth *OIDCProvider) doRefreshToken(ctx context.Context, refreshToken *oauthRefreshToken, claims *Session) (*refreshResult, error) {
refreshToken.mu.Lock()
defer refreshToken.mu.Unlock()
// already refreshed
// this must be called after refresh but before invalidate
if refreshToken.result != nil || refreshToken.err != nil {
return refreshToken.result, refreshToken.err
}
// this step refreshes the token
// see https://cs.opensource.google/go/x/oauth2/+/refs/tags/v0.29.0:oauth2.go;l=313
newToken, err := auth.oauthConfig.TokenSource(r.Context(), &oauth2.Token{
newToken, err := auth.oauthConfig.TokenSource(ctx, &oauth2.Token{
RefreshToken: refreshToken.RefreshToken,
}).Token()
if err != nil {
return fmt.Errorf("%w: %w", ErrRefreshTokenFailure, err)
refreshToken.err = fmt.Errorf("session: %s - %w: %w", claims.SessionID, ErrRefreshTokenFailure, err)
return nil, refreshToken.err
}
idTokenJWT, idToken, err := auth.getIdToken(r.Context(), newToken)
idTokenJWT, idToken, err := auth.getIdToken(ctx, newToken)
if err != nil {
return err
refreshToken.err = fmt.Errorf("session: %s - %w: %w", claims.SessionID, ErrRefreshTokenFailure, err)
return nil, refreshToken.err
}
// in case there're multiple requests for the same session to refresh
// invalidate the token after a short delay
go func() {
<-time.After(sessionInvalidateDelay)
invalidateOAuthRefreshToken(claims.SessionID)
}()
sessionID := newSessionID()
logging.Debug().Str("username", claims.Username).Time("expiry", newToken.Expiry).Msg("refreshed token")
storeOAuthRefreshToken(sessionID, claims.Username, newToken.RefreshToken)
// set new idToken and new sessionToken
auth.setIDTokenCookie(w, r, idTokenJWT, time.Until(idToken.Expiry))
auth.setSessionTokenCookie(w, r, Session{
SessionID: sessionID,
Username: claims.Username,
Groups: claims.Groups,
})
return nil
refreshToken.result = &refreshResult{
newSession: Session{
SessionID: sessionID,
Username: claims.Username,
Groups: claims.Groups,
},
jwt: idTokenJWT,
jwtExpiry: idToken.Expiry,
}
return refreshToken.result, nil
}
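
The fix for simultaneous refreshes (c5fd21552e) serializes the work per stored token: the first request performs the OAuth round trip, concurrent requests for the same session receive the memoized result, and the old refresh token is invalidated after a short grace period rather than immediately. A simplified sketch of that pattern, with names reduced for clarity:

```go
package example

import (
	"sync"
	"time"
)

type refreshEntry struct {
	mu     sync.Mutex
	result string
	err    error
	done   bool
}

// refresh runs do() at most once per entry; concurrent callers block on the
// mutex and then reuse the memoized result. invalidate() is deferred for a
// short grace period so other in-flight requests can still find the entry.
func (e *refreshEntry) refresh(do func() (string, error), invalidate func()) (string, error) {
	e.mu.Lock()
	defer e.mu.Unlock()
	if e.done {
		return e.result, e.err
	}
	e.result, e.err = do()
	e.done = true
	if e.err == nil {
		go func() {
			time.Sleep(3 * time.Second)
			invalidate()
		}()
	}
	return e.result, e.err
}
```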

View File

@@ -13,6 +13,7 @@ import (
"github.com/coreos/go-oidc/v3/oidc"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/net/gphttp"
"github.com/yusing/go-proxy/internal/utils"
@@ -47,7 +48,12 @@ const (
OIDCLogoutPath = "/auth/logout"
)
var errMissingIDToken = errors.New("missing id_token field from oauth token")
var (
errMissingIDToken = errors.New("missing id_token field from oauth token")
ErrMissingOAuthToken = gperr.New("missing oauth token")
ErrInvalidOAuthToken = gperr.New("invalid oauth token")
)
// generateState generates a random string for OIDC state.
const oidcStateLength = 32
@@ -148,12 +154,19 @@ func (auth *OIDCProvider) HandleAuth(w http.ResponseWriter, r *http.Request) {
func (auth *OIDCProvider) LoginHandler(w http.ResponseWriter, r *http.Request) {
// check for session token
sessionToken, err := r.Cookie(CookieOauthSessionToken)
if err == nil {
err = auth.TryRefreshToken(w, r, sessionToken.Value)
if err != nil {
logging.Debug().Err(err).Msg("failed to refresh token")
auth.clearCookie(w, r)
if err == nil { // session token exists
result, err := auth.TryRefreshToken(r.Context(), sessionToken.Value)
// redirect back to where they requested
// when token refresh is ok
if err == nil {
auth.setIDTokenCookie(w, r, result.jwt, time.Until(result.jwtExpiry))
auth.setSessionTokenCookie(w, r, result.newSession)
ProceedNext(w, r)
return
}
// clear cookies then redirect to home
logging.Err(err).Msg("failed to refresh token")
auth.clearCookie(w, r)
http.Redirect(w, r, "/", http.StatusFound)
return
}

View File

@@ -98,7 +98,7 @@ func TestUserPassLoginCallbackHandler(t *testing.T) {
Host: "app.example.com",
Body: io.NopCloser(bytes.NewReader(Must(json.Marshal(tt.creds)))),
}
auth.LoginHandler(w, req)
auth.PostAuthCallbackHandler(w, req)
if tt.wantErr {
ExpectEqual(t, w.Code, http.StatusUnauthorized)
} else {

View File

@@ -10,22 +10,21 @@ import (
)
var (
ErrMissingOAuthToken = gperr.New("missing oauth token")
ErrMissingSessionToken = gperr.New("missing session token")
ErrInvalidOAuthToken = gperr.New("invalid oauth token")
ErrInvalidSessionToken = gperr.New("invalid session token")
ErrUserNotAllowed = gperr.New("user not allowed")
)
func IsFrontend(r *http.Request) bool {
return r.Host == common.APIHTTPAddr
}
func requestHost(r *http.Request) string {
// check if it's from backend
switch r.Host {
case common.APIHTTPAddr:
// use XFH
if IsFrontend(r) {
return r.Header.Get("X-Forwarded-Host")
default:
return r.Host
}
return r.Host
}
// cookieDomain returns the fully qualified domain name of the request host

View File

@@ -45,6 +45,6 @@ oauth2_config:
testYaml = testYaml[1:] // remove first \n
opt := make(map[string]any)
require.NoError(t, yaml.Unmarshal([]byte(testYaml), &opt))
require.NoError(t, utils.Deserialize(opt, cfg))
require.NoError(t, utils.MapUnmarshalValidate(opt, cfg))
require.Equal(t, cfg, cfgExpected)
}

View File

@@ -16,7 +16,7 @@ func DNSProvider[CT any, PT challenge.Provider](
) Generator {
return func(opt map[string]any) (challenge.Provider, gperr.Error) {
cfg := defaultCfg()
err := utils.Deserialize(opt, &cfg)
err := utils.MapUnmarshalValidate(opt, &cfg)
if err != nil {
return nil, err
}

View File

@@ -16,7 +16,6 @@ const (
ConfigPath = ConfigBasePath + "/" + ConfigFileName
IconListCachePath = ConfigBasePath + "/.icon_list_cache.json"
IconCachePath = ConfigBasePath + "/.icon_cache.json"
NamespaceHomepageOverrides = ".homepage"
NamespaceIconCache = ".icon_cache"

View File

@@ -58,6 +58,8 @@ var (
MetricsDisableDisk = GetEnvBool("METRICS_DISABLE_DISK", false)
MetricsDisableNetwork = GetEnvBool("METRICS_DISABLE_NETWORK", false)
MetricsDisableSensors = GetEnvBool("METRICS_DISABLE_SENSORS", false)
ForceResolveCountry = GetEnvBool("FORCE_RESOLVE_COUNTRY", false)
)
func GetEnv[T any](key string, defaultValue T, parser func(string) (T, error)) T {
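
Based on the `GetEnv[T any](key, defaultValue, parser)` helper visible above, the new FORCE_RESOLVE_COUNTRY flag is presumably parsed like the other boolean toggles. A plausible minimal equivalent (the project's actual GetEnvBool may differ):

```go
package example

import (
	"os"
	"strconv"
)

// getEnvBool mirrors the GetEnv pattern above for boolean flags such as
// FORCE_RESOLVE_COUNTRY: unset or unparsable values fall back to the default.
func getEnvBool(key string, defaultValue bool) bool {
	v, ok := os.LookupEnv(key)
	if !ok || v == "" {
		return defaultValue
	}
	b, err := strconv.ParseBool(v)
	if err != nil {
		return defaultValue
	}
	return b
}
```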

View File

@@ -18,6 +18,7 @@ import (
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/net/gphttp/server"
"github.com/yusing/go-proxy/internal/notif"
"github.com/yusing/go-proxy/internal/proxmox"
proxy "github.com/yusing/go-proxy/internal/route/provider"
"github.com/yusing/go-proxy/internal/task"
"github.com/yusing/go-proxy/internal/utils"
@@ -219,7 +220,7 @@ func (cfg *Config) load() gperr.Error {
}
model := config.DefaultConfig()
if err := utils.DeserializeYAML(data, model); err != nil {
if err := utils.UnmarshalValidateYAML(data, model); err != nil {
gperr.LogFatal(errMsg, err)
}
@@ -229,6 +230,7 @@ func (cfg *Config) load() gperr.Error {
errs.Add(cfg.entrypoint.SetAccessLogger(cfg.task, model.Entrypoint.AccessLog))
cfg.initNotification(model.Providers.Notification)
errs.Add(cfg.initAutoCert(model.AutoCert))
errs.Add(cfg.initProxmox(model.Providers.Proxmox))
errs.Add(cfg.loadRouteProviders(&model.Providers))
cfg.value = model
@@ -278,6 +280,17 @@ func (cfg *Config) initAutoCert(autocertCfg *autocert.Config) gperr.Error {
return nil
}
func (cfg *Config) initProxmox(proxmoxCfg []proxmox.Config) gperr.Error {
proxmox.Clients.Clear()
var errs = gperr.NewBuilder()
for _, cfg := range proxmoxCfg {
if err := cfg.Init(); err != nil {
errs.Add(err.Subject(cfg.URL))
}
}
return errs.Error()
}
func (cfg *Config) errIfExists(p *proxy.Provider) gperr.Error {
if _, ok := cfg.providers.Load(p.String()); ok {
return gperr.Errorf("provider %s already exists", p.String())

View File

@@ -12,6 +12,7 @@ import (
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/logging/accesslog"
"github.com/yusing/go-proxy/internal/notif"
"github.com/yusing/go-proxy/internal/proxmox"
"github.com/yusing/go-proxy/internal/utils"
)
@@ -30,6 +31,7 @@ type (
Docker map[string]string `json:"docker" yaml:"docker,omitempty" validate:"non_empty_docker_keys,dive,unix_addr|url"`
Agents []*agent.AgentConfig `json:"agents" yaml:"agents,omitempty"`
Notification []notif.NotificationConfig `json:"notification" yaml:"notification,omitempty"`
Proxmox []proxmox.Config `json:"proxmox" yaml:"proxmox,omitempty"`
}
Entrypoint struct {
Middlewares []map[string]any `json:"middlewares"`
@@ -86,7 +88,7 @@ func HasInstance() bool {
func Validate(data []byte) gperr.Error {
var model Config
return utils.DeserializeYAML(data, &model)
return utils.UnmarshalValidateYAML(data, &model)
}
var matchDomainsRegex = regexp.MustCompile(`^[^\.]?([\w\d\-_]\.?)+[^\.]?$`)

View File

@@ -126,11 +126,27 @@ func (c *Container) isDatabase() bool {
return false
}
func (c *Container) isLocal() bool {
if strings.HasPrefix(c.DockerHost, "unix://") {
return true
}
url, err := url.Parse(c.DockerHost)
if err != nil {
return false
}
switch url.Hostname() {
case "localhost", "127.0.0.1", "::1":
return true
default:
return false
}
}
func (c *Container) setPublicHostname() {
if !c.Running {
return
}
if strings.HasPrefix(c.DockerHost, "unix://") {
if c.isLocal() {
c.PublicHostname = "127.0.0.1"
return
}
@@ -144,18 +160,17 @@ func (c *Container) setPublicHostname() {
}
func (c *Container) setPrivateHostname(helper containerHelper) {
if !strings.HasPrefix(c.DockerHost, "unix://") && c.Agent == nil {
if !c.isLocal() && c.Agent == nil {
return
}
if helper.NetworkSettings == nil {
return
}
for _, v := range helper.NetworkSettings.Networks {
if v.IPAddress == "" {
continue
if v.IPAddress != "" {
c.PrivateHostname = v.IPAddress
return
}
c.PrivateHostname = v.IPAddress
return
}
}
@@ -178,7 +193,7 @@ func (c *Container) loadDeleteIdlewatcherLabels(helper containerHelper) {
ContainerName: c.ContainerName,
},
}
err := utils.Deserialize(cfg, idwCfg)
err := utils.MapUnmarshalValidate(cfg, idwCfg)
if err != nil {
gperr.LogWarn("invalid idlewatcher config", gperr.PrependSubject(c.ContainerName, err))
} else {
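
The new Container.isLocal() above is what makes the socket-proxy setup work: a Docker host now counts as local either via the unix socket or via a loopback TCP address such as the proxied tcp://127.0.0.1:2375. A standalone restatement of that check:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// isLocalDockerHost mirrors the logic added above: unix sockets and
// loopback TCP endpoints (e.g. the socket-proxy listener) are local.
func isLocalDockerHost(dockerHost string) bool {
	if strings.HasPrefix(dockerHost, "unix://") {
		return true
	}
	u, err := url.Parse(dockerHost)
	if err != nil {
		return false
	}
	switch u.Hostname() {
	case "localhost", "127.0.0.1", "::1":
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isLocalDockerHost("unix:///var/run/docker.sock")) // true
	fmt.Println(isLocalDockerHost("tcp://127.0.0.1:2375"))        // true
	fmt.Println(isLocalDockerHost("tcp://192.168.1.10:2375"))     // false
}
```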

View File

@@ -7,6 +7,7 @@ import (
"time"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/jsonstore"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/task"
"github.com/yusing/go-proxy/internal/utils"
@@ -15,34 +16,24 @@ import (
type cacheEntry struct {
Icon []byte `json:"icon"`
ContentType string `json:"content_type"`
ContentType string `json:"content_type,omitempty"`
LastAccess atomic.Value[time.Time] `json:"last_access"`
}
// cache key can be absolute url or route name.
var (
iconCache = make(map[string]*cacheEntry)
iconCacheMu sync.RWMutex
iconCache = jsonstore.Store[*cacheEntry](common.NamespaceIconCache)
iconMu sync.RWMutex
)
const (
iconCacheTTL = 3 * 24 * time.Hour
cleanUpInterval = time.Minute
maxCacheSize = 1024 * 1024 // 1MB
maxIconSize = 1024 * 1024 // 1MB
maxCacheEntries = 100
)
func InitIconCache() {
iconCacheMu.Lock()
defer iconCacheMu.Unlock()
err := utils.LoadJSONIfExist(common.IconCachePath, &iconCache)
if err != nil {
logging.Error().Err(err).Msg("failed to load icon cache")
} else if len(iconCache) > 0 {
logging.Info().Int("count", len(iconCache)).Msg("icon cache loaded")
}
func init() {
go func() {
cleanupTicker := time.NewTicker(cleanUpInterval)
defer cleanupTicker.Stop()
@@ -55,36 +46,21 @@ func InitIconCache() {
}
}
}()
task.OnProgramExit("save_favicon_cache", func() {
iconCacheMu.Lock()
defer iconCacheMu.Unlock()
if len(iconCache) == 0 {
return
}
if err := utils.SaveJSON(common.IconCachePath, &iconCache, 0o644); err != nil {
logging.Error().Err(err).Msg("failed to save icon cache")
}
})
}
func pruneExpiredIconCache() {
iconCacheMu.Lock()
defer iconCacheMu.Unlock()
nPruned := 0
for key, icon := range iconCache {
for key, icon := range iconCache.Range {
if icon.IsExpired() {
delete(iconCache, key)
iconCache.Delete(key)
nPruned++
}
}
if len(iconCache) > maxCacheEntries {
if iconCache.Size() > maxCacheEntries {
iconCache.Clear()
newIconCache := make(map[string]*cacheEntry, maxCacheEntries)
i := 0
for key, icon := range iconCache {
for key, icon := range iconCache.Range {
if i == maxCacheEntries {
break
}
@@ -93,7 +69,9 @@ func pruneExpiredIconCache() {
i++
}
}
iconCache = newIconCache
for key, icon := range newIconCache {
iconCache.Store(key, icon)
}
}
if nPruned > 0 {
logging.Info().Int("pruned", nPruned).Msg("pruned expired icon cache")
@@ -101,21 +79,18 @@ func pruneExpiredIconCache() {
}
func PruneRouteIconCache(route route) {
iconCacheMu.Lock()
defer iconCacheMu.Unlock()
delete(iconCache, route.Key())
iconCache.Delete(route.Key())
}
func loadIconCache(key string) *FetchResult {
iconCacheMu.RLock()
defer iconCacheMu.RUnlock()
icon, ok := iconCache[key]
iconMu.RLock()
defer iconMu.RUnlock()
icon, ok := iconCache.Load(key)
if ok && len(icon.Icon) > 0 {
logging.Debug().
Str("key", key).
Msg("icon found in cache")
icon.LastAccess.Store(time.Now())
icon.LastAccess.Store(utils.TimeNow())
return &FetchResult{Icon: icon.Icon, contentType: icon.ContentType}
}
return nil
@@ -123,15 +98,17 @@ func loadIconCache(key string) *FetchResult {
func storeIconCache(key string, result *FetchResult) {
icon := result.Icon
if len(icon) > maxCacheSize {
if len(icon) > maxIconSize {
logging.Debug().Int("size", len(icon)).Msg("icon cache size exceeds max cache size")
return
}
iconCacheMu.Lock()
defer iconCacheMu.Unlock()
iconMu.Lock()
defer iconMu.Unlock()
entry := &cacheEntry{Icon: icon, ContentType: result.contentType}
entry.LastAccess.Store(time.Now())
iconCache[key] = entry
iconCache.Store(key, entry)
logging.Debug().Str("key", key).Int("size", len(icon)).Msg("stored icon cache")
}
@@ -140,12 +117,20 @@ func (e *cacheEntry) IsExpired() bool {
}
func (e *cacheEntry) UnmarshalJSON(data []byte) error {
var tmp struct {
Icon []byte `json:"icon"`
ContentType string `json:"content_type,omitempty"`
LastAccess time.Time `json:"last_access"`
}
// check if data is json
if json.Valid(data) {
err := json.Unmarshal(data, &e)
err := json.Unmarshal(data, &tmp)
// return only if unmarshal is successful
// otherwise fallback to base64
if err == nil {
e.Icon = tmp.Icon
e.ContentType = tmp.ContentType
e.LastAccess.Store(tmp.LastAccess)
return nil
}
}
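The hunk above moves the icon cache from a hand-rolled map persisted with LoadJSONIfExist/SaveJSON to a jsonstore-backed store while keeping the same TTL-prune behaviour. A minimal standalone sketch of that prune pattern over a plain map follows; the package, types and names below are illustrative, not the repo's jsonstore API.

package iconcachesketch

import (
	"sync"
	"time"
)

type cacheEntry struct {
	Icon       []byte
	LastAccess time.Time
}

type ttlCache struct {
	mu    sync.Mutex
	items map[string]*cacheEntry
	ttl   time.Duration
}

// prune drops entries whose LastAccess is older than the TTL — the same job
// pruneExpiredIconCache does via iconCache.Range and iconCache.Delete.
func (c *ttlCache) prune() int {
	c.mu.Lock()
	defer c.mu.Unlock()
	pruned := 0
	for key, e := range c.items {
		if time.Since(e.LastAccess) > c.ttl {
			delete(c.items, key)
			pruned++
		}
	}
	return pruned
}

With jsonstore the same loop runs over iconCache.Range, and persistence is handled by the store itself instead of an explicit OnProgramExit save hook.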

View File

@@ -10,6 +10,7 @@ import (
"github.com/lithammer/fuzzysearch/fuzzy"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/task"
"github.com/yusing/go-proxy/internal/utils"
)
@@ -68,6 +69,10 @@ func InitIconListCache() {
Int("display_names", len(iconsCache.DisplayNames)).
Msg("icon list cache loaded")
}
task.OnProgramExit("save_icon_list_cache", func() {
utils.SaveJSON(common.IconListCachePath, iconsCache, 0o644)
})
}
func ListAvailableIcons() (*Cache, error) {

View File

@@ -38,6 +38,10 @@ func (w *Watcher) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
return
default:
f := &ForceCacheControl{expires: w.expires().Format(http.TimeFormat), ResponseWriter: rw}
w, ok := watcherMap[w.Key()] // could've been reloaded
if !ok {
return
}
w.rp.ServeHTTP(f, r)
}
}

View File

@@ -131,7 +131,7 @@ func NewWatcher(parent task.Parent, r routes.Route) (*Watcher, error) {
case routes.StreamRoute:
w.stream = r
default:
return nil, gperr.New("unexpected route type")
return nil, gperr.Errorf("unexpected route type: %T", r)
}
ctx, cancel := context.WithTimeout(parent.Context(), reqTimeout)
@@ -262,7 +262,7 @@ func (w *Watcher) watchUntilDestroy() (returnCause gperr.Error) {
case <-w.task.Context().Done():
return gperr.Wrap(w.task.FinishCause())
case err := <-errCh:
return err
gperr.LogError("watcher error", err, &w.l)
case e := <-eventCh:
w.l.Debug().Stringer("action", e.Action).Msg("state changed")
if e.Action == events.ActionContainerDestroy {

View File

@@ -13,15 +13,16 @@ func TestNewJSON(t *testing.T) {
}
func TestSaveLoadStore(t *testing.T) {
defer clear(stores)
storesPath = t.TempDir()
store := Store[string]("test")
store.Store("a", "1")
if err := save(); err != nil {
t.Fatal(err)
}
if err := load(); err != nil {
t.Fatal(err)
}
// reload
clear(stores)
loaded := Store[string]("test")
v, ok := loaded.Load("a")
if !ok {
@@ -43,6 +44,8 @@ type testObject struct {
func (*testObject) Initialize() {}
func TestSaveLoadObject(t *testing.T) {
defer clear(stores)
storesPath = t.TempDir()
obj := Object[*testObject]("test")
obj.I = 1
@@ -50,9 +53,8 @@ func TestSaveLoadObject(t *testing.T) {
if err := save(); err != nil {
t.Fatal(err)
}
if err := load(); err != nil {
t.Fatal(err)
}
// reload
clear(stores)
loaded := Object[*testObject]("test")
if loaded.I != 1 || loaded.S != "1" {
t.Fatalf("expected 1, got %d, %s", loaded.I, loaded.S)

View File

@@ -1,297 +0,0 @@
package internal
import (
"encoding/json"
"io"
"net/http"
"sync"
"time"
"github.com/lithammer/fuzzysearch/fuzzy"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/utils"
)
type GitHubContents struct { //! keep this, may reuse in future
Type string `json:"type"`
Path string `json:"path"`
Name string `json:"name"`
Sha string `json:"sha"`
Size int `json:"size"`
}
type (
IconsMap map[string]map[string]struct{}
IconList []string
Cache struct {
WalkxCode, Selfhst IconsMap
DisplayNames ReferenceDisplayNameMap
IconList IconList // combined into a single list
}
ReferenceDisplayNameMap map[string]string
)
func (icons *Cache) needUpdate() bool {
return len(icons.WalkxCode) == 0 || len(icons.Selfhst) == 0 || len(icons.IconList) == 0 || len(icons.DisplayNames) == 0
}
const updateInterval = 2 * time.Hour
var (
iconsCache *Cache
iconsCahceMu sync.RWMutex
lastUpdate time.Time
)
const (
walkxcodeIcons = "https://cdn.jsdelivr.net/gh/walkxcode/dashboard-icons@master/tree.json"
selfhstIcons = "https://cdn.selfh.st/directory/icons.json"
)
func InitIconListCache() {
iconsCahceMu.Lock()
defer iconsCahceMu.Unlock()
iconsCache = &Cache{
WalkxCode: make(IconsMap),
Selfhst: make(IconsMap),
DisplayNames: make(ReferenceDisplayNameMap),
IconList: []string{},
}
err := utils.LoadJSONIfExist(common.IconListCachePath, iconsCache)
if err != nil {
logging.Error().Err(err).Msg("failed to load icon list cache config")
} else if len(iconsCache.IconList) > 0 {
logging.Info().
Int("icons", len(iconsCache.IconList)).
Int("display_names", len(iconsCache.DisplayNames)).
Msg("icon list cache loaded")
}
}
func ListAvailableIcons() (*Cache, error) {
iconsCahceMu.RLock()
if time.Since(lastUpdate) < updateInterval {
if !iconsCache.needUpdate() {
iconsCahceMu.RUnlock()
return iconsCache, nil
}
}
iconsCahceMu.RUnlock()
iconsCahceMu.Lock()
defer iconsCahceMu.Unlock()
logging.Info().Msg("updating icon data")
icons, err := fetchIconData()
if err != nil {
return nil, err
}
logging.Info().
Int("icons", len(icons.IconList)).
Int("display_names", len(icons.DisplayNames)).
Msg("icons list updated")
iconsCache = icons
lastUpdate = time.Now()
err = utils.SaveJSON(common.IconListCachePath, iconsCache, 0o644)
if err != nil {
logging.Warn().Err(err).Msg("failed to save icon list cache")
}
return icons, nil
}
func SearchIcons(keyword string, limit int) ([]string, error) {
icons, err := ListAvailableIcons()
if err != nil {
return nil, err
}
if keyword == "" {
return utils.Slice(icons.IconList, limit), nil
}
return utils.Slice(fuzzy.Find(keyword, icons.IconList), limit), nil
}
func HasWalkxCodeIcon(name string, filetype string) bool {
icons, err := ListAvailableIcons()
if err != nil {
logging.Error().Err(err).Msg("failed to list icons")
return false
}
if _, ok := icons.WalkxCode[filetype]; !ok {
return false
}
_, ok := icons.WalkxCode[filetype][name+"."+filetype]
return ok
}
func HasSelfhstIcon(name string, filetype string) bool {
icons, err := ListAvailableIcons()
if err != nil {
logging.Error().Err(err).Msg("failed to list icons")
return false
}
if _, ok := icons.Selfhst[filetype]; !ok {
return false
}
_, ok := icons.Selfhst[filetype][name+"."+filetype]
return ok
}
func GetDisplayName(reference string) (string, bool) {
icons, err := ListAvailableIcons()
if err != nil {
logging.Error().Err(err).Msg("failed to list icons")
return "", false
}
displayName, ok := icons.DisplayNames[reference]
return displayName, ok
}
func fetchIconData() (*Cache, error) {
walkxCodeIconMap, walkxCodeIconList, err := fetchWalkxCodeIcons()
if err != nil {
return nil, err
}
n := 0
for _, items := range walkxCodeIconMap {
n += len(items)
}
selfhstIconMap, selfhstIconList, referenceToNames, err := fetchSelfhstIcons()
if err != nil {
return nil, err
}
return &Cache{
WalkxCode: walkxCodeIconMap,
Selfhst: selfhstIconMap,
DisplayNames: referenceToNames,
IconList: append(walkxCodeIconList, selfhstIconList...),
}, nil
}
/*
format:
{
"png": [
"*.png",
],
"svg": [
"*.svg",
],
"webp": [
"*.webp",
]
}
*/
func fetchWalkxCodeIcons() (IconsMap, IconList, error) {
req, err := http.NewRequest(http.MethodGet, walkxcodeIcons, nil)
if err != nil {
return nil, nil, err
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, nil, err
}
data := make(map[string][]string)
err = json.Unmarshal(body, &data)
if err != nil {
return nil, nil, err
}
icons := make(IconsMap, len(data))
iconList := make(IconList, 0, 2000)
for fileType, files := range data {
icons[fileType] = make(map[string]struct{}, len(files))
for _, icon := range files {
icons[fileType][icon] = struct{}{}
iconList = append(iconList, "@walkxcode/"+icon)
}
}
return icons, iconList, nil
}
/*
format:
{
"Name": "2FAuth",
"Reference": "2fauth",
"SVG": "Yes",
"PNG": "Yes",
"WebP": "Yes",
"Light": "Yes",
"Category": "Self-Hosted",
"CreatedAt": "2024-08-16 00:27:23+00:00"
}
*/
func fetchSelfhstIcons() (IconsMap, IconList, ReferenceDisplayNameMap, error) {
type SelfhStIcon struct {
Name string `json:"Name"`
Reference string `json:"Reference"`
SVG string `json:"SVG"`
PNG string `json:"PNG"`
WebP string `json:"WebP"`
// Light string
// Category string
// CreatedAt string
}
req, err := http.NewRequest(http.MethodGet, selfhstIcons, nil)
if err != nil {
return nil, nil, nil, err
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, nil, nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, nil, nil, err
}
data := make([]SelfhStIcon, 0, 2000)
err = json.Unmarshal(body, &data)
if err != nil {
return nil, nil, nil, err
}
iconList := make(IconList, 0, len(data)*3)
icons := make(IconsMap)
icons["svg"] = make(map[string]struct{}, len(data))
icons["png"] = make(map[string]struct{}, len(data))
icons["webp"] = make(map[string]struct{}, len(data))
referenceToNames := make(ReferenceDisplayNameMap, len(data))
for _, item := range data {
if item.SVG == "Yes" {
icons["svg"][item.Reference+".svg"] = struct{}{}
iconList = append(iconList, "@selfhst/"+item.Reference+".svg")
}
if item.PNG == "Yes" {
icons["png"][item.Reference+".png"] = struct{}{}
iconList = append(iconList, "@selfhst/"+item.Reference+".png")
}
if item.WebP == "Yes" {
icons["webp"][item.Reference+".webp"] = struct{}{}
iconList = append(iconList, "@selfhst/"+item.Reference+".webp")
}
referenceToNames[item.Reference] = item.Name
}
return icons, iconList, referenceToNames, nil
}

View File

@@ -4,8 +4,8 @@ import (
"bufio"
"io"
"net/http"
"os"
"sync"
"sync/atomic"
"time"
"github.com/rs/zerolog"
@@ -13,6 +13,7 @@ import (
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/logging"
"github.com/yusing/go-proxy/internal/task"
"github.com/yusing/go-proxy/internal/utils/strutils"
"github.com/yusing/go-proxy/internal/utils/synk"
"golang.org/x/time/rate"
)
@@ -22,12 +23,17 @@ type (
task *task.Task
cfg *Config
rawWriter io.Writer
closer []io.Closer
supportRotate []supportRotate
writer *bufio.Writer
writeLock sync.Mutex
closed bool
wps int64
bufSize int
lastAdjust time.Time
lineBufPool *synk.BytesPool // buffer pool for formatting a single log line
errRateLimiter *rate.Limiter
@@ -43,6 +49,12 @@ type (
Name() string // file name or path
}
SupportRotate interface {
io.Writer
supportRotate
Name() string
}
RequestFormatter interface {
// AppendRequestLog appends a formatted log entry to line, with or without a trailing newline
AppendRequestLog(line []byte, req *http.Request, res *http.Response) []byte
@@ -54,15 +66,13 @@ type (
)
const (
StdoutbufSize = 64
MinBufferSize = 4 * kilobyte
MaxBufferSize = 1 * megabyte
MaxBufferSize = 8 * megabyte
bufferAdjustInterval = time.Second // How often we check & adjust
)
const (
flushInterval = 30 * time.Second
rotateInterval = time.Hour
)
const defaultRotateInterval = time.Hour
const (
errRateLimit = 200 * time.Millisecond
@@ -99,24 +109,17 @@ func unwrap[Writer any](w io.Writer) []Writer {
func NewAccessLoggerWithIO(parent task.Parent, writer WriterWithName, anyCfg AnyConfig) *AccessLogger {
cfg := anyCfg.ToConfig()
if cfg.BufferSize == 0 {
cfg.BufferSize = DefaultBufferSize
}
if cfg.BufferSize < MinBufferSize {
cfg.BufferSize = MinBufferSize
}
if cfg.BufferSize > MaxBufferSize {
cfg.BufferSize = MaxBufferSize
}
if _, ok := writer.(*os.File); ok {
cfg.BufferSize = StdoutbufSize
if cfg.RotateInterval == 0 {
cfg.RotateInterval = defaultRotateInterval
}
l := &AccessLogger{
task: parent.Subtask("accesslog."+writer.Name(), true),
cfg: cfg,
writer: bufio.NewWriterSize(writer, cfg.BufferSize),
lineBufPool: synk.NewBytesPool(512, 8192),
rawWriter: writer,
writer: bufio.NewWriterSize(writer, MinBufferSize),
bufSize: MinBufferSize,
lineBufPool: synk.NewBytesPool(256, 768), // for common/combined usually < 256B; for json < 512B
errRateLimiter: rate.NewLimiter(rate.Every(errRateLimit), errBurst),
logger: logging.With().Str("file", writer.Name()).Logger(),
}
@@ -214,9 +217,9 @@ func (l *AccessLogger) Rotate() (result *RotateResult, err error) {
func (l *AccessLogger) handleErr(err error) {
if l.errRateLimiter.Allow() {
gperr.LogError("failed to write access log", err)
gperr.LogError("failed to write access log", err, &l.logger)
} else {
gperr.LogError("too many errors, stopping access log", err)
gperr.LogError("too many errors, stopping access log", err, &l.logger)
l.task.Finish(err)
}
}
@@ -228,19 +231,16 @@ func (l *AccessLogger) start() {
l.task.Finish(nil)
}()
// flushes the buffer every 30 seconds
flushTicker := time.NewTicker(30 * time.Second)
defer flushTicker.Stop()
rotateTicker := time.NewTicker(rotateInterval)
rotateTicker := time.NewTicker(l.cfg.RotateInterval)
defer rotateTicker.Stop()
bufAdjTicker := time.NewTicker(bufferAdjustInterval)
defer bufAdjTicker.Stop()
for {
select {
case <-l.task.Context().Done():
return
case <-flushTicker.C:
l.Flush()
case <-rotateTicker.C:
if !l.ShouldRotate() {
continue
@@ -253,6 +253,8 @@ func (l *AccessLogger) start() {
} else {
l.logger.Info().Msg("no rotation needed")
}
case <-bufAdjTicker.C:
l.adjustBuffer()
}
}
}
@@ -289,8 +291,55 @@ func (l *AccessLogger) write(data []byte) {
if l.closed {
return
}
_, err := l.writer.Write(data)
n, err := l.writer.Write(data)
if err != nil {
l.handleErr(err)
} else if n < len(data) {
l.handleErr(gperr.Errorf("%w, writing %d bytes, only %d written", io.ErrShortWrite, len(data), n))
}
atomic.AddInt64(&l.wps, int64(n))
}
func (l *AccessLogger) adjustBuffer() {
wps := int(atomic.SwapInt64(&l.wps, 0))
origBufSize := l.bufSize
newBufSize := origBufSize
halfDiff := (wps - origBufSize) / 2
if halfDiff < 0 {
halfDiff = -halfDiff
}
step := max(halfDiff, wps/2)
switch {
case origBufSize < wps:
newBufSize += step
if newBufSize > MaxBufferSize {
newBufSize = MaxBufferSize
}
case origBufSize > wps:
newBufSize -= step
if newBufSize < MinBufferSize {
newBufSize = MinBufferSize
}
}
if newBufSize == origBufSize {
return
}
l.writeLock.Lock()
defer l.writeLock.Unlock()
if l.closed {
return
}
l.logger.Info().
Str("wps", strutils.FormatByteSize(wps)).
Str("old", strutils.FormatByteSize(origBufSize)).
Str("new", strutils.FormatByteSize(newBufSize)).
Msg("adjusted buffer size")
l.writer = bufio.NewWriterSize(l.rawWriter, newBufSize)
l.bufSize = newBufSize
}
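adjustBuffer above resizes the bufio writer toward the number of bytes written during the last interval, stepping by at least half the gap and clamping to the configured bounds. A standalone sketch of just that sizing rule; the package name, constants and function name are illustrative, not accesslog exports.

package accesslogsketch

const (
	minSize = 4 * 1024        // mirrors MinBufferSize
	maxSize = 8 * 1024 * 1024 // mirrors MaxBufferSize
)

// nextBufferSize moves the current buffer size toward wps (bytes written in
// the last interval) by at least half the distance, clamped to the bounds.
func nextBufferSize(current, wps int) int {
	halfDiff := (wps - current) / 2
	if halfDiff < 0 {
		halfDiff = -halfDiff
	}
	step := max(halfDiff, wps/2)
	switch {
	case current < wps:
		current += step
		if current > maxSize {
			current = maxSize
		}
	case current > wps:
		current -= step
		if current < minSize {
			current = minSize
		}
	}
	return current
}

For example, with a 4 KiB buffer and roughly 64 KiB written in the last interval, the step is 32 KiB and the buffer grows to 36 KiB; on an idle route it shrinks back toward the minimum the same way.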

View File

@@ -26,12 +26,8 @@ type BackScanner struct {
// NewBackScanner creates a new Scanner to read the file backward.
// chunkSize determines the size of each read chunk from the end of the file.
func NewBackScanner(file ReaderAtSeeker, chunkSize int) *BackScanner {
size, err := file.Seek(0, io.SeekEnd)
if err != nil {
return &BackScanner{err: err}
}
return newBackScanner(file, size, make([]byte, chunkSize))
func NewBackScanner(file ReaderAtSeeker, fileSize int64, chunkSize int) *BackScanner {
return newBackScanner(file, fileSize, make([]byte, chunkSize))
}
func newBackScanner(file ReaderAtSeeker, fileSize int64, buf []byte) *BackScanner {
@@ -111,11 +107,6 @@ func (s *BackScanner) Bytes() []byte {
return s.line
}
// FileSize returns the size of the file.
func (s *BackScanner) FileSize() int64 {
return s.size
}
// Err returns the first non-EOF error encountered by the scanner.
func (s *BackScanner) Err() error {
return s.err

View File

@@ -67,7 +67,7 @@ func TestBackScanner(t *testing.T) {
}
// Create scanner with small chunk size to test chunking
scanner := NewBackScanner(mockFile, 10)
scanner := NewBackScanner(mockFile, mockFile.MustSize(), 10)
// Collect all lines
var lines [][]byte
@@ -108,7 +108,7 @@ func TestBackScannerWithVaryingChunkSizes(t *testing.T) {
t.Fatalf("failed to write to mock file: %v", err)
}
scanner := NewBackScanner(mockFile, chunkSize)
scanner := NewBackScanner(mockFile, mockFile.MustSize(), chunkSize)
var lines [][]byte
for scanner.Scan() {
@@ -170,7 +170,8 @@ func TestReset(t *testing.T) {
}
}
linesRead := 0
s := NewBackScanner(file, defaultChunkSize)
stat, _ := file.Stat()
s := NewBackScanner(file, stat.Size(), defaultChunkSize)
for s.Scan() {
linesRead++
}
@@ -199,7 +200,7 @@ func BenchmarkBackScanner(b *testing.B) {
}
for i := range 14 {
chunkSize := (2 << i) * kilobyte
scanner := NewBackScanner(mockFile, chunkSize)
scanner := NewBackScanner(mockFile, mockFile.MustSize(), chunkSize)
name := strutils.FormatByteSize(chunkSize)
b.ResetTimer()
b.Run(name, func(b *testing.B) {
@@ -226,7 +227,8 @@ func BenchmarkBackScannerRealFile(b *testing.B) {
}
}
scanner := NewBackScanner(file, 256*kilobyte)
stat, _ := file.Stat()
scanner := NewBackScanner(file, stat.Size(), 256*kilobyte)
b.ResetTimer()
for scanner.Scan() {
}
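The tests and benchmarks above now stat the file themselves and hand the size to NewBackScanner. Below is a hedged usage sketch of the new signature, assuming the accesslog package context where NewBackScanner, Bytes and Err are defined; the helper name and path handling are illustrative.

package accesslog

import "os"

// scanBackward shows the new call shape: the caller supplies the file size
// instead of the scanner seeking to the end itself.
func scanBackward(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	stat, err := f.Stat()
	if err != nil {
		return err
	}

	s := NewBackScanner(f, stat.Size(), 256*1024) // 256 KiB chunks, as in BenchmarkBackScannerRealFile
	for s.Scan() {
		_ = s.Bytes() // lines are yielded from the end of the file toward the start
	}
	return s.Err()
}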

View File

@@ -1,16 +1,19 @@
package accesslog
import (
"time"
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/utils"
)
type (
ConfigBase struct {
BufferSize int `json:"buffer_size"`
Path string `json:"path"`
Stdout bool `json:"stdout"`
Retention *Retention `json:"retention" aliases:"keep"`
B int `json:"buffer_size"` // Deprecated: buffer size is adjusted dynamically
Path string `json:"path"`
Stdout bool `json:"stdout"`
Retention *Retention `json:"retention" aliases:"keep"`
RotateInterval time.Duration `json:"rotate_interval,omitempty"`
}
ACLLoggerConfig struct {
ConfigBase
@@ -55,8 +58,6 @@ var (
ReqLoggerFormats = []Format{FormatCommon, FormatCombined, FormatJSON}
)
const DefaultBufferSize = 64 * kilobyte // 64KB
func (cfg *ConfigBase) Validate() gperr.Error {
if cfg.Path == "" && !cfg.Stdout {
return gperr.New("path or stdout is required")
@@ -99,8 +100,7 @@ func (cfg *RequestLoggerConfig) ToConfig() *Config {
func DefaultRequestLoggerConfig() *RequestLoggerConfig {
return &RequestLoggerConfig{
ConfigBase: ConfigBase{
BufferSize: DefaultBufferSize,
Retention: &Retention{Days: 30},
Retention: &Retention{Days: 30},
},
Format: FormatCombined,
Fields: Fields{
@@ -120,8 +120,7 @@ func DefaultRequestLoggerConfig() *RequestLoggerConfig {
func DefaultACLLoggerConfig() *ACLLoggerConfig {
return &ACLLoggerConfig{
ConfigBase: ConfigBase{
BufferSize: DefaultBufferSize,
Retention: &Retention{Days: 30},
Retention: &Retention{Days: 30},
},
}
}
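With buffer_size deprecated and rotate_interval added, a request logger config built in Go would look roughly like the sketch below, assuming the accesslog package context; the path and values are examples only.

package accesslog

import "time"

// exampleRequestLoggerConfig illustrates the new config shape: buffer_size is
// gone (the writer now sizes itself dynamically) and rotate_interval is tunable.
func exampleRequestLoggerConfig() *RequestLoggerConfig {
	return &RequestLoggerConfig{
		ConfigBase: ConfigBase{
			Path:           "/var/log/godoxy/access.log", // illustrative path
			Retention:      &Retention{Days: 30},
			RotateInterval: 6 * time.Hour,
		},
		Format: FormatCombined,
	}
}

Leaving RotateInterval at zero falls back to defaultRotateInterval (one hour), matching NewAccessLoggerWithIO above.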

View File

@@ -11,7 +11,6 @@ import (
func TestNewConfig(t *testing.T) {
labels := map[string]string{
"proxy.buffer_size": "10",
"proxy.format": "combined",
"proxy.path": "/tmp/access.log",
"proxy.filters.status_codes.values": "200-299",
@@ -30,10 +29,9 @@ func TestNewConfig(t *testing.T) {
expect.NoError(t, err)
var config RequestLoggerConfig
err = utils.Deserialize(parsed, &config)
err = utils.MapUnmarshalValidate(parsed, &config)
expect.NoError(t, err)
expect.Equal(t, config.BufferSize, 10)
expect.Equal(t, config.Format, FormatCombined)
expect.Equal(t, config.Path, "/tmp/access.log")
expect.Equal(t, config.Filters.StatusCodes.Values, []*StatusCodeRange{{Start: 200, End: 299}})

View File

@@ -2,8 +2,9 @@ package accesslog
import (
"fmt"
"io"
"os"
pathPkg "path"
"path/filepath"
"sync"
"github.com/yusing/go-proxy/internal/logging"
@@ -11,7 +12,7 @@ import (
)
type File struct {
*os.File
f *os.File
// os.File.Name() may not be equal to the key of `openedFiles`.
// Store it so it can be deleted from `openedFiles` later.
@@ -25,21 +26,25 @@ var (
openedFilesMu sync.Mutex
)
func newFileIO(path string) (WriterWithName, error) {
func newFileIO(path string) (SupportRotate, error) {
openedFilesMu.Lock()
defer openedFilesMu.Unlock()
var file *File
path = pathPkg.Clean(path)
path = filepath.Clean(path)
if opened, ok := openedFiles[path]; ok {
opened.refCount.Add()
return opened, nil
} else {
f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_RDWR, 0o644)
// cannot open with O_APPEND because we need Seek and WriteAt
f, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, 0o644)
if err != nil {
return nil, fmt.Errorf("access log open error: %w", err)
}
file = &File{File: f, path: path, refCount: utils.NewRefCounter()}
if _, err := f.Seek(0, io.SeekEnd); err != nil {
return nil, fmt.Errorf("access log seek error: %w", err)
}
file = &File{f: f, path: path, refCount: utils.NewRefCounter()}
openedFiles[path] = file
go file.closeOnZero()
}
@@ -47,6 +52,38 @@ func newFileIO(path string) (WriterWithName, error) {
return file, nil
}
func (f *File) Name() string {
return f.f.Name()
}
func (f *File) Write(p []byte) (n int, err error) {
return f.f.Write(p)
}
func (f *File) ReadAt(p []byte, off int64) (n int, err error) {
return f.f.ReadAt(p, off)
}
func (f *File) WriteAt(p []byte, off int64) (n int, err error) {
return f.f.WriteAt(p, off)
}
func (f *File) Seek(offset int64, whence int) (int64, error) {
return f.f.Seek(offset, whence)
}
func (f *File) Size() (int64, error) {
stat, err := f.f.Stat()
if err != nil {
return 0, err
}
return stat.Size(), nil
}
func (f *File) Truncate(size int64) error {
return f.f.Truncate(size)
}
func (f *File) Close() error {
f.refCount.Sub()
return nil
@@ -62,5 +99,5 @@ func (f *File) closeOnZero() {
openedFilesMu.Lock()
delete(openedFiles, f.path)
openedFilesMu.Unlock()
f.File.Close()
f.f.Close()
}

View File

@@ -50,7 +50,6 @@ func TestConcurrentAccessLoggerLogAndFlush(t *testing.T) {
file := NewMockFile()
cfg := DefaultRequestLoggerConfig()
cfg.BufferSize = 1024
parent := task.RootTask("test", false)
loggerCount := 5

View File

@@ -17,8 +17,11 @@ type MockFile struct {
noLock
}
var _ SupportRotate = (*MockFile)(nil)
func NewMockFile() *MockFile {
f, _ := afero.TempFile(afero.NewMemMapFs(), "", "")
f.Seek(0, io.SeekEnd)
return &MockFile{
File: f,
}
@@ -47,3 +50,13 @@ func (m *MockFile) NumLines() int {
}
return count
}
func (m *MockFile) Size() (int64, error) {
stat, _ := m.Stat()
return stat.Size(), nil
}
func (m *MockFile) MustSize() int64 {
size, _ := m.Size()
return size
}

View File

@@ -6,16 +6,18 @@ import (
"time"
"github.com/rs/zerolog"
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/utils"
"github.com/yusing/go-proxy/internal/utils/strutils"
"github.com/yusing/go-proxy/internal/utils/synk"
)
type supportRotate interface {
io.ReadSeeker
io.Seeker
io.ReaderAt
io.WriterAt
Truncate(size int64) error
Size() (int64, error)
}
type RotateResult struct {
@@ -29,17 +31,29 @@ type RotateResult struct {
}
func (r *RotateResult) Print(logger *zerolog.Logger) {
logger.Info().
Str("original_size", strutils.FormatByteSize(r.OriginalSize)).
Str("bytes_read", strutils.FormatByteSize(r.NumBytesRead)).
Str("bytes_keep", strutils.FormatByteSize(r.NumBytesKeep)).
Int("lines_read", r.NumLinesRead).
Int("lines_keep", r.NumLinesKeep).
Int("lines_invalid", r.NumLinesInvalid).
event := logger.Info().
Str("original_size", strutils.FormatByteSize(r.OriginalSize))
if r.NumBytesRead > 0 {
event.Str("bytes_read", strutils.FormatByteSize(r.NumBytesRead))
}
if r.NumBytesKeep > 0 {
event.Str("bytes_keep", strutils.FormatByteSize(r.NumBytesKeep))
}
if r.NumLinesRead > 0 {
event.Int("lines_read", r.NumLinesRead)
}
if r.NumLinesKeep > 0 {
event.Int("lines_keep", r.NumLinesKeep)
}
if r.NumLinesInvalid > 0 {
event.Int("lines_invalid", r.NumLinesInvalid)
}
event.Str("saved", strutils.FormatByteSize(r.OriginalSize-r.NumBytesKeep)).
Msg("log rotate result")
}
func (r *RotateResult) Add(other *RotateResult) {
r.OriginalSize += other.OriginalSize
r.NumBytesRead += other.NumBytesRead
r.NumBytesKeep += other.NumBytesKeep
r.NumLinesRead += other.NumLinesRead
@@ -66,9 +80,23 @@ var rotateBytePool = synk.NewBytesPool(0, 16*1024*1024)
// If the file does not need to be rotated, it returns nil, nil.
func rotateLogFile(file supportRotate, config *Retention) (result *RotateResult, err error) {
if config.KeepSize > 0 {
return rotateLogFileBySize(file, config)
result, err = rotateLogFileBySize(file, config)
} else {
result, err = rotateLogFileByPolicy(file, config)
}
if err != nil {
return nil, err
}
if _, err := file.Seek(0, io.SeekEnd); err != nil {
return nil, err
}
return result, nil
}
func rotateLogFileByPolicy(file supportRotate, config *Retention) (result *RotateResult, err error) {
var shouldStop func() bool
t := utils.TimeNow()
@@ -82,16 +110,21 @@ func rotateLogFile(file supportRotate, config *Retention) (result *RotateResult,
return nil, nil // should not happen
}
s := NewBackScanner(file, defaultChunkSize)
result = &RotateResult{
OriginalSize: s.FileSize(),
fileSize, err := file.Size()
if err != nil {
return nil, err
}
// nothing to rotate, return nil
if result.OriginalSize == 0 {
if fileSize == 0 {
return nil, nil
}
s := NewBackScanner(file, fileSize, defaultChunkSize)
result = &RotateResult{
OriginalSize: fileSize,
}
// Store the line positions and sizes we want to keep
linesToKeep := make([]lineInfo, 0)
lastLineValid := false
@@ -169,6 +202,8 @@ func rotateLogFile(file supportRotate, config *Retention) (result *RotateResult,
// Write it to the new position
if _, err := file.WriteAt(buf, writePos); err != nil {
return nil, err
} else if n < line.Size {
return nil, gperr.Errorf("%w, writing %d bytes, only %d written", io.ErrShortWrite, line.Size, n)
}
writePos += n
}
@@ -187,7 +222,7 @@ func rotateLogFile(file supportRotate, config *Retention) (result *RotateResult,
//
// Invalid lines will not be detected and will not be counted in the result.
func rotateLogFileBySize(file supportRotate, config *Retention) (result *RotateResult, err error) {
filesize, err := file.Seek(0, io.SeekEnd)
filesize, err := file.Size()
if err != nil {
return nil, err
}
@@ -234,7 +269,6 @@ var timeJSON = []byte(`"time":"`)
//
// The returned time is not validated.
func ExtractTime(line []byte) []byte {
//TODO: optimize this
switch line[0] {
case '{': // JSON format
if i := bytes.Index(line, timeJSON); i != -1 {

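ExtractTime above only shows the JSON branch, which looks for the raw `"time":"` prefix. Below is a standalone sketch of that branch; the closing-quote handling is an assumption, since the rest of the function is not shown in this diff, and the package name is illustrative.

package accesslogsketch

import "bytes"

var timeJSON = []byte(`"time":"`)

// extractJSONTime mirrors the JSON branch of ExtractTime: return the raw
// value of the "time" field without validating it.
func extractJSONTime(line []byte) []byte {
	if len(line) == 0 || line[0] != '{' {
		return nil
	}
	i := bytes.Index(line, timeJSON)
	if i == -1 {
		return nil
	}
	rest := line[i+len(timeJSON):]
	if end := bytes.IndexByte(rest, '"'); end != -1 {
		return rest[:end]
	}
	return nil
}

For `{"time":"2025-05-01T06:48:38+08:00","status":200}` this returns the timestamp bytes unvalidated, as the comment above notes.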
View File

@@ -133,11 +133,6 @@ func (lb *LoadBalancer) AddServer(srv Server) {
lb.rebalance()
lb.impl.OnAddServer(srv)
lb.l.Debug().
Str("action", "add").
Str("server", srv.Name()).
Msgf("%d servers available", lb.pool.Size())
}
func (lb *LoadBalancer) RemoveServer(srv Server) {

View File

@@ -17,8 +17,7 @@ import (
)
type cloudflareRealIP struct {
realIP realIP
Recursive bool
realIP realIP
}
const (
@@ -47,7 +46,7 @@ var CloudflareRealIP = NewMiddleware[cloudflareRealIP]()
func (cri *cloudflareRealIP) setup() {
cri.realIP.RealIPOpts = RealIPOpts{
Header: "CF-Connecting-IP",
Recursive: cri.Recursive,
Recursive: true,
}
}

View File

@@ -118,7 +118,7 @@ func (m *Middleware) apply(optsRaw OptionsRaw) gperr.Error {
} else {
m.priority = DefaultPriority
}
return utils.Deserialize(optsRaw, m.impl)
return utils.MapUnmarshalValidate(optsRaw, m.impl)
}
func (m *Middleware) finalize() error {

View File

@@ -35,8 +35,8 @@ var allMiddlewares = map[string]*Middleware{
}
var (
ErrUnknownMiddleware = gperr.New("unknown middleware")
ErrDuplicatedMiddleware = gperr.New("duplicated middleware")
ErrUnknownMiddleware = gperr.New("unknown middleware")
ErrMiddlewareAlreadyExists = gperr.New("middleware with the same name already exists")
)
func Get(name string) (*Middleware, Error) {
@@ -69,7 +69,7 @@ func LoadComposeFiles() {
for name, m := range mws {
name = strutils.ToLowerNoSnake(name)
if _, ok := allMiddlewares[name]; ok {
errs.Add(ErrDuplicatedMiddleware.Subject(name))
errs.Add(ErrMiddlewareAlreadyExists.Subject(name))
continue
}
allMiddlewares[name] = m

View File

@@ -47,7 +47,7 @@ func (cfg *NotificationConfig) UnmarshalMap(m map[string]any) (err gperr.Error)
}
// unmarshal provider config
if err := utils.Deserialize(m, cfg.Provider); err != nil {
if err := utils.MapUnmarshalValidate(m, cfg.Provider); err != nil {
return err
}

View File

@@ -150,7 +150,7 @@ func TestNotificationConfig(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
var cfg NotificationConfig
provider := tt.cfg["provider"]
err := utils.Deserialize(tt.cfg, &cfg)
err := utils.MapUnmarshalValidate(tt.cfg, &cfg)
if tt.wantErr {
ExpectHasError(t, err)
} else {

View File

@@ -83,9 +83,6 @@ func (disp *Dispatcher) start() {
}
func (disp *Dispatcher) dispatch(msg *LogMessage) {
if true {
return
}
task := disp.task.Subtask("dispatcher")
defer task.Finish("notif dispatched")

View File

@@ -80,8 +80,11 @@ func (p *DockerProvider) loadRoutesImpl() (route.Routes, gperr.Error) {
errs.Add(err.Subject(container.ContainerName))
}
for k, v := range newEntries {
if routes.Contains(k) {
errs.Addf("duplicated alias %s", k)
if conflict, ok := routes[k]; ok {
errs.Add(gperr.Multiline().
Addf("route with alias %s already exists", k).
Addf("container %s", container.ContainerName).
Addf("conflicting container %s", conflict.Container.ContainerName))
} else {
routes[k] = v
}
@@ -169,7 +172,7 @@ func (p *DockerProvider) routesFromContainerLabels(container *docker.Container)
}
// deserialize map into entry object
err := U.Deserialize(entryMap, r)
err := U.MapUnmarshalValidate(entryMap, r)
if err != nil {
errs.Add(err.Subject(alias))
} else {
@@ -178,7 +181,7 @@ func (p *DockerProvider) routesFromContainerLabels(container *docker.Container)
}
if wildcardProps != nil {
for _, re := range routes {
if err := U.Deserialize(wildcardProps, re); err != nil {
if err := U.MapUnmarshalValidate(wildcardProps, re); err != nil {
errs.Add(err.Subject(docker.WildcardAlias))
break
}

View File

@@ -12,19 +12,13 @@ import (
type EventHandler struct {
provider *Provider
errs *gperr.Builder
added *gperr.Builder
removed *gperr.Builder
updated *gperr.Builder
errs *gperr.Builder
}
func (p *Provider) newEventHandler() *EventHandler {
return &EventHandler{
provider: p,
errs: gperr.NewBuilder("event errors"),
added: gperr.NewBuilder("added"),
removed: gperr.NewBuilder("removed"),
updated: gperr.NewBuilder("updated"),
}
}
@@ -88,15 +82,12 @@ func (handler *EventHandler) Add(parent task.Parent, route *route.Route) {
err := handler.provider.startRoute(parent, route)
if err != nil {
handler.errs.Add(err.Subject("add"))
} else {
handler.added.Adds(route.Alias)
}
}
func (handler *EventHandler) Remove(route *route.Route) {
route.Finish("route removed")
delete(handler.provider.routes, route.Alias)
handler.removed.Adds(route.Alias)
}
func (handler *EventHandler) Update(parent task.Parent, oldRoute *route.Route, newRoute *route.Route) {
@@ -104,18 +95,11 @@ func (handler *EventHandler) Update(parent task.Parent, oldRoute *route.Route, n
err := handler.provider.startRoute(parent, newRoute)
if err != nil {
handler.errs.Add(err.Subject("update"))
} else {
handler.updated.Adds(newRoute.Alias)
}
}
func (handler *EventHandler) Log() {
results := gperr.NewBuilder("event occurred")
results.AddFrom(handler.added, false)
results.AddFrom(handler.removed, false)
results.AddFrom(handler.updated, false)
results.AddFrom(handler.errs, false)
if result := results.String(); result != "" {
handler.provider.Logger().Info().Msg(result)
if err := handler.errs.Error(); err != nil {
handler.provider.Logger().Info().Msg(err.Error())
}
}

View File

@@ -34,7 +34,7 @@ func FileProviderImpl(filename string) (ProviderImpl, error) {
}
func validate(data []byte) (routes route.Routes, err gperr.Error) {
err = utils.DeserializeYAML(data, &routes)
err = utils.UnmarshalValidateYAML(data, &routes)
return
}

View File

@@ -22,19 +22,19 @@ import (
"github.com/yusing/go-proxy/internal/watcher/health/monitor"
)
type (
ReveseProxyRoute struct {
*Route
type ReveseProxyRoute struct {
*Route
HealthMon health.HealthMonitor `json:"health,omitempty"`
HealthMon health.HealthMonitor `json:"health,omitempty"`
loadBalancer *loadbalancer.LoadBalancer
handler http.Handler
rp *reverseproxy.ReverseProxy
loadBalancer *loadbalancer.LoadBalancer
handler http.Handler
rp *reverseproxy.ReverseProxy
task *task.Task
}
)
task *task.Task
}
var _ routes.ReverseProxyRoute = (*ReveseProxyRoute)(nil)
// var globalMux = http.NewServeMux() // TODO: support regex subdomain matching.
@@ -88,6 +88,11 @@ func NewReverseProxyRoute(base *Route) (*ReveseProxyRoute, gperr.Error) {
return r, nil
}
// ReverseProxy implements routes.ReverseProxyRoute.
func (r *ReveseProxyRoute) ReverseProxy() *reverseproxy.ReverseProxy {
return r.rp
}
// Start implements task.TaskStarter.
func (r *ReveseProxyRoute) Start(parent task.Parent) gperr.Error {
if existing, ok := routes.HTTP.Get(r.Key()); ok && !r.UseLoadBalance() {

View File

@@ -8,7 +8,6 @@ import (
"github.com/docker/docker/api/types/container"
"github.com/yusing/go-proxy/agent/pkg/agent"
"github.com/yusing/go-proxy/internal"
"github.com/yusing/go-proxy/internal/docker"
"github.com/yusing/go-proxy/internal/gperr"
"github.com/yusing/go-proxy/internal/homepage"
@@ -152,6 +151,10 @@ func (r *Route) Validate() gperr.Error {
}
}
if r.Container != nil && r.Container.IdlewatcherConfig != nil {
r.Idlewatcher = r.Container.IdlewatcherConfig
}
// return error if route is localhost:<godoxy_port>
switch r.Host {
case "localhost", "127.0.0.1":
@@ -484,7 +487,7 @@ func (r *Route) FinalizeHomepageConfig() {
} else {
key = r.Alias
}
displayName, ok := internal.GetDisplayName(key)
displayName, ok := homepage.GetDisplayName(key)
if ok {
hp.Name = displayName
} else {

View File

@@ -28,7 +28,7 @@ func TestParseRule(t *testing.T) {
var rules struct {
Rules Rules
}
err := utils.Deserialize(utils.SerializedObject{"rules": test}, &rules)
err := utils.MapUnmarshalValidate(utils.SerializedObject{"rules": test}, &rules)
ExpectNoError(t, err)
ExpectEqual(t, len(rules.Rules), len(test))
ExpectEqual(t, rules.Rules[0].Name, "test")

View File

@@ -40,7 +40,7 @@ func TestHTTPConfigDeserialize(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
cfg := Route{}
tt.input["host"] = "internal"
err := utils.Deserialize(tt.input, &cfg)
err := utils.MapUnmarshalValidate(tt.input, &cfg)
if err != nil {
expect.NoError(t, err)
}

View File

@@ -170,8 +170,8 @@ func dive(dst reflect.Value) (v reflect.Value, t reflect.Type, err gperr.Error)
}
}
// Deserialize takes a SerializedObject and a target value, and assigns the values in the SerializedObject to the target value.
// Deserialize ignores case differences between the field names in the SerializedObject and the target.
// MapUnmarshalValidate takes a SerializedObject and a target value, and assigns the values in the SerializedObject to the target value.
// MapUnmarshalValidate ignores case differences between the field names in the SerializedObject and the target.
//
// The target value must be a struct or a map[string]any.
// If the target value is a struct and implements the MapUnmarshaller interface,
@@ -183,11 +183,11 @@ func dive(dst reflect.Value) (v reflect.Value, t reflect.Type, err gperr.Error)
// If the target value is a map[string]any the SerializedObject will be deserialized into the map.
//
// The function returns an error if the target value is not a struct or a map[string]any, or if there is an error during deserialization.
func Deserialize(src SerializedObject, dst any) (err gperr.Error) {
return deserialize(src, dst, true)
func MapUnmarshalValidate(src SerializedObject, dst any) (err gperr.Error) {
return mapUnmarshalValidate(src, dst, true)
}
func deserialize(src SerializedObject, dst any, checkValidateTag bool) (err gperr.Error) {
func mapUnmarshalValidate(src SerializedObject, dst any, checkValidateTag bool) (err gperr.Error) {
dstV := reflect.ValueOf(dst)
dstT := dstV.Type()
@@ -314,7 +314,7 @@ func Convert(src reflect.Value, dst reflect.Value, checkValidateTag bool) gperr.
return gperr.Errorf("convert: dst is %w", ErrNilValue)
}
if !src.IsValid() {
if !src.IsValid() || src.IsZero() {
if dst.CanSet() {
dst.Set(reflect.Zero(dst.Type()))
return nil
@@ -378,7 +378,7 @@ func Convert(src reflect.Value, dst reflect.Value, checkValidateTag bool) gperr.
if !ok {
return ErrUnsupportedConversion.Subject(dstT.String() + " to " + srcT.String())
}
return deserialize(obj, dst.Addr().Interface(), checkValidateTag)
return mapUnmarshalValidate(obj, dst.Addr().Interface(), checkValidateTag)
case srcKind == reflect.Slice:
if src.Len() == 0 {
return nil
@@ -500,21 +500,21 @@ func ConvertString(src string, dst reflect.Value) (convertible bool, convErr gpe
return true, Convert(reflect.ValueOf(tmp), dst, true)
}
func DeserializeYAML[T any](data []byte, target *T) gperr.Error {
func UnmarshalValidateYAML[T any](data []byte, target *T) gperr.Error {
m := make(map[string]any)
if err := yaml.Unmarshal(data, &m); err != nil {
return gperr.Wrap(err)
}
return Deserialize(m, target)
return MapUnmarshalValidate(m, target)
}
func DeserializeYAMLMap[V any](data []byte) (_ functional.Map[string, V], err gperr.Error) {
func UnmarshalValidateYAMLXSync[V any](data []byte) (_ functional.Map[string, V], err gperr.Error) {
m := make(map[string]any)
if err = gperr.Wrap(yaml.Unmarshal(data, &m)); err != nil {
return
}
m2 := make(map[string]V, len(m))
if err = Deserialize(m, m2); err != nil {
if err = MapUnmarshalValidate(m, m2); err != nil {
return
}
return functional.NewMapFrom(m2), nil

View File

@@ -40,7 +40,7 @@ func TestDeserialize(t *testing.T) {
t.Run("deserialize", func(t *testing.T) {
var s2 S
err := Deserialize(testStructSerialized, &s2)
err := MapUnmarshalValidate(testStructSerialized, &s2)
ExpectNoError(t, err)
ExpectEqual(t, s2, testStruct)
})
@@ -60,13 +60,13 @@ func TestDeserializeAnonymousField(t *testing.T) {
}
// all, anon := extractFields(reflect.TypeOf(s2))
// t.Fatalf("anon %v, all %v", anon, all)
err := Deserialize(map[string]any{"a": 1, "b": 2, "c": 3}, &s)
err := MapUnmarshalValidate(map[string]any{"a": 1, "b": 2, "c": 3}, &s)
ExpectNoError(t, err)
ExpectEqual(t, s.A, 1)
ExpectEqual(t, s.B, 2)
ExpectEqual(t, s.C, 3)
err = Deserialize(map[string]any{"a": 1, "b": 2, "c": 3}, &s2)
err = MapUnmarshalValidate(map[string]any{"a": 1, "b": 2, "c": 3}, &s2)
ExpectNoError(t, err)
ExpectEqual(t, s2.A, 1)
ExpectEqual(t, s2.B, 2)
@@ -148,7 +148,7 @@ func (c *testType) Parse(v string) (err error) {
func TestConvertor(t *testing.T) {
t.Run("valid", func(t *testing.T) {
m := new(testModel)
ExpectNoError(t, Deserialize(map[string]any{"Test": "123"}, m))
ExpectNoError(t, MapUnmarshalValidate(map[string]any{"Test": "123"}, m))
ExpectEqual(t, m.Test.foo, 123)
ExpectEqual(t, m.Test.bar, "123")
@@ -156,18 +156,28 @@ func TestConvertor(t *testing.T) {
t.Run("int_to_string", func(t *testing.T) {
m := new(testModel)
ExpectNoError(t, Deserialize(map[string]any{"Test": "123"}, m))
ExpectNoError(t, MapUnmarshalValidate(map[string]any{"Test": "123"}, m))
ExpectEqual(t, m.Test.foo, 123)
ExpectEqual(t, m.Test.bar, "123")
ExpectNoError(t, Deserialize(map[string]any{"Baz": 123}, m))
ExpectNoError(t, MapUnmarshalValidate(map[string]any{"Baz": 123}, m))
ExpectEqual(t, m.Baz, "123")
})
t.Run("invalid", func(t *testing.T) {
m := new(testModel)
ExpectError(t, ErrUnsupportedConversion, Deserialize(map[string]any{"Test": struct{}{}}, m))
err := MapUnmarshalValidate(map[string]any{"Test": struct{ a int }{1}}, m)
ExpectError(t, ErrUnsupportedConversion, err)
})
t.Run("set_empty", func(t *testing.T) {
m := testModel{
Test: testType{1, "2"},
Baz: "3",
}
ExpectNoError(t, MapUnmarshalValidate(map[string]any{"Test": nil, "Baz": nil}, &m))
ExpectEqual(t, m, testModel{})
})
}
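The renamed helpers are exercised in the tests above; below is a standalone usage sketch, assuming the utils package context where MapUnmarshalValidate and UnmarshalValidateYAML are defined. The struct and values are illustrative.

package utils

import "github.com/yusing/go-proxy/internal/gperr"

// exampleTarget and exampleUnmarshal only exist to show the call shape of the
// renamed helpers.
type exampleTarget struct {
	Host string
	Port int
}

func exampleUnmarshal() gperr.Error {
	var cfg exampleTarget

	// Map form: field-name matching is case-insensitive, as documented above.
	if err := MapUnmarshalValidate(map[string]any{"host": "example.local", "port": 8080}, &cfg); err != nil {
		return err
	}

	// YAML form: the bytes are unmarshaled into a map first, then validated
	// into the target the same way.
	return UnmarshalValidateYAML([]byte("host: example.local\nport: 8080\n"), &cfg)
}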

View File

@@ -136,12 +136,19 @@ func (w *DockerWatcher) EventsWithOptions(ctx context.Context, options DockerLis
err = nil
// trigger reload (clear routes)
eventCh <- reloadTrigger
for !w.checkConnection(ctx) {
retry := time.NewTicker(dockerWatcherRetryInterval)
defer retry.Stop()
ok := false
for !ok {
select {
case <-ctx.Done():
return
case <-time.After(dockerWatcherRetryInterval):
continue
case <-retry.C:
if w.checkConnection(ctx) {
ok = true
break
}
}
}
// connection successful, trigger reload (reload routes)

View File

@@ -3,14 +3,9 @@ package pkg
import (
"fmt"
"net/http"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"github.com/yusing/go-proxy/internal/common"
"github.com/yusing/go-proxy/internal/logging"
)
func GetVersion() Version {
@@ -31,26 +26,26 @@ func init() {
currentVersion = ParseVersion(version)
// ignore errors
versionFile := filepath.Join(common.DataDir, "version")
var lastVersionStr string
f, err := os.OpenFile(versionFile, os.O_RDWR|os.O_CREATE, 0o644)
if err == nil {
_, err = fmt.Fscanf(f, "%s", &lastVersionStr)
lastVersion = ParseVersion(lastVersionStr)
}
if err != nil && !os.IsNotExist(err) {
logging.Warn().Err(err).Msg("failed to read version file")
return
}
if err := f.Truncate(0); err != nil {
logging.Warn().Err(err).Msg("failed to truncate version file")
return
}
_, err = f.WriteString(version)
if err != nil {
logging.Warn().Err(err).Msg("failed to save version file")
return
}
// versionFile := filepath.Join(common.DataDir, "version")
// var lastVersionStr string
// f, err := os.OpenFile(versionFile, os.O_RDWR|os.O_CREATE, 0o644)
// if err == nil {
// _, err = fmt.Fscanf(f, "%s", &lastVersionStr)
// lastVersion = ParseVersion(lastVersionStr)
// }
// if err != nil && !os.IsNotExist(err) {
// logging.Warn().Err(err).Msg("failed to read version file")
// return
// }
// if err := f.Truncate(0); err != nil {
// logging.Warn().Err(err).Msg("failed to truncate version file")
// return
// }
// _, err = f.WriteString(version)
// if err != nil {
// logging.Warn().Err(err).Msg("failed to save version file")
// return
// }
}
type Version struct{ Generation, Major, Minor int }

View File

@@ -1,33 +0,0 @@
# To generate schema
# comment out this part from typescript-json-schema.js#L884
#
# if (indexType.flags !== ts.TypeFlags.Number && !isIndexedObject) {
# throw new Error("Not supported: IndexSignatureDeclaration with index symbol other than a number or a string");
# }
gen-schema-single:
bun -bun typescript-json-schema --noExtraProps --required --skipLibCheck --tsNodeRegister=true -o "${OUT}" "${IN}" ${CLASS}
# minify
python3 -c "import json; f=open('${OUT}', 'r'); j=json.load(f); f.close(); f=open('${OUT}', 'w'); json.dump(j, f, separators=(',', ':'));"
gen-schema:
bun -bun tsc
sed -i 's#"type": "module"#"type": "commonjs"#' package.json
make IN=config/config.ts \
CLASS=Config \
OUT=config.schema.json \
gen-schema-single
make IN=providers/routes.ts \
CLASS=Routes \
OUT=routes.schema.json \
gen-schema-single
make IN=middlewares/middleware_compose.ts \
CLASS=MiddlewareCompose \
OUT=middleware_compose.schema.json \
gen-schema-single
make IN=docker.ts \
CLASS=DockerRoutes \
OUT=docker_routes.schema.json \
gen-schema-single
sed -i 's#"type": "commonjs"#"type": "module"#' package.json
bun format:write

View File

@@ -1,120 +0,0 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "godoxy-schemas",
"devDependencies": {
"prettier": "^3.5.3",
"typescript": "^5.8.3",
"typescript-json-schema": "^0.65.1",
},
},
},
"packages": {
"@cspotcode/source-map-support": ["@cspotcode/source-map-support@0.8.1", "", { "dependencies": { "@jridgewell/trace-mapping": "0.3.9" } }, "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw=="],
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="],
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="],
"@tsconfig/node10": ["@tsconfig/node10@1.0.11", "", {}, "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw=="],
"@tsconfig/node12": ["@tsconfig/node12@1.0.11", "", {}, "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag=="],
"@tsconfig/node14": ["@tsconfig/node14@1.0.3", "", {}, "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow=="],
"@tsconfig/node16": ["@tsconfig/node16@1.0.4", "", {}, "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA=="],
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
"@types/node": ["@types/node@18.19.86", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-fifKayi175wLyKyc5qUfyENhQ1dCNI1UNjp653d8kuYcPQN5JhX3dGuP/XmvPTg/xRBn1VTLpbmi+H/Mr7tLfQ=="],
"acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="],
"acorn-walk": ["acorn-walk@8.3.4", "", { "dependencies": { "acorn": "^8.11.0" } }, "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g=="],
"ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
"ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
"arg": ["arg@4.1.3", "", {}, "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA=="],
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
"brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="],
"cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="],
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
"create-require": ["create-require@1.1.1", "", {}, "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ=="],
"diff": ["diff@4.0.2", "", {}, "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A=="],
"emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
"escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],
"get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],
"glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="],
"inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="],
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
"is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
"make-error": ["make-error@1.3.6", "", {}, "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw=="],
"minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
"path-equal": ["path-equal@1.2.5", "", {}, "sha512-i73IctDr3F2W+bsOWDyyVm/lqsXO47aY9nsFZUjTT/aljSbkxHxxCoyZ9UUrM8jK0JVod+An+rl48RCsvWM+9g=="],
"path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="],
"prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="],
"require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
"string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
"strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
"ts-node": ["ts-node@10.9.2", "", { "dependencies": { "@cspotcode/source-map-support": "^0.8.0", "@tsconfig/node10": "^1.0.7", "@tsconfig/node12": "^1.0.7", "@tsconfig/node14": "^1.0.0", "@tsconfig/node16": "^1.0.2", "acorn": "^8.4.1", "acorn-walk": "^8.1.1", "arg": "^4.1.0", "create-require": "^1.1.0", "diff": "^4.0.1", "make-error": "^1.1.1", "v8-compile-cache-lib": "^3.0.1", "yn": "3.1.1" }, "peerDependencies": { "@swc/core": ">=1.2.50", "@swc/wasm": ">=1.2.50", "@types/node": "*", "typescript": ">=2.7" }, "optionalPeers": ["@swc/core", "@swc/wasm"], "bin": { "ts-node": "dist/bin.js", "ts-script": "dist/bin-script-deprecated.js", "ts-node-cwd": "dist/bin-cwd.js", "ts-node-esm": "dist/bin-esm.js", "ts-node-script": "dist/bin-script.js", "ts-node-transpile-only": "dist/bin-transpile.js" } }, "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ=="],
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
"typescript-json-schema": ["typescript-json-schema@0.65.1", "", { "dependencies": { "@types/json-schema": "^7.0.9", "@types/node": "^18.11.9", "glob": "^7.1.7", "path-equal": "^1.2.5", "safe-stable-stringify": "^2.2.0", "ts-node": "^10.9.1", "typescript": "~5.5.0", "yargs": "^17.1.1" }, "bin": { "typescript-json-schema": "bin/typescript-json-schema" } }, "sha512-tuGH7ff2jPaUYi6as3lHyHcKpSmXIqN7/mu50x3HlYn0EHzLpmt3nplZ7EuhUkO0eqDRc9GqWNkfjgBPIS9kxg=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"v8-compile-cache-lib": ["v8-compile-cache-lib@3.0.1", "", {}, "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg=="],
"wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
"y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
"yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="],
"yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
"yn": ["yn@3.1.1", "", {}, "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q=="],
"typescript-json-schema/typescript": ["typescript@5.5.4", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q=="],
}
}

File diff suppressed because one or more lines are too long

View File

@@ -1,57 +0,0 @@
import { CIDR, HTTPHeader, HTTPMethod, StatusCodeRange, URI } from "../types";
export declare const ACCESS_LOG_FORMATS: readonly [
"combined",
"common",
"json",
];
export type AccessLogFormat = (typeof ACCESS_LOG_FORMATS)[number];
export type AccessLogConfig = {
/**
* The size of the buffer.
*
* @minimum 0
* @default 65536
* @TJS-type integer
*/
buffer_size?: number;
/** The format of the access log.
*
* @default "combined"
*/
format?: AccessLogFormat;
path: URI;
filters?: AccessLogFilters;
fields?: AccessLogFields;
};
export type AccessLogFilter<T> = {
/** Whether the filter is negative.
*
* @default false
*/
negative?: boolean;
values: T[];
};
export type AccessLogFilters = {
status_code?: AccessLogFilter<StatusCodeRange>;
method?: AccessLogFilter<HTTPMethod>;
host?: AccessLogFilter<string>;
headers?: AccessLogFilter<HTTPHeader>;
cidr?: AccessLogFilter<CIDR>;
};
export declare const ACCESS_LOG_FIELD_MODES: readonly [
"keep",
"drop",
"redact",
];
export type AccessLogFieldMode = (typeof ACCESS_LOG_FIELD_MODES)[number];
export type AccessLogField = {
default?: AccessLogFieldMode;
config: {
[key: string]: AccessLogFieldMode;
};
};
export type AccessLogFields = {
header?: AccessLogField;
query?: AccessLogField;
cookie?: AccessLogField;
};

View File

@@ -1,2 +0,0 @@
export const ACCESS_LOG_FORMATS = ["combined", "common", "json"];
export const ACCESS_LOG_FIELD_MODES = ["keep", "drop", "redact"];

View File

@@ -1,66 +0,0 @@
import { CIDR, HTTPHeader, HTTPMethod, StatusCodeRange, URI } from "../types";
export const ACCESS_LOG_FORMATS = ["combined", "common", "json"] as const;
export type AccessLogFormat = (typeof ACCESS_LOG_FORMATS)[number];
export type AccessLogConfig = {
/**
* The size of the buffer.
*
* @minimum 0
* @default 65536
* @TJS-type integer
*/
buffer_size?: number;
/** The format of the access log.
*
* @default "combined"
*/
format?: AccessLogFormat;
/* The path to the access log file. */
path: URI;
/* The access log filters. */
filters?: AccessLogFilters;
/* The access log fields. */
fields?: AccessLogFields;
};
export type AccessLogFilter<T> = {
/** Whether the filter is negative.
*
* @default false
*/
negative?: boolean;
/* The values to filter. */
values: T[];
};
export type AccessLogFilters = {
/* Status code filter. */
status_code?: AccessLogFilter<StatusCodeRange>;
/* Method filter. */
method?: AccessLogFilter<HTTPMethod>;
/* Host filter. */
host?: AccessLogFilter<string>;
/* Header filter. */
headers?: AccessLogFilter<HTTPHeader>;
/* CIDR filter. */
cidr?: AccessLogFilter<CIDR>;
};
export const ACCESS_LOG_FIELD_MODES = ["keep", "drop", "redact"] as const;
export type AccessLogFieldMode = (typeof ACCESS_LOG_FIELD_MODES)[number];
export type AccessLogField = {
default?: AccessLogFieldMode;
config: {
[key: string]: AccessLogFieldMode;
};
};
export type AccessLogFields = {
header?: AccessLogField;
query?: AccessLogField;
cookie?: AccessLogField;
};

View File

@@ -1,88 +0,0 @@
import { DomainOrWildcard, Email } from "../types";
export declare const AUTOCERT_PROVIDERS: readonly [
"local",
"cloudflare",
"clouddns",
"duckdns",
"ovh",
"porkbun",
];
export type AutocertProvider = (typeof AUTOCERT_PROVIDERS)[number];
export type AutocertConfig =
| LocalOptions
| CloudflareOptions
| CloudDNSOptions
| DuckDNSOptions
| OVHOptionsWithAppKey
| OVHOptionsWithOAuth2Config
| PorkbunOptions;
export interface AutocertConfigBase {
email: Email;
domains: DomainOrWildcard[];
cert_path?: string;
key_path?: string;
}
export interface LocalOptions {
provider: "local";
cert_path?: string;
key_path?: string;
options?: {} | null;
}
export interface CloudflareOptions extends AutocertConfigBase {
provider: "cloudflare";
options: {
auth_token: string;
};
}
export interface CloudDNSOptions extends AutocertConfigBase {
provider: "clouddns";
options: {
client_id: string;
email: Email;
password: string;
};
}
export interface DuckDNSOptions extends AutocertConfigBase {
provider: "duckdns";
options: {
token: string;
};
}
export interface PorkbunOptions extends AutocertConfigBase {
provider: "porkbun";
options: {
api_key: string;
secret_api_key: string;
};
}
export declare const OVH_ENDPOINTS: readonly [
"ovh-eu",
"ovh-ca",
"ovh-us",
"kimsufi-eu",
"kimsufi-ca",
"soyoustart-eu",
"soyoustart-ca",
];
export type OVHEndpoint = (typeof OVH_ENDPOINTS)[number];
export interface OVHOptionsWithAppKey extends AutocertConfigBase {
provider: "ovh";
options: {
application_secret: string;
consumer_key: string;
api_endpoint?: OVHEndpoint;
application_key: string;
};
}
export interface OVHOptionsWithOAuth2Config extends AutocertConfigBase {
provider: "ovh";
options: {
application_secret: string;
consumer_key: string;
api_endpoint?: OVHEndpoint;
oauth2_config: {
client_id: string;
client_secret: string;
};
};
}

View File

@@ -1,17 +0,0 @@
export const AUTOCERT_PROVIDERS = [
"local",
"cloudflare",
"clouddns",
"duckdns",
"ovh",
"porkbun",
];
export const OVH_ENDPOINTS = [
"ovh-eu",
"ovh-ca",
"ovh-us",
"kimsufi-eu",
"kimsufi-ca",
"soyoustart-eu",
"soyoustart-ca",
];

View File

@@ -1,104 +0,0 @@
import { DomainOrWildcard, Email } from "../types";
export const AUTOCERT_PROVIDERS = [
"local",
"cloudflare",
"clouddns",
"duckdns",
"ovh",
"porkbun",
] as const;
export type AutocertProvider = (typeof AUTOCERT_PROVIDERS)[number];
export type AutocertConfig =
| LocalOptions
| CloudflareOptions
| CloudDNSOptions
| DuckDNSOptions
| OVHOptionsWithAppKey
| OVHOptionsWithOAuth2Config
| PorkbunOptions;
export interface AutocertConfigBase {
/* ACME email */
email: Email;
/* ACME domains */
domains: DomainOrWildcard[];
/* ACME certificate path */
cert_path?: string;
/* ACME key path */
key_path?: string;
}
export interface LocalOptions {
provider: "local";
/* ACME certificate path */
cert_path?: string;
/* ACME key path */
key_path?: string;
options?: {} | null;
}
export interface CloudflareOptions extends AutocertConfigBase {
provider: "cloudflare";
options: { auth_token: string };
}
export interface CloudDNSOptions extends AutocertConfigBase {
provider: "clouddns";
options: {
client_id: string;
email: Email;
password: string;
};
}
export interface DuckDNSOptions extends AutocertConfigBase {
provider: "duckdns";
options: {
token: string;
};
}
export interface PorkbunOptions extends AutocertConfigBase {
provider: "porkbun";
options: {
api_key: string;
secret_api_key: string;
};
}
export const OVH_ENDPOINTS = [
"ovh-eu",
"ovh-ca",
"ovh-us",
"kimsufi-eu",
"kimsufi-ca",
"soyoustart-eu",
"soyoustart-ca",
] as const;
export type OVHEndpoint = (typeof OVH_ENDPOINTS)[number];
export interface OVHOptionsWithAppKey extends AutocertConfigBase {
provider: "ovh";
options: {
application_secret: string;
consumer_key: string;
api_endpoint?: OVHEndpoint;
application_key: string;
};
}
export interface OVHOptionsWithOAuth2Config extends AutocertConfigBase {
provider: "ovh";
options: {
application_secret: string;
consumer_key: string;
api_endpoint?: OVHEndpoint;
oauth2_config: {
client_id: string;
client_secret: string;
};
};
}
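A sketch of one AutocertConfig variant not covered by the examples elsewhere in this diff, Porkbun; the import path and credentials are placeholders:

import type { AutocertConfig } from "./config/autocert"; // illustrative path

// Wildcard certificate via Porkbun DNS; API keys are placeholders.
const autocertSketch: AutocertConfig = {
  provider: "porkbun",
  email: "admin@example.com",
  domains: ["example.com", "*.example.com"],
  options: {
    api_key: "pk1_xxxxxxxxxxxx",
    secret_api_key: "sk1_xxxxxxxxxxxx",
  },
};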

View File

@@ -1,61 +0,0 @@
import { DomainName } from "../types";
import { AutocertConfig } from "./autocert";
import { EntrypointConfig } from "./entrypoint";
import { HomepageConfig } from "./homepage";
import { Providers } from "./providers";
export type Config = {
/** Optional autocert configuration
*
* @examples require(".").autocertExamples
*/
autocert?: AutocertConfig;
entrypoint?: EntrypointConfig;
providers: Providers;
/** Optional list of domains to match
*
* @minItems 1
* @examples require(".").matchDomainsExamples
*/
match_domains?: DomainName[];
homepage?: HomepageConfig;
/**
* Optional timeout before shutdown
* @default 3
* @minimum 1
*/
timeout_shutdown?: number;
};
export declare const autocertExamples: (
| {
provider: string;
email?: undefined;
domains?: undefined;
options?: undefined;
}
| {
provider: string;
email: string;
domains: string[];
options: {
auth_token: string;
client_id?: undefined;
email?: undefined;
password?: undefined;
};
}
| {
provider: string;
email: string;
domains: string[];
options: {
client_id: string;
email: string;
password: string;
auth_token?: undefined;
};
}
)[];
export declare const matchDomainsExamples: readonly [
"example.com",
"*.example.com",
];

View File

@@ -1,20 +0,0 @@
export const autocertExamples = [
{ provider: "local" },
{
provider: "cloudflare",
email: "abc@gmail",
domains: ["example.com"],
options: { auth_token: "c1234565789-abcdefghijklmnopqrst" },
},
{
provider: "clouddns",
email: "abc@gmail",
domains: ["example.com"],
options: {
client_id: "c1234565789",
email: "abc@gmail",
password: "password",
},
},
];
export const matchDomainsExamples = ["example.com", "*.example.com"];

View File

@@ -1,52 +0,0 @@
import { DomainName } from "../types";
import { AutocertConfig } from "./autocert";
import { EntrypointConfig } from "./entrypoint";
import { HomepageConfig } from "./homepage";
import { Providers } from "./providers";
export type Config = {
/** Optional autocert configuration
*
* @examples require(".").autocertExamples
*/
autocert?: AutocertConfig;
/* Optional entrypoint configuration */
entrypoint?: EntrypointConfig;
/* Providers configuration (include file, docker, notification) */
providers: Providers;
/** Optional list of domains to match
*
* @minItems 1
* @examples require(".").matchDomainsExamples
*/
match_domains?: DomainName[];
/* Optional homepage configuration */
homepage?: HomepageConfig;
/**
* Optional timeout before shutdown
* @default 3
* @minimum 1
*/
timeout_shutdown?: number;
};
export const autocertExamples = [
{ provider: "local" },
{
provider: "cloudflare",
email: "abc@gmail",
domains: ["example.com"],
options: { auth_token: "c1234565789-abcdefghijklmnopqrst" },
},
{
provider: "clouddns",
email: "abc@gmail",
domains: ["example.com"],
options: {
client_id: "c1234565789",
email: "abc@gmail",
password: "password",
},
},
];
export const matchDomainsExamples = ["example.com", "*.example.com"] as const;
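Putting the pieces together, a sketch of a full Config object reusing the autocert and match_domains examples above; paths and credentials are placeholders:

import type { Config } from "./config/config"; // illustrative path

// File + docker providers, Cloudflare autocert, wildcard domain matching.
const configSketch: Config = {
  autocert: {
    provider: "cloudflare",
    email: "admin@example.com",
    domains: ["*.example.com"],
    options: { auth_token: "c1234565789-abcdefghijklmnopqrst" },
  },
  providers: {
    include: ["routes.yml"],
    docker: { local: "$DOCKER_HOST" },
  },
  match_domains: ["example.com", "*.example.com"],
  timeout_shutdown: 3, // same as the documented default
};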

View File

@@ -1,49 +0,0 @@
import { MiddlewareCompose } from "../middlewares/middleware_compose";
import { AccessLogConfig } from "./access_log";
export type EntrypointConfig = {
/** Entrypoint middleware configuration
*
* @examples require(".").middlewaresExamples
*/
middlewares?: MiddlewareCompose;
/** Entrypoint access log configuration
*
* @examples require(".").accessLogExamples
*/
access_log?: AccessLogConfig;
};
export declare const accessLogExamples: readonly [
{
readonly path: "/var/log/access.log";
readonly format: "combined";
readonly filters: {
readonly status_codes: {
readonly values: readonly ["200-299"];
};
};
readonly fields: {
readonly headers: {
readonly default: "keep";
readonly config: {
readonly foo: "redact";
};
};
};
},
];
export declare const middlewaresExamples: readonly [
{
readonly use: "RedirectHTTP";
},
{
readonly use: "CIDRWhitelist";
readonly allow: readonly [
"127.0.0.1",
"10.0.0.0/8",
"172.16.0.0/12",
"192.168.0.0/16",
];
readonly status: 403;
readonly message: "Forbidden";
},
];

View File

@@ -1,30 +0,0 @@
export const accessLogExamples = [
{
path: "/var/log/access.log",
format: "combined",
filters: {
status_codes: {
values: ["200-299"],
},
},
fields: {
headers: {
default: "keep",
config: {
foo: "redact",
},
},
},
},
];
export const middlewaresExamples = [
{
use: "RedirectHTTP",
},
{
use: "CIDRWhitelist",
allow: ["127.0.0.1", "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"],
status: 403,
message: "Forbidden",
},
];

View File

@@ -1,47 +0,0 @@
import { MiddlewareCompose } from "../middlewares/middleware_compose";
import { AccessLogConfig } from "./access_log";
export type EntrypointConfig = {
/** Entrypoint middleware configuration
*
* @examples require(".").middlewaresExamples
*/
middlewares?: MiddlewareCompose;
/** Entrypoint access log configuration
*
* @examples require(".").accessLogExamples
*/
access_log?: AccessLogConfig;
};
export const accessLogExamples = [
{
path: "/var/log/access.log",
format: "combined",
filters: {
status_codes: {
values: ["200-299"],
},
},
fields: {
headers: {
default: "keep",
config: {
foo: "redact",
},
},
},
},
] as const;
export const middlewaresExamples = [
{
use: "RedirectHTTP",
},
{
use: "CIDRWhitelist",
allow: ["127.0.0.1", "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"],
status: 403,
message: "Forbidden",
},
] as const;
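A sketch tying the two example sets above into a single EntrypointConfig; the log path is a placeholder:

import type { EntrypointConfig } from "./config/entrypoint"; // illustrative path

// Redirect HTTP to HTTPS, then write a combined-format access log.
const entrypointSketch: EntrypointConfig = {
  middlewares: [{ use: "RedirectHTTP" }],
  access_log: {
    path: "/var/log/godoxy/entrypoint.log",
    format: "combined",
  },
};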

View File

@@ -1,7 +0,0 @@
export type HomepageConfig = {
/**
* Use default app categories (uses docker image name)
* @default true
*/
use_default_categories: boolean;
};

View File

@@ -1 +0,0 @@
export {};

View File

@@ -1,7 +0,0 @@
export type HomepageConfig = {
/**
* Use default app categories (uses docker image name)
* @default true
*/
use_default_categories: boolean;
};

View File

@@ -1,69 +0,0 @@
import { URL } from "../types";
export declare const NOTIFICATION_PROVIDERS: readonly [
"webhook",
"gotify",
"ntfy",
];
export type NotificationProvider = (typeof NOTIFICATION_PROVIDERS)[number];
export type NotificationConfig = {
name: string;
url: URL;
};
export interface GotifyConfig extends NotificationConfig {
provider: "gotify";
token: string;
}
export declare const NTFY_MSG_STYLES: string[];
export type NtfyStyle = (typeof NTFY_MSG_STYLES)[number];
export interface NtfyConfig extends NotificationConfig {
provider: "ntfy";
topic: string;
token?: string;
style?: NtfyStyle;
}
export declare const WEBHOOK_TEMPLATES: readonly ["", "discord"];
export declare const WEBHOOK_METHODS: readonly ["POST", "GET", "PUT"];
export declare const WEBHOOK_MIME_TYPES: readonly [
"application/json",
"application/x-www-form-urlencoded",
"text/plain",
"text/markdown",
];
export declare const WEBHOOK_COLOR_MODES: readonly ["hex", "dec"];
export type WebhookTemplate = (typeof WEBHOOK_TEMPLATES)[number];
export type WebhookMethod = (typeof WEBHOOK_METHODS)[number];
export type WebhookMimeType = (typeof WEBHOOK_MIME_TYPES)[number];
export type WebhookColorMode = (typeof WEBHOOK_COLOR_MODES)[number];
export interface WebhookConfig extends NotificationConfig {
provider: "webhook";
/**
* Webhook template
*
* @default "discord"
*/
template?: WebhookTemplate;
token?: string;
/**
* Webhook message (usually JSON),
* required when template is not defined
*/
payload?: string;
/**
* Webhook method
*
* @default "POST"
*/
method?: WebhookMethod;
/**
* Webhook mime type
*
* @default "application/json"
*/
mime_type?: WebhookMimeType;
/**
* Webhook color mode
*
* @default "hex"
*/
color_mode?: WebhookColorMode;
}

View File

@@ -1,11 +0,0 @@
export const NOTIFICATION_PROVIDERS = ["webhook", "gotify", "ntfy"];
export const NTFY_MSG_STYLES = ["markdown", "plain"];
export const WEBHOOK_TEMPLATES = ["", "discord"];
export const WEBHOOK_METHODS = ["POST", "GET", "PUT"];
export const WEBHOOK_MIME_TYPES = [
"application/json",
"application/x-www-form-urlencoded",
"text/plain",
"text/markdown",
];
export const WEBHOOK_COLOR_MODES = ["hex", "dec"];

View File

@@ -1,78 +0,0 @@
import { URL } from "../types";
export const NOTIFICATION_PROVIDERS = ["webhook", "gotify", "ntfy"] as const;
export type NotificationProvider = (typeof NOTIFICATION_PROVIDERS)[number];
export type NotificationConfig = {
/* Name of the notification provider */
name: string;
/* URL of the notification provider */
url: URL;
};
export interface GotifyConfig extends NotificationConfig {
provider: "gotify";
/* Gotify token */
token: string;
}
export const NTFY_MSG_STYLES = ["markdown", "plain"];
export type NtfyStyle = (typeof NTFY_MSG_STYLES)[number];
export interface NtfyConfig extends NotificationConfig {
provider: "ntfy";
topic: string;
token?: string;
style?: NtfyStyle;
}
export const WEBHOOK_TEMPLATES = ["", "discord"] as const;
export const WEBHOOK_METHODS = ["POST", "GET", "PUT"] as const;
export const WEBHOOK_MIME_TYPES = [
"application/json",
"application/x-www-form-urlencoded",
"text/plain",
"text/markdown",
] as const;
export const WEBHOOK_COLOR_MODES = ["hex", "dec"] as const;
export type WebhookTemplate = (typeof WEBHOOK_TEMPLATES)[number];
export type WebhookMethod = (typeof WEBHOOK_METHODS)[number];
export type WebhookMimeType = (typeof WEBHOOK_MIME_TYPES)[number];
export type WebhookColorMode = (typeof WEBHOOK_COLOR_MODES)[number];
export interface WebhookConfig extends NotificationConfig {
provider: "webhook";
/**
* Webhook template
*
* @default "discord"
*/
template?: WebhookTemplate;
/* Webhook token */
token?: string;
/**
* Webhook message (usually JSON),
* required when template is not defined
*/
payload?: string;
/**
* Webhook method
*
* @default "POST"
*/
method?: WebhookMethod;
/**
* Webhook mime type
*
* @default "application/json"
*/
mime_type?: WebhookMimeType;
/**
* Webhook color mode
*
* @default "hex"
*/
color_mode?: WebhookColorMode;
}
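Sketches of two notification configs satisfying the types above; URLs and tokens are placeholders:

import type { NtfyConfig, WebhookConfig } from "./config/notification"; // illustrative path

// ntfy with markdown-styled messages.
const ntfySketch: NtfyConfig = {
  name: "ntfy",
  provider: "ntfy",
  url: "https://ntfy.example.com",
  topic: "godoxy",
  style: "markdown",
};

// Discord webhook using the built-in "discord" template,
// so no custom payload is required.
const discordSketch: WebhookConfig = {
  name: "discord",
  provider: "webhook",
  url: "https://discord.com/api/webhooks/1234/abcd",
  template: "discord",
};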

View File

@@ -1,58 +0,0 @@
import { URI, URL } from "../types";
import { GotifyConfig, NtfyConfig, WebhookConfig } from "./notification";
export type Providers = {
/** List of route definition files to include
*
* @minItems 1
* @examples require(".").includeExamples
* @items.pattern ^[\w\d\-_]+\.(yaml|yml)$
*/
include?: URI[];
/** Name-value mapping of docker hosts to retrieve routes from
*
* @minProperties 1
* @examples require(".").dockerExamples
*/
docker?: {
[name: string]: URL | "$DOCKER_HOST";
};
/** List of GoDoxy agents
*
* @minItems 1
* @examples require(".").agentExamples
*/
agents?: `${string}:${number}`[];
/** List of notification providers
*
* @minItems 1
* @examples require(".").notificationExamples
*/
notification?: (WebhookConfig | GotifyConfig | NtfyConfig)[];
};
export declare const includeExamples: readonly ["file1.yml", "file2.yml"];
export declare const dockerExamples: readonly [
{
readonly local: "$DOCKER_HOST";
},
{
readonly remote: "tcp://10.0.2.1:2375";
},
{
readonly remote2: "ssh://root:1234@10.0.2.2";
},
];
export declare const notificationExamples: readonly [
{
readonly name: "gotify";
readonly provider: "gotify";
readonly url: "https://gotify.domain.tld";
readonly token: "abcd";
},
{
readonly name: "discord";
readonly provider: "webhook";
readonly template: "discord";
readonly url: "https://discord.com/api/webhooks/1234/abcd";
},
];
export declare const agentExamples: readonly ["10.0.2.3:8890", "10.0.2.4:8890"];

View File

@@ -1,21 +0,0 @@
export const includeExamples = ["file1.yml", "file2.yml"];
export const dockerExamples = [
{ local: "$DOCKER_HOST" },
{ remote: "tcp://10.0.2.1:2375" },
{ remote2: "ssh://root:1234@10.0.2.2" },
];
export const notificationExamples = [
{
name: "gotify",
provider: "gotify",
url: "https://gotify.domain.tld",
token: "abcd",
},
{
name: "discord",
provider: "webhook",
template: "discord",
url: "https://discord.com/api/webhooks/1234/abcd",
},
];
export const agentExamples = ["10.0.2.3:8890", "10.0.2.4:8890"];

View File

@@ -1,52 +0,0 @@
import { URI, URL } from "../types";
import { GotifyConfig, NtfyConfig, WebhookConfig } from "./notification";
export type Providers = {
/** List of route definition files to include
*
* @minItems 1
* @examples require(".").includeExamples
* @items.pattern ^[\w\d\-_]+\.(yaml|yml)$
*/
include?: URI[];
/** Name-value mapping of docker hosts to retrieve routes from
*
* @minProperties 1
* @examples require(".").dockerExamples
*/
docker?: { [name: string]: URL | "$DOCKER_HOST" };
/** List of GoDoxy agents
*
* @minItems 1
* @examples require(".").agentExamples
*/
agents?: `${string}:${number}`[];
/** List of notification providers
*
* @minItems 1
* @examples require(".").notificationExamples
*/
notification?: (WebhookConfig | GotifyConfig | NtfyConfig)[];
};
export const includeExamples = ["file1.yml", "file2.yml"] as const;
export const dockerExamples = [
{ local: "$DOCKER_HOST" },
{ remote: "tcp://10.0.2.1:2375" },
{ remote2: "ssh://root:1234@10.0.2.2" },
] as const;
export const notificationExamples = [
{
name: "gotify",
provider: "gotify",
url: "https://gotify.domain.tld",
token: "abcd",
},
{
name: "discord",
provider: "webhook",
template: "discord",
url: "https://discord.com/api/webhooks/1234/abcd",
},
] as const;
export const agentExamples = ["10.0.2.3:8890", "10.0.2.4:8890"] as const;
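A sketch combining the provider example fragments above into one Providers value; hosts and the gotify token are placeholders:

import type { Providers } from "./config/providers"; // illustrative path

const providersSketch: Providers = {
  include: ["file1.yml", "file2.yml"],
  docker: {
    local: "$DOCKER_HOST",
    remote: "tcp://10.0.2.1:2375",
  },
  agents: ["10.0.2.3:8890"],
  notification: [
    {
      name: "gotify",
      provider: "gotify",
      url: "https://gotify.example.com",
      token: "abcd",
    },
  ],
};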

schemas/docker.d.ts vendored (5 lines changed)
View File

@@ -1,5 +0,0 @@
import { IdleWatcherConfig } from "./providers/idlewatcher";
import { Route } from "./providers/routes";
export type DockerRoutes = {
[key: string]: Route & IdleWatcherConfig;
};

View File

@@ -1 +0,0 @@
export {};

View File

@@ -1,7 +0,0 @@
import { IdleWatcherConfig } from "./providers/idlewatcher";
import { Route } from "./providers/routes";
//FIXME: fix this
export type DockerRoutes = {
[key: string]: Route & IdleWatcherConfig;
};

File diff suppressed because one or more lines are too long

schemas/index.d.ts vendored (38 lines changed)
View File

@@ -1,38 +0,0 @@
import * as AccessLog from "./config/access_log";
import * as Autocert from "./config/autocert";
import * as Config from "./config/config";
import * as Entrypoint from "./config/entrypoint";
import * as Notification from "./config/notification";
import * as Providers from "./config/providers";
import * as MiddlewareCompose from "./middlewares/middleware_compose";
import * as Middlewares from "./middlewares/middlewares";
import * as Healthcheck from "./providers/healthcheck";
import * as Homepage from "./providers/homepage";
import * as IdleWatcher from "./providers/idlewatcher";
import * as LoadBalance from "./providers/loadbalance";
import * as Routes from "./providers/routes";
import * as GoDoxy from "./types";
import ConfigSchema from "./config.schema.json";
import DockerRoutesSchema from "./docker_routes.schema.json";
import MiddlewareComposeSchema from "./middleware_compose.schema.json";
import RoutesSchema from "./routes.schema.json";
export {
AccessLog,
Autocert,
Config,
ConfigSchema,
DockerRoutesSchema,
Entrypoint,
GoDoxy,
Healthcheck,
Homepage,
IdleWatcher,
LoadBalance,
MiddlewareCompose,
MiddlewareComposeSchema,
Middlewares,
Notification,
Providers,
Routes,
RoutesSchema,
};

View File

@@ -1,19 +0,0 @@
import * as AccessLog from "./config/access_log";
import * as Autocert from "./config/autocert";
import * as Config from "./config/config";
import * as Entrypoint from "./config/entrypoint";
import * as Notification from "./config/notification";
import * as Providers from "./config/providers";
import * as MiddlewareCompose from "./middlewares/middleware_compose";
import * as Middlewares from "./middlewares/middlewares";
import * as Healthcheck from "./providers/healthcheck";
import * as Homepage from "./providers/homepage";
import * as IdleWatcher from "./providers/idlewatcher";
import * as LoadBalance from "./providers/loadbalance";
import * as Routes from "./providers/routes";
import * as GoDoxy from "./types";
import ConfigSchema from "./config.schema.json";
import DockerRoutesSchema from "./docker_routes.schema.json";
import MiddlewareComposeSchema from "./middleware_compose.schema.json";
import RoutesSchema from "./routes.schema.json";
export { AccessLog, Autocert, Config, ConfigSchema, DockerRoutesSchema, Entrypoint, GoDoxy, Healthcheck, Homepage, IdleWatcher, LoadBalance, MiddlewareCompose, MiddlewareComposeSchema, Middlewares, Notification, Providers, Routes, RoutesSchema, };

View File

@@ -1,43 +0,0 @@
import * as AccessLog from "./config/access_log";
import * as Autocert from "./config/autocert";
import * as Config from "./config/config";
import * as Entrypoint from "./config/entrypoint";
import * as Notification from "./config/notification";
import * as Providers from "./config/providers";
import * as MiddlewareCompose from "./middlewares/middleware_compose";
import * as Middlewares from "./middlewares/middlewares";
import * as Healthcheck from "./providers/healthcheck";
import * as Homepage from "./providers/homepage";
import * as IdleWatcher from "./providers/idlewatcher";
import * as LoadBalance from "./providers/loadbalance";
import * as Routes from "./providers/routes";
import * as GoDoxy from "./types";
import ConfigSchema from "./config.schema.json";
import DockerRoutesSchema from "./docker_routes.schema.json";
import MiddlewareComposeSchema from "./middleware_compose.schema.json";
import RoutesSchema from "./routes.schema.json";
export {
AccessLog,
Autocert,
Config,
ConfigSchema,
DockerRoutesSchema,
Entrypoint,
GoDoxy,
Healthcheck,
Homepage,
IdleWatcher,
LoadBalance,
MiddlewareCompose,
MiddlewareComposeSchema,
Middlewares,
Notification,
Providers,
Routes,
RoutesSchema,
};

View File

@@ -1 +0,0 @@
{"$schema":"http://json-schema.org/draft-07/schema#","definitions":{"CIDR":{"anyOf":[{"pattern":"^[0-9]*\\.[0-9]*\\.[0-9]*\\.[0-9]*$","type":"string"},{"pattern":"^.*:.*:.*:.*:.*:.*:.*:.*$","type":"string"},{"pattern":"^[0-9]*\\.[0-9]*\\.[0-9]*\\.[0-9]*/[0-9]*$","type":"string"},{"pattern":"^::[0-9]*$","type":"string"},{"pattern":"^.*::/[0-9]*$","type":"string"},{"pattern":"^.*:.*::/[0-9]*$","type":"string"}]},"Duration":{"pattern":"^([0-9]+(ms|s|m|h))+$","type":"string"},"HTTPHeader":{"description":"HTTP Header","pattern":"^[a-zA-Z0-9\\-]+$","type":"string"},"MiddlewareComposeMap":{"anyOf":[{"additionalProperties":false,"properties":{"use":{"enum":["CustomErrorPage","ErrorPage","customErrorPage","custom_error_page","errorPage","error_page"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"bypass":{"additionalProperties":false,"description":"Bypass redirect","properties":{"user_agents":{"description":"Bypass redirect for user agents","items":{"type":"string"},"type":"array"}},"type":"object"},"use":{"enum":["RedirectHTTP","redirectHTTP","redirect_http"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"use":{"enum":["SetXForwarded","setXForwarded","set_x_forwarded"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"use":{"enum":["HideXForwarded","hideXForwarded","hide_x_forwarded"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"allow":{"items":{"$ref":"#/definitions/CIDR"},"type":"array"},"message":{"default":"IP not allowed","description":"Error message when blocked","type":"string"},"status":{"$ref":"#/definitions/StatusCode","default":403,"description":"HTTP status code when blocked (alias of status_code)"},"status_code":{"$ref":"#/definitions/StatusCode","default":403,"description":"HTTP status code when blocked"},"use":{"enum":["CIDRWhitelist","cidrWhitelist","cidr_whitelist"],"type":"string"}},"required":["allow","use"],"type":"object"},{"additionalProperties":false,"properties":{"recursive":{"default":false,"description":"Recursively resolve the IP","type":"boolean"},"use":{"enum":["CloudflareRealIP","cloudflareRealIp","cloudflare_real_ip"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"add_headers":{"additionalProperties":false,"description":"Add HTTP headers","items":{"type":"string"},"type":"array"},"add_prefix":{"description":"Add prefix to request URL","type":"string"},"hide_headers":{"description":"Hide HTTP headers","items":{"$ref":"#/definitions/HTTPHeader"},"type":"array"},"set_headers":{"additionalProperties":false,"description":"Set HTTP headers","items":{"type":"string"},"type":"array"},"use":{"enum":["ModifyRequest","Request","modifyRequest","modify_request","request"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"add_headers":{"additionalProperties":false,"description":"Add HTTP headers","items":{"type":"string"},"type":"array"},"hide_headers":{"description":"Hide HTTP headers","items":{"$ref":"#/definitions/HTTPHeader"},"type":"array"},"set_headers":{"additionalProperties":false,"description":"Set HTTP 
headers","items":{"type":"string"},"type":"array"},"use":{"enum":["ModifyResponse","Response","modifyResponse","modify_response","response"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"allowed_groups":{"description":"Allowed groups","items":{"type":"string"},"minItems":1,"type":"array"},"allowed_users":{"description":"Allowed users","items":{"type":"string"},"minItems":1,"type":"array"},"use":{"enum":["OIDC","oidc"],"type":"string"}},"required":["use"],"type":"object"},{"additionalProperties":false,"properties":{"average":{"description":"Average number of requests allowed in a period","type":"number"},"burst":{"description":"Maximum number of requests allowed in a period","type":"number"},"period":{"$ref":"#/definitions/Duration","default":"1s","description":"Duration of the rate limit"},"use":{"enum":["RateLimit","rateLimit","rate_limit"],"type":"string"}},"required":["average","burst","use"],"type":"object"},{"additionalProperties":false,"properties":{"from":{"items":{"$ref":"#/definitions/CIDR"},"type":"array"},"header":{"$ref":"#/definitions/HTTPHeader","default":"X-Real-IP","description":"Header to get the client IP from"},"recursive":{"default":false,"description":"Recursive resolve the IP","type":"boolean"},"use":{"enum":["RealIP","realIP","real_ip"],"type":"string"}},"required":["from","use"],"type":"object"}]},"StatusCode":{"anyOf":[{"pattern":"^[0-9]*$","type":"string"},{"type":"number"}]}},"items":{"$ref":"#/definitions/MiddlewareComposeMap"},"type":"array"}

View File

@@ -1,2 +0,0 @@
import { MiddlewareComposeMap } from "./middlewares";
export type MiddlewareCompose = MiddlewareComposeMap[];

View File

@@ -1 +0,0 @@
export {};

View File

@@ -1,3 +0,0 @@
import { MiddlewareComposeMap } from "./middlewares";
export type MiddlewareCompose = MiddlewareComposeMap[];

View File

@@ -1,186 +0,0 @@
import * as types from "../types";
export type KeyOptMapping<
T extends {
use: string;
},
> = {
[key in T["use"]]?: Omit<T, "use">;
};
export declare const ALL_MIDDLEWARES: readonly [
"ErrorPage",
"RedirectHTTP",
"SetXForwarded",
"HideXForwarded",
"CIDRWhitelist",
"CloudflareRealIP",
"ModifyRequest",
"ModifyResponse",
"OIDC",
"RateLimit",
"RealIP",
];
/**
* @type object
* @patternProperties {"^.*@file$": {"type": "null"}}
*/
export type MiddlewareFileRef = {
[key: `${string}@file`]: null;
};
export type MiddlewaresMap =
| (KeyOptMapping<CustomErrorPage> &
KeyOptMapping<RedirectHTTP> &
KeyOptMapping<SetXForwarded> &
KeyOptMapping<HideXForwarded> &
KeyOptMapping<CIDRWhitelist> &
KeyOptMapping<CloudflareRealIP> &
KeyOptMapping<ModifyRequest> &
KeyOptMapping<ModifyResponse> &
KeyOptMapping<OIDC> &
KeyOptMapping<RateLimit> &
KeyOptMapping<RealIP>)
| MiddlewareFileRef;
export type MiddlewareComposeMap =
| CustomErrorPage
| RedirectHTTP
| SetXForwarded
| HideXForwarded
| CIDRWhitelist
| CloudflareRealIP
| ModifyRequest
| ModifyResponse
| OIDC
| RateLimit
| RealIP;
export type CustomErrorPage = {
use:
| "error_page"
| "errorPage"
| "ErrorPage"
| "custom_error_page"
| "customErrorPage"
| "CustomErrorPage";
};
export type RedirectHTTP = {
use: "redirect_http" | "redirectHTTP" | "RedirectHTTP";
/** Bypass redirect */
bypass?: {
/** Bypass redirect for user agents */
user_agents?: string[];
};
};
export type SetXForwarded = {
use: "set_x_forwarded" | "setXForwarded" | "SetXForwarded";
};
export type HideXForwarded = {
use: "hide_x_forwarded" | "hideXForwarded" | "HideXForwarded";
};
export type CIDRWhitelist = {
use: "cidr_whitelist" | "cidrWhitelist" | "CIDRWhitelist";
allow: types.CIDR[];
/** HTTP status code when blocked
*
* @default 403
*/
status_code?: types.StatusCode;
/** HTTP status code when blocked (alias of status_code)
*
* @default 403
*/
status?: types.StatusCode;
/** Error message when blocked
*
* @default "IP not allowed"
*/
message?: string;
};
export type CloudflareRealIP = {
use: "cloudflare_real_ip" | "cloudflareRealIp" | "CloudflareRealIP";
/** Recursively resolve the IP
*
* @default false
*/
recursive?: boolean;
};
export type ModifyRequest = {
use:
| "request"
| "Request"
| "modify_request"
| "modifyRequest"
| "ModifyRequest";
/** Set HTTP headers */
set_headers?: {
[key: types.HTTPHeader]: string;
};
/** Add HTTP headers */
add_headers?: {
[key: types.HTTPHeader]: string;
};
/** Hide HTTP headers */
hide_headers?: types.HTTPHeader[];
/** Add prefix to request URL */
add_prefix?: string;
};
export type ModifyResponse = {
use:
| "response"
| "Response"
| "modify_response"
| "modifyResponse"
| "ModifyResponse";
/** Set HTTP headers */
set_headers?: {
[key: types.HTTPHeader]: string;
};
/** Add HTTP headers */
add_headers?: {
[key: types.HTTPHeader]: string;
};
/** Hide HTTP headers */
hide_headers?: types.HTTPHeader[];
};
export type OIDC = {
use: "oidc" | "OIDC";
/** Allowed users
*
* @minItems 1
*/
allowed_users?: string[];
/** Allowed groups
*
* @minItems 1
*/
allowed_groups?: string[];
};
export type RateLimit = {
use: "rate_limit" | "rateLimit" | "RateLimit";
/** Average number of requests allowed in a period
*
* @min 1
*/
average: number;
/** Maximum number of requests allowed in a period
*
* @min 1
*/
burst: number;
/** Duration of the rate limit
*
* @default 1s
*/
period?: types.Duration;
};
export type RealIP = {
use: "real_ip" | "realIP" | "RealIP";
/** Header to get the client IP from
*
* @default "X-Real-IP"
*/
header?: types.HTTPHeader;
from: types.CIDR[];
/** Recursively resolve the IP
*
* @default false
*/
recursive?: boolean;
};

View File

@@ -1,13 +0,0 @@
export const ALL_MIDDLEWARES = [
"ErrorPage",
"RedirectHTTP",
"SetXForwarded",
"HideXForwarded",
"CIDRWhitelist",
"CloudflareRealIP",
"ModifyRequest",
"ModifyResponse",
"OIDC",
"RateLimit",
"RealIP",
];

View File

@@ -1,190 +0,0 @@
import * as types from "../types";
export type KeyOptMapping<T extends { use: string }> = {
[key in T["use"]]?: Omit<T, "use">;
};
export const ALL_MIDDLEWARES = [
"ErrorPage",
"RedirectHTTP",
"SetXForwarded",
"HideXForwarded",
"CIDRWhitelist",
"CloudflareRealIP",
"ModifyRequest",
"ModifyResponse",
"OIDC",
"RateLimit",
"RealIP",
] as const;
/**
* @type object
* @patternProperties {"^.*@file$": {"type": "null"}}
*/
export type MiddlewareFileRef = {
[key: `${string}@file`]: null;
};
export type MiddlewaresMap =
| (KeyOptMapping<CustomErrorPage> &
KeyOptMapping<RedirectHTTP> &
KeyOptMapping<SetXForwarded> &
KeyOptMapping<HideXForwarded> &
KeyOptMapping<CIDRWhitelist> &
KeyOptMapping<CloudflareRealIP> &
KeyOptMapping<ModifyRequest> &
KeyOptMapping<ModifyResponse> &
KeyOptMapping<OIDC> &
KeyOptMapping<RateLimit> &
KeyOptMapping<RealIP>)
| MiddlewareFileRef;
export type MiddlewareComposeMap =
| CustomErrorPage
| RedirectHTTP
| SetXForwarded
| HideXForwarded
| CIDRWhitelist
| CloudflareRealIP
| ModifyRequest
| ModifyResponse
| OIDC
| RateLimit
| RealIP;
export type CustomErrorPage = {
use:
| "error_page"
| "errorPage"
| "ErrorPage"
| "custom_error_page"
| "customErrorPage"
| "CustomErrorPage";
};
export type RedirectHTTP = {
use: "redirect_http" | "redirectHTTP" | "RedirectHTTP";
/** Bypass redirect */
bypass?: {
/** Bypass redirect for user agents */
user_agents?: string[];
};
};
export type SetXForwarded = {
use: "set_x_forwarded" | "setXForwarded" | "SetXForwarded";
};
export type HideXForwarded = {
use: "hide_x_forwarded" | "hideXForwarded" | "HideXForwarded";
};
export type CIDRWhitelist = {
use: "cidr_whitelist" | "cidrWhitelist" | "CIDRWhitelist";
/* Allowed CIDRs/IPs */
allow: types.CIDR[];
/** HTTP status code when blocked
*
* @default 403
*/
status_code?: types.StatusCode;
/** HTTP status code when blocked (alias of status_code)
*
* @default 403
*/
status?: types.StatusCode;
/** Error message when blocked
*
* @default "IP not allowed"
*/
message?: string;
};
export type CloudflareRealIP = {
use: "cloudflare_real_ip" | "cloudflareRealIp" | "CloudflareRealIP";
/** Recursively resolve the IP
*
* @default false
*/
recursive?: boolean;
};
export type ModifyRequest = {
use:
| "request"
| "Request"
| "modify_request"
| "modifyRequest"
| "ModifyRequest";
/** Set HTTP headers */
set_headers?: { [key: types.HTTPHeader]: string };
/** Add HTTP headers */
add_headers?: { [key: types.HTTPHeader]: string };
/** Hide HTTP headers */
hide_headers?: types.HTTPHeader[];
/** Add prefix to request URL */
add_prefix?: string;
};
export type ModifyResponse = {
use:
| "response"
| "Response"
| "modify_response"
| "modifyResponse"
| "ModifyResponse";
/** Set HTTP headers */
set_headers?: { [key: types.HTTPHeader]: string };
/** Add HTTP headers */
add_headers?: { [key: types.HTTPHeader]: string };
/** Hide HTTP headers */
hide_headers?: types.HTTPHeader[];
};
export type OIDC = {
use: "oidc" | "OIDC";
/** Allowed users
*
* @minItems 1
*/
allowed_users?: string[];
/** Allowed groups
*
* @minItems 1
*/
allowed_groups?: string[];
};
export type RateLimit = {
use: "rate_limit" | "rateLimit" | "RateLimit";
/** Average number of requests allowed in a period
*
* @min 1
*/
average: number;
/** Maximum number of requests allowed in a period
*
* @min 1
*/
burst: number;
/** Duration of the rate limit
*
* @default 1s
*/
period?: types.Duration;
};
export type RealIP = {
use: "real_ip" | "realIP" | "RealIP";
/** Header to get the client IP from
*
* @default "X-Real-IP"
*/
header?: types.HTTPHeader;
from: types.CIDR[];
/** Recursively resolve the IP
*
* @default false
*/
recursive?: boolean;
};
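A sketch showing the same two middlewares in both shapes defined above: the per-route MiddlewaresMap form (options keyed by middleware name) and the ordered MiddlewareComposeMap[] form used by compose files. CIDRs are placeholders:

import type {
  MiddlewareComposeMap,
  MiddlewaresMap,
} from "./middlewares/middlewares"; // illustrative path

// Keyed by middleware name; each value holds only that middleware's options.
const perRouteSketch: MiddlewaresMap = {
  RedirectHTTP: {},
  CIDRWhitelist: {
    allow: ["127.0.0.1", "10.0.0.0/8"],
    status: 403,
    message: "Forbidden",
  },
};

// Ordered list form; each entry carries its own "use".
const composeSketch: MiddlewareComposeMap[] = [
  { use: "RedirectHTTP" },
  {
    use: "CIDRWhitelist",
    allow: ["127.0.0.1", "10.0.0.0/8"],
    status: 403,
    message: "Forbidden",
  },
];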

Some files were not shown because too many files have changed in this diff.