Mirror of https://github.com/yusing/godoxy.git, synced 2026-02-19 08:57:42 +01:00
Compare commits
7 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | add7884a36 |  |
|  | 115fba4ff4 |  |
|  | bb757b2432 |  |
|  | c2d8cca3b4 |  |
|  | 20695c52e8 |  |
|  | 7baf0b6fe5 |  |
|  | 863f16862b |  |
Makefile (7 changed lines)
```diff
@@ -7,7 +7,7 @@ export GOOS = linux
-WEBUI_DIR ?= ../godoxy-webui
-DOCS_DIR ?= ${WEBUI_DIR}/wiki
+REPO_URL ?= https://github.com/yusing/godoxy
+DOCS_DIR ?= wiki

 ifneq ($(BRANCH), compat)
 	GO_TAGS = sonic
```
```diff
@@ -178,10 +178,7 @@ gen-swagger:
 	python3 scripts/fix-swagger-json.py
 	# we don't need this
 	rm internal/api/v1/docs/docs.go

 gen-swagger-markdown: gen-swagger
 	# brew tap go-swagger/go-swagger && brew install go-swagger
 	swagger generate markdown -f internal/api/v1/docs/swagger.yaml --skip-validation --output ${DOCS_DIR}/src/API.md
 	cp internal/api/v1/docs/swagger.json ${DOCS_DIR}/public/api.json

 gen-api-types: gen-swagger
 	# --disable-throw-on-error
```
```diff
@@ -1,4 +1,4 @@
-# Agent Package
+# agent/pkg/agent

 The `agent` package provides the client-side implementation for interacting with GoDoxy agents. It handles agent configuration, secure communication via TLS, and provides utilities for agent deployment and management.
```
```diff
@@ -1,4 +1,4 @@
-# Stream proxy protocol
+# agent/pkg/agent/stream

 This package implements a small header-based handshake that allows an authenticated client to request forwarding to a `(host, port)` destination. It supports both TCP-over-TLS and UDP-over-DTLS transports.
```
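For context on what such a header-based handshake can look like, here is a minimal, hypothetical framing sketch. The actual wire format, authentication step, and field layout of this package are not shown in this diff and will differ:

```go
package stream

import (
	"encoding/binary"
	"fmt"
	"io"
)

// writeHandshake frames the destination as a big-endian uint16 length
// followed by "host:port", sent after the TLS/DTLS session is set up.
// This framing is illustrative, not the package's real protocol.
func writeHandshake(w io.Writer, host string, port int) error {
	dst := fmt.Sprintf("%s:%d", host, port)
	if err := binary.Write(w, binary.BigEndian, uint16(len(dst))); err != nil {
		return err
	}
	_, err := io.WriteString(w, dst)
	return err
}

// readHandshake parses the same frame on the agent side and returns
// the requested "host:port" destination to dial.
func readHandshake(r io.Reader) (string, error) {
	var n uint16
	if err := binary.Read(r, binary.BigEndian, &n); err != nil {
		return "", err
	}
	buf := make([]byte, n)
	if _, err := io.ReadFull(r, buf); err != nil {
		return "", err
	}
	return string(buf), nil
}
```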
Submodule goutils updated: 494ab85a33...0dbc371839
```diff
@@ -1,4 +1,4 @@
-# ACL (Access Control List)
+# internal/acl

 Access control at the TCP connection level with IP/CIDR, timezone, and country-based filtering.
```
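As a rough illustration of the IP/CIDR part of such filtering, here is a sketch using the standard library's net/netip. The type and function names are made up for the example; the real package additionally layers timezone and country checks on top:

```go
package acl

import "net/netip"

// CIDRFilter is a hypothetical allow-list of network prefixes.
type CIDRFilter struct {
	allow []netip.Prefix
}

// NewCIDRFilter parses CIDR strings like "10.0.0.0/8" into prefixes.
func NewCIDRFilter(cidrs ...string) (*CIDRFilter, error) {
	f := &CIDRFilter{}
	for _, c := range cidrs {
		p, err := netip.ParsePrefix(c)
		if err != nil {
			return nil, err
		}
		f.allow = append(f.allow, p)
	}
	return f, nil
}

// Allowed reports whether the remote IP matches any allowed prefix.
func (f *CIDRFilter) Allowed(ip netip.Addr) bool {
	for _, p := range f.allow {
		if p.Contains(ip) {
			return true
		}
	}
	return false
}
```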
```diff
@@ -1,4 +1,4 @@
-# Agent Pool
+# internal/agentpool

 Thread-safe pool for managing remote Docker agent connections.
```
```diff
@@ -1,4 +1,4 @@
-# API v1 Package
+# internal/api/v1

 Implements the v1 REST API handlers for GoDoxy, exposing endpoints for managing routes, Docker containers, certificates, metrics, and system configuration.
```
```diff
@@ -4,10 +4,12 @@ import (
 	"context"
 	"encoding/json"
 	"net/http"
+	"net/url"
 	"sync/atomic"
 	"time"

 	"github.com/bytedance/sonic"
+	"github.com/cenkalti/backoff/v5"
 	"github.com/gin-gonic/gin"
 	"github.com/rs/zerolog/log"
 	"github.com/yusing/godoxy/agent/pkg/agent"
@@ -35,6 +37,11 @@ type bytesFromPool struct {
 	release func([]byte)
 }

+type systemInfoData struct {
+	agentName  string
+	systemInfo any
+}
+
 // @x-id "all_system_info"
 // @BasePath /api/v1
 // @Summary Get system info
```
```diff
@@ -72,91 +79,19 @@ func AllSystemInfo(c *gin.Context) {
 	defer manager.Close()

 	query := c.Request.URL.Query()
-	queryEncoded := c.Request.URL.Query().Encode()
-
-	type SystemInfoData struct {
-		AgentName  string
-		SystemInfo any
-	}
+	queryEncoded := query.Encode()

 	// leave 5 extra slots for buffering in case new agents are added.
-	dataCh := make(chan SystemInfoData, 1+agentpool.Num()+5)
-	defer close(dataCh)
+	dataCh := make(chan systemInfoData, 1+agentpool.Num()+5)

 	ticker := time.NewTicker(req.Interval)
 	defer ticker.Stop()

-	go func() {
-		for {
-			select {
-			case <-manager.Done():
-				return
-			case data := <-dataCh:
-				err := marshalSystemInfo(manager, data.AgentName, data.SystemInfo)
-				if err != nil {
-					manager.Close()
-					return
-				}
-			}
-		}
-	}()
-
-	// processing function for one round.
-	doRound := func() (bool, error) {
-		var numErrs atomic.Int32
-
-		totalAgents := int32(1) // myself
-
-		var errs gperr.Group
-		// get system info for me and all agents in parallel.
-		errs.Go(func() error {
-			data, err := systeminfo.Poller.GetRespData(req.Period, query)
-			if err != nil {
-				numErrs.Add(1)
-				return gperr.PrependSubject(err, "Main server")
-			}
-			select {
-			case <-manager.Done():
-				return nil
-			case dataCh <- SystemInfoData{
-				AgentName:  "GoDoxy",
-				SystemInfo: data,
-			}:
-			}
-			return nil
-		})
-
-		for _, a := range agentpool.Iter() {
-			totalAgents++
-
-			errs.Go(func() error {
-				data, err := getAgentSystemInfoWithRetry(manager.Context(), a, queryEncoded)
-				if err != nil {
-					numErrs.Add(1)
-					return gperr.PrependSubject(err, "Agent "+a.Name)
-				}
-				select {
-				case <-manager.Done():
-					return nil
-				case dataCh <- SystemInfoData{
-					AgentName:  a.Name,
-					SystemInfo: data,
-				}:
-				}
-				return nil
-			})
-		}
-
-		err := errs.Wait().Error()
-		return numErrs.Load() == totalAgents, err
-	}
+	go streamSystemInfo(manager, dataCh)

 	// write system info immediately once.
-	if shouldContinue, err := doRound(); err != nil {
-		if !shouldContinue {
-			c.Error(apitypes.InternalServerError(err, "failed to get all system info"))
-			return
-		}
+	if hasSuccess, err := collectSystemInfoRound(manager, req, query, queryEncoded, dataCh); handleRoundResult(c, hasSuccess, err, false) {
+		return
 	}

 	// then continue on the ticker.
```
```diff
@@ -165,17 +100,95 @@ func AllSystemInfo(c *gin.Context) {
 		case <-manager.Done():
 			return
 		case <-ticker.C:
-			if shouldContinue, err := doRound(); err != nil {
-				if !shouldContinue {
-					c.Error(apitypes.InternalServerError(err, "failed to get all system info"))
-					return
-				}
-				log.Warn().Err(err).Msg("failed to get some system info")
+			if hasSuccess, err := collectSystemInfoRound(manager, req, query, queryEncoded, dataCh); handleRoundResult(c, hasSuccess, err, true) {
+				return
 			}
 		}
 	}
 }

+func streamSystemInfo(manager *websocket.Manager, dataCh <-chan systemInfoData) {
+	for {
+		select {
+		case <-manager.Done():
+			return
+		case data := <-dataCh:
+			err := marshalSystemInfo(manager, data.agentName, data.systemInfo)
+			if err != nil {
+				manager.Close()
+				return
+			}
+		}
+	}
+}
+
+func queueSystemInfo(manager *websocket.Manager, dataCh chan<- systemInfoData, data systemInfoData) {
+	select {
+	case <-manager.Done():
+	case dataCh <- data:
+	}
+}
+
+func collectSystemInfoRound(
+	manager *websocket.Manager,
+	req AllSystemInfoRequest,
+	query url.Values,
+	queryEncoded string,
+	dataCh chan<- systemInfoData,
+) (hasSuccess bool, err error) {
+	var numErrs atomic.Int32
+	totalAgents := int32(1) // myself
+
+	var errs gperr.Group
+	// get system info for me and all agents in parallel.
+	errs.Go(func() error {
+		data, err := systeminfo.Poller.GetRespData(req.Period, query)
+		if err != nil {
+			numErrs.Add(1)
+			return gperr.PrependSubject(err, "Main server")
+		}
+		queueSystemInfo(manager, dataCh, systemInfoData{
+			agentName:  "GoDoxy",
+			systemInfo: data,
+		})
+		return nil
+	})
+
+	for _, a := range agentpool.Iter() {
+		totalAgents++
+
+		errs.Go(func() error {
+			data, err := getAgentSystemInfoWithRetry(manager.Context(), a, queryEncoded)
+			if err != nil {
+				numErrs.Add(1)
+				return gperr.PrependSubject(err, "Agent "+a.Name)
+			}
+			queueSystemInfo(manager, dataCh, systemInfoData{
+				agentName:  a.Name,
+				systemInfo: data,
+			})
+			return nil
+		})
+	}
+
+	err = errs.Wait().Error()
+	return numErrs.Load() < totalAgents, err
+}
+
+func handleRoundResult(c *gin.Context, hasSuccess bool, err error, logPartial bool) (stop bool) {
+	if err == nil {
+		return false
+	}
+	if !hasSuccess {
+		c.Error(apitypes.InternalServerError(err, "failed to get all system info"))
+		return true
+	}
+	if logPartial {
+		log.Warn().Err(err).Msg("failed to get some system info")
+	}
+	return false
+}
+
 func getAgentSystemInfo(ctx context.Context, a *agentpool.Agent, query string) (bytesFromPool, error) {
 	ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
 	defer cancel()
```
```diff
@@ -197,35 +210,26 @@ func getAgentSystemInfo(ctx context.Context, a *agentpool.Agent, query string) (

 func getAgentSystemInfoWithRetry(ctx context.Context, a *agentpool.Agent, query string) (bytesFromPool, error) {
 	const maxRetries = 3
-	var lastErr error
-
-	for attempt := range maxRetries {
-		// Apply backoff delay for retries (not for first attempt)
-		if attempt > 0 {
-			delay := max((1<<attempt)*time.Second, 5*time.Second)
-			select {
-			case <-ctx.Done():
-				return bytesFromPool{}, ctx.Err()
-			case <-time.After(delay):
-			}
-		}
+	const retryDelay = 5 * time.Second
+	var attempt int
+	data, err := backoff.Retry(ctx, func() (bytesFromPool, error) {
+		attempt++

 		data, err := getAgentSystemInfo(ctx, a, query)
 		if err == nil {
 			return data, nil
 		}

-		lastErr = err
-
-		log.Debug().Str("agent", a.Name).Int("attempt", attempt+1).Str("error", err.Error()).Msg("Agent request attempt failed")
-
-		// Don't retry on context cancellation
-		if ctx.Err() != nil {
-			return bytesFromPool{}, ctx.Err()
-		}
+		log.Err(err).Str("agent", a.Name).Int("attempt", attempt).Msg("Agent request attempt failed")
+		return bytesFromPool{}, err
+	},
+		backoff.WithBackOff(backoff.NewConstantBackOff(retryDelay)),
+		backoff.WithMaxTries(maxRetries),
+	)
+	if err != nil {
+		return bytesFromPool{}, err
 	}

-	return bytesFromPool{}, lastErr
+	return data, nil
 }

 func marshalSystemInfo(ws *websocket.Manager, agentName string, systemInfo any) error {
```
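The calls used in the new version (backoff.Retry, backoff.WithBackOff, backoff.NewConstantBackOff, backoff.WithMaxTries) are the generics-based API of cenkalti/backoff/v5, as confirmed by the diff itself. A standalone sketch of the same pattern:

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/cenkalti/backoff/v5"
)

func main() {
	ctx := context.Background()
	tries := 0

	// backoff.Retry re-invokes the operation until it succeeds, the
	// context is cancelled, or the try budget is exhausted.
	result, err := backoff.Retry(ctx, func() (string, error) {
		tries++
		if tries < 3 {
			return "", errors.New("transient failure") // retried after the delay
		}
		return "ok", nil
	},
		backoff.WithBackOff(backoff.NewConstantBackOff(5*time.Second)),
		backoff.WithMaxTries(3),
	)

	fmt.Println(result, err, tries) // "ok <nil> 3" after two 5s delays
}
```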
```diff
@@ -1,4 +1,4 @@
-# Authentication
+# internal/auth

 Authentication providers supporting OIDC and username/password authentication with JWT-based sessions.
```
```diff
@@ -8,7 +8,6 @@ import (
 	"time"

 	"github.com/yusing/godoxy/internal/common"
-	strutils "github.com/yusing/goutils/strings"
 )

 var (
@@ -70,12 +69,12 @@ func cookieDomain(r *http.Request) string {
 		}
 	}

-	parts := strutils.SplitRune(reqHost, '.')
+	parts := strings.Split(reqHost, ".")
 	if len(parts) < 2 {
 		return ""
 	}
 	parts[0] = ""
-	return strutils.JoinRune(parts, '.')
+	return strings.Join(parts, ".")
 }

 func SetTokenCookie(w http.ResponseWriter, r *http.Request, name, value string, ttl time.Duration) {
```
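The rewritten helper derives a cookie domain by blanking the host's first label, so the join reintroduces a leading dot. A self-contained reproduction of just that logic:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Same steps as the rewritten cookieDomain, on a sample host:
	parts := strings.Split("app.example.com", ".") // ["app", "example", "com"]
	parts[0] = ""                                  // blank the first label
	fmt.Println(strings.Join(parts, "."))          // ".example.com"
}
```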
```diff
@@ -1,4 +1,4 @@
-# Autocert Package
+# internal/autocert

 Automated SSL certificate management using the ACME protocol (Let's Encrypt and compatible CAs).
```
```diff
@@ -1,4 +1,4 @@
-# Configuration Management
+# internal/config

 Centralized YAML configuration management with thread-safe state access and provider initialization.
```
```diff
@@ -1,4 +1,4 @@
-# Configuration Query
+# internal/config/query

 Read-only access to the active configuration state, including route providers and system statistics.
```
```diff
@@ -149,7 +149,7 @@ No metrics are currently exposed.
 ## Performance Characteristics

 - O(n) where n is number of providers for provider queries
-- O(n * m) where m is routes per provider for route search
+- O(n \* m) where m is routes per provider for route search
 - O(n) for statistics aggregation
 - No locking required (uses atomic load)
```
```diff
@@ -1,4 +1,4 @@
-# DNS Providers
+# internal/dnsproviders

 DNS provider integrations for Let's Encrypt certificate management via the lego library.
```
```diff
@@ -1,4 +1,4 @@
-# Docker Integration
+# internal/docker

 Docker container discovery, connection management, and label-based route configuration.
```
```diff
@@ -46,8 +46,8 @@ func (c containerHelper) getMounts() *ordered.Map[string, string] {
 }

 func (c containerHelper) parseImage() *types.ContainerImage {
-	colonSep := strutils.SplitRune(c.Image, ':')
-	slashSep := strutils.SplitRune(colonSep[0], '/')
+	colonSep := strings.Split(c.Image, ":")
+	slashSep := strings.Split(colonSep[0], "/")
 	_, sha256, _ := strings.Cut(c.ImageID, ":")
 	im := &types.ContainerImage{
 		SHA256: sha256,
```
```diff
@@ -9,7 +9,6 @@ import (
 	"github.com/goccy/go-yaml"
 	"github.com/yusing/godoxy/internal/types"
 	gperr "github.com/yusing/goutils/errs"
-	strutils "github.com/yusing/goutils/strings"
 )

 var ErrInvalidLabel = errors.New("invalid label")
@@ -31,7 +30,7 @@ func ParseLabels(labels map[string]string, aliases ...string) (types.LabelMap, e
 	ExpandWildcard(labels, aliases...)

 	for lbl, value := range labels {
-		parts := strutils.SplitRune(lbl, '.')
+		parts := strings.Split(lbl, ".")
 		if parts[0] != NSProxy {
 			continue
 		}
```
```diff
@@ -1,4 +1,4 @@
-# Entrypoint
+# internal/entrypoint

 The entrypoint package provides the main HTTP entry point for GoDoxy, handling domain-based routing, middleware application, short link matching, access logging, and HTTP server lifecycle management.
```
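As a minimal sketch of the domain-based routing idea (illustrative only — the actual entrypoint also applies middleware, short links, and access logging, and its route lookup is not shown here):

```go
package entrypoint

import (
	"net/http"
	"strings"
)

// hostRouter picks a handler by the request's Host header; exact
// hostnames win, then a "*.parent" wildcard, then the fallback.
func hostRouter(routes map[string]http.Handler, fallback http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		host := r.Host
		if i := strings.IndexByte(host, ':'); i >= 0 {
			host = host[:i] // strip the port
		}
		if h, ok := routes[host]; ok {
			h.ServeHTTP(w, r)
			return
		}
		if _, rest, found := strings.Cut(host, "."); found {
			if h, ok := routes["*."+rest]; ok {
				h.ServeHTTP(w, r)
				return
			}
		}
		fallback.ServeHTTP(w, r)
	})
}
```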
```diff
@@ -1,4 +1,4 @@
-# Health Check Package
+# internal/health/check

 Low-level health check implementations for different protocols and services in GoDoxy.
```
```diff
@@ -1,4 +1,4 @@
-# Health Monitor Package
+# internal/health/monitor

 Route health monitoring with configurable check intervals, retry policies, and notification integration.
```
```diff
@@ -6,6 +6,7 @@ import (
 	"fmt"
 	"math/rand"
 	"net/url"
+	"strings"
 	"sync/atomic"
 	"time"

@@ -199,7 +200,7 @@ func (mon *monitor) Detail() string {

 // Name implements HealthMonitor.
 func (mon *monitor) Name() string {
-	parts := strutils.SplitRune(mon.service, '/')
+	parts := strings.Split(mon.service, "/")
 	return parts[len(parts)-1]
 }
```
```diff
@@ -1,4 +1,4 @@
-# Homepage
+# internal/homepage

 The homepage package provides the GoDoxy WebUI dashboard with support for categories, favorites, widgets, dynamic item configuration, and icon management.
```
```diff
@@ -1,4 +1,4 @@
-# Icons Package
+# internal/homepage/icons

 Icon URL parsing, fetching, and listing for the homepage dashboard.
```
```diff
@@ -56,7 +56,8 @@ func init() {
 func InitCache() {
 	m := make(IconMap)
 	err := serialization.LoadFileIfExist(common.IconListCachePath, &m, sonic.Unmarshal)
-	if err != nil {
+	switch {
+	case err != nil:
 		// backward compatible
 		oldFormat := struct {
 			Icons IconMap
@@ -70,11 +71,11 @@ func InitCache() {
 			// store it to disk immediately
 			_ = serialization.SaveFile(common.IconListCachePath, &m, 0o644, sonic.Marshal)
 		}
-	} else if len(m) > 0 {
+	case len(m) > 0:
 		log.Info().
 			Int("icons", len(m)).
 			Msg("icons loaded")
-	} else {
+	default:
 		if err := updateIcons(m); err != nil {
 			log.Error().Err(err).Msg("failed to update icons")
 		}
```
```diff
@@ -142,33 +143,46 @@ func SearchIcons(keyword string, limit int) []*IconMetaSearch {
 		return a.rank - b.rank
 	}

-	var rank int
+	dashedKeyword := strings.ReplaceAll(keyword, " ", "-")
+	whitespacedKeyword := strings.ReplaceAll(keyword, "-", " ")

 	icons := ListAvailableIcons()
 	for k, icon := range icons {
-		if strutils.ContainsFold(string(k), keyword) || strutils.ContainsFold(icon.DisplayName, keyword) {
+		source, ref := k.SourceRef()
+
+		var rank int
+		switch {
+		case strings.EqualFold(ref, dashedKeyword):
+			// exact match: best rank, use source as tiebreaker (lower index = higher priority)
 			rank = 0
-		} else {
-			rank = fuzzy.RankMatchFold(keyword, string(k))
+		case strutils.HasPrefixFold(ref, dashedKeyword):
+			// prefix match: rank by how much extra the name has (shorter = better)
+			rank = 100 + len(ref) - len(dashedKeyword)
+		case strutils.ContainsFold(ref, dashedKeyword) || strutils.ContainsFold(icon.DisplayName, whitespacedKeyword):
+			// contains match
+			rank = 500 + len(ref) - len(dashedKeyword)
+		default:
+			rank = fuzzy.RankMatchFold(keyword, ref)
+			if rank == -1 || rank > 3 {
+				continue
+			}
+			rank += 1000
 		}

-		source, ref := k.SourceRef()
 		ranked := &IconMetaSearch{
 			Source: source,
 			Ref:    ref,
 			Meta:   icon,
 			rank:   rank,
 		}
-		// Sorted insert based on rank (lower rank = better match)
-		insertPos, _ := slices.BinarySearchFunc(results, ranked, sortByRank)
-		results = slices.Insert(results, insertPos, ranked)
+		results = append(results, ranked)
 		if len(results) == searchLimit {
 			break
 		}
 	}

+	slices.SortStableFunc(results, sortByRank)
+
 	// Extract results and limit to the requested count
 	return results[:min(len(results), limit)]
}
```
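A simplified, stdlib-only restatement of the new ranking tiers, with worked values. The real code uses case-insensitive strutils helpers and falls back to fuzzy.RankMatchFold; this sketch only illustrates the tier arithmetic:

```go
package icons

import "strings"

// rankFor mirrors the tier scheme: 0 = exact, 100+extra = prefix,
// 500+extra = contains; anything else would go to the fuzzy fallback.
func rankFor(ref, dashedKeyword string) (rank int, keep bool) {
	switch {
	case strings.EqualFold(ref, dashedKeyword):
		return 0, true // exact match always wins
	case strings.HasPrefix(ref, dashedKeyword):
		return 100 + len(ref) - len(dashedKeyword), true
	case strings.Contains(ref, dashedKeyword):
		return 500 + len(ref) - len(dashedKeyword), true
	default:
		return 0, false // real code: fuzzy.RankMatchFold, kept only if rank <= 3
	}
}

// For keyword "home assistant" (dashed: "home-assistant"):
//   rankFor("home-assistant", "home-assistant")       -> 0   (exact)
//   rankFor("home-assistant-green", "home-assistant") -> 106 (prefix, 6 extra chars)
//   rankFor("my-home-assistant", "home-assistant")    -> 503 (contains, 3 extra chars)
```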
```diff
@@ -1,4 +1,4 @@
-# qBittorrent Integration Package
+# internal/homepage/integrations/qbittorrent

 This package provides a qBittorrent widget for the GoDoxy homepage dashboard, enabling real-time monitoring of torrent status and transfer statistics.
```
````diff
@@ -1,13 +0,0 @@
-# Types Package
-
-Configuration types for the homepage package.
-
-## Config
-
-```go
-type Config struct {
-	UseDefaultCategories bool `json:"use_default_categories"`
-}
-
-var ActiveConfig atomic.Pointer[Config]
-```
````
```diff
@@ -1,4 +1,4 @@
-# Homepage Widgets Package
+# internal/homepage/widgets

 > [!WARNING]
 >
```
```diff
@@ -1,4 +1,4 @@
-# Idlewatcher
+# internal/idlewatcher

 Manages container lifecycle based on idle timeout, automatically stopping/pausing containers and waking them on request.
```
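A minimal sketch of the idle-timeout pattern this describes: every proxied request pushes back a timer whose expiry stops the container. The stop callback stands in for the real Docker/LXC provider call; the actual package's types differ:

```go
package idlewatcher

import (
	"sync"
	"time"
)

type idleTimer struct {
	mu      sync.Mutex
	timer   *time.Timer
	timeout time.Duration
}

// newIdleTimer arms the timer; stop fires once the idle timeout
// elapses with no intervening Touch (e.g. a container stop/pause).
func newIdleTimer(timeout time.Duration, stop func()) *idleTimer {
	t := &idleTimer{timeout: timeout}
	t.timer = time.AfterFunc(timeout, stop)
	return t
}

// Touch is called on every proxied request to push back the deadline.
func (t *idleTimer) Touch() {
	t.mu.Lock()
	defer t.mu.Unlock()
	t.timer.Reset(t.timeout)
}
```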
```diff
@@ -1,4 +1,4 @@
-# Idlewatcher Provider
+# internal/idlewatcher/provider

 Implements container runtime abstractions for Docker and Proxmox LXC backends.
```
```diff
@@ -1,4 +1,4 @@
-# JSON Store
+# internal/jsonstore

 The jsonstore package provides persistent JSON storage with namespace support, using thread-safe concurrent maps and automatic loading/saving.
```
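A hedged sketch of what namespaced JSON persistence can look like. The types and the explicit mutex/Save design here are illustrative only; the real package uses concurrent maps and automatic load/save hooks instead:

```go
package jsonstore

import (
	"encoding/json"
	"os"
	"sync"
)

// Store keys values by namespace, then by key.
type Store struct {
	mu   sync.RWMutex
	data map[string]map[string]json.RawMessage // namespace -> key -> value
	path string
}

// Set marshals v and records it under (ns, key).
func (s *Store) Set(ns, key string, v any) error {
	raw, err := json.Marshal(v)
	if err != nil {
		return err
	}
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.data == nil {
		s.data = map[string]map[string]json.RawMessage{}
	}
	if s.data[ns] == nil {
		s.data[ns] = map[string]json.RawMessage{}
	}
	s.data[ns][key] = raw
	return nil
}

// Save persists every namespace to a single JSON file on disk.
func (s *Store) Save() error {
	s.mu.RLock()
	defer s.mu.RUnlock()
	raw, err := json.MarshalIndent(s.data, "", "  ")
	if err != nil {
		return err
	}
	return os.WriteFile(s.path, raw, 0o644)
}
```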
```diff
@@ -1,4 +1,4 @@
-# Logging Package
+# internal/logging

 Structured logging capabilities for GoDoxy, including application logging, HTTP access logging, and in-memory log streaming.
```
```diff
@@ -1,4 +1,4 @@
-# Access Logging
+# internal/logging/accesslog

 Provides HTTP access logging with file rotation, log filtering, and multiple output formats for request and ACL event logging.
```
```diff
@@ -8,7 +8,6 @@ import (
 	"strings"

 	nettypes "github.com/yusing/godoxy/internal/net/types"
-	strutils "github.com/yusing/goutils/strings"
 )

 type (
@@ -54,7 +53,7 @@ func (method HTTPMethod) Fulfill(req *http.Request, res *http.Response) bool {

 // Parse implements strutils.Parser.
 func (k *HTTPHeader) Parse(v string) error {
-	split := strutils.SplitRune(v, '=')
+	split := strings.Split(v, "=")
 	switch len(split) {
 	case 1:
 		split = append(split, "")
```
```diff
@@ -4,6 +4,7 @@ import (
 	"errors"
 	"fmt"
 	"strconv"
+	"strings"

 	strutils "github.com/yusing/goutils/strings"
 )
@@ -32,7 +33,7 @@ var defaultChunkSize = 32 * kilobyte
 //
 // Parse implements strutils.Parser.
 func (r *Retention) Parse(v string) (err error) {
-	split := strutils.SplitSpace(v)
+	split := strings.Fields(v)
 	if len(split) != 2 {
 		return fmt.Errorf("%w: %s", ErrInvalidSyntax, v)
 	}
```
```diff
@@ -4,9 +4,9 @@ import (
 	"errors"
 	"fmt"
 	"strconv"
+	"strings"

 	gperr "github.com/yusing/goutils/errs"
-	strutils "github.com/yusing/goutils/strings"
 )

 type StatusCodeRange struct {
@@ -22,7 +22,7 @@ func (r *StatusCodeRange) Includes(code int) bool {

 // Parse implements strutils.Parser.
 func (r *StatusCodeRange) Parse(v string) error {
-	split := strutils.SplitRune(v, '-')
+	split := strings.Split(v, "-")
 	switch len(split) {
 	case 1:
 		start, err := strconv.Atoi(split[0])
```
```diff
@@ -1,4 +1,4 @@
-# In-Memory Logger
+# internal/logging/memlogger

 Provides a thread-safe in-memory circular buffer logger with WebSocket-based real-time streaming for log data.
```
```diff
@@ -1,4 +1,4 @@
-# MaxMind
+# internal/maxmind

 The maxmind package provides MaxMind GeoIP database integration for IP geolocation, including automatic database downloading and updates.
```
```diff
@@ -1,4 +1,4 @@
-# Metrics Package
+# internal/metrics

 System monitoring and metrics collection for GoDoxy with time-series storage and REST/WebSocket APIs.
```
```diff
@@ -1,4 +1,4 @@
-# Period Metrics
+# internal/metrics/period

 Provides time-bucketed metrics storage with configurable periods, enabling historical data aggregation and real-time streaming.
```
```diff
@@ -453,7 +453,7 @@ for {
 - O(1) add to circular buffer
 - O(1) get (returns slice view)
 - O(n) serialization where n = total entries
-- Memory: O(5 * 100 * sizeof(T)) = fixed overhead
+- Memory: O(5 _ 100 _ sizeof(T)) = fixed overhead
 - JSON load/save: O(n) where n = total entries

 ## Testing Notes
```
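A minimal generic ring buffer matching the O(1)-add claim above — a sketch, not the package's implementation (Get is shown as an O(n) copy, whereas the real code returns a slice view):

```go
package period

// RingBuffer keeps the last cap(buf) entries, overwriting the oldest.
type RingBuffer[T any] struct {
	buf   []T
	head  int // index of the next write
	count int
}

func NewRingBuffer[T any](capacity int) *RingBuffer[T] {
	return &RingBuffer[T]{buf: make([]T, capacity)}
}

// Add is O(1): it writes at head and advances it modulo capacity.
func (r *RingBuffer[T]) Add(v T) {
	r.buf[r.head] = v
	r.head = (r.head + 1) % len(r.buf)
	if r.count < len(r.buf) {
		r.count++
	}
}

// Get returns the stored entries oldest-first.
func (r *RingBuffer[T]) Get() []T {
	out := make([]T, 0, r.count)
	start := (r.head - r.count + len(r.buf)) % len(r.buf)
	for i := 0; i < r.count; i++ {
		out = append(out, r.buf[(start+i)%len(r.buf)])
	}
	return out
}
```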
```diff
@@ -1,4 +1,4 @@
-# System Info
+# internal/metrics/systeminfo

 Collects and aggregates system metrics including CPU, memory, disk, network, and sensor data with configurable aggregation modes.
```
````diff
@@ -367,7 +367,7 @@ curl "http://localhost:8080/api/metrics/system?period=1h&aggregate=disks_read_sp

 ```javascript
 const ws = new WebSocket(
-  "ws://localhost:8080/api/metrics/system?period=1m&interval=5s&aggregate=cpu_average"
+  "ws://localhost:8080/api/metrics/system?period=1m&interval=5s&aggregate=cpu_average",
 );

 ws.onmessage = (event) => {
````
```diff
@@ -1,4 +1,4 @@
-# Uptime
+# internal/metrics/uptime

 Tracks and aggregates route health status over time, providing uptime/downtime statistics and latency metrics.
```
```diff
@@ -1,4 +1,4 @@
-# Network Utilities
+# internal/net

 The net package provides network utility functions for GoDoxy, including TCP connection testing and network-related helpers.
```
```diff
@@ -1,4 +1,4 @@
-# gphttp
+# internal/net/gphttp

 HTTP utilities package providing transport configuration, default HTTP client, and a wrapper around `http.ServeMux` with panic recovery.
```
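A small sketch of the panic-recovery wrapper idea. The type name and shape here are assumptions for illustration; the package's actual API is not shown in this diff:

```go
package gphttp

import (
	"log"
	"net/http"
)

// RecoverMux wraps http.ServeMux so a panicking handler returns a 500
// instead of tearing down the connection.
type RecoverMux struct {
	*http.ServeMux
}

func NewRecoverMux() *RecoverMux {
	return &RecoverMux{ServeMux: http.NewServeMux()}
}

func (m *RecoverMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	defer func() {
		if rec := recover(); rec != nil {
			log.Printf("panic serving %s: %v", r.URL.Path, rec)
			http.Error(w, "internal server error", http.StatusInternalServerError)
		}
	}()
	m.ServeMux.ServeHTTP(w, r)
}
```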
```diff
@@ -1,4 +1,4 @@
-# Load Balancer
+# internal/net/gphttp/loadbalancer

 Load balancing package providing multiple distribution algorithms, sticky sessions, and server health management.
```
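For illustration, the simplest such distribution algorithm — an atomic round-robin picker. This is a sketch; the real package also implements sticky sessions and health-aware selection:

```go
package loadbalancer

import "sync/atomic"

// RoundRobin hands out backends in rotation; the atomic counter makes
// Next safe to call from concurrent request handlers.
// Assumes at least one backend is configured.
type RoundRobin[T any] struct {
	n        atomic.Uint64
	backends []T
}

func (rr *RoundRobin[T]) Next() T {
	i := rr.n.Add(1) - 1
	return rr.backends[i%uint64(len(rr.backends))]
}
```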
```diff
@@ -1,4 +1,4 @@
-# Middleware
+# internal/net/gphttp/middleware

 HTTP middleware framework providing request/response processing, middleware chaining, and composition from YAML files.
```
```diff
@@ -1,4 +1,4 @@
-# Captcha Middleware
+# internal/net/gphttp/middleware/captcha

 CAPTCHA verification middleware package providing session-based captcha challenge and verification.
```
```diff
@@ -1,4 +1,4 @@
-# Error Page Middleware
+# internal/net/gphttp/middleware/errorpage

 Custom error page serving middleware that replaces default HTTP error responses with styled custom pages.
```
````diff
@@ -233,16 +233,16 @@ flowchart TD
 ```html
 <!DOCTYPE html>
 <html>
-<head>
+  <head>
     <title>Service Unavailable</title>
-  <link rel="stylesheet" href="/$gperrorpage/style.css">
-</head>
-<body>
+    <link rel="stylesheet" href="/$gperrorpage/style.css" />
+  </head>
+  <body>
     <div class="error-container">
-  <h1>503 - Service Unavailable</h1>
-  <p>The service is temporarily unavailable. Please try again later.</p>
+      <h1>503 - Service Unavailable</h1>
+      <p>The service is temporarily unavailable. Please try again later.</p>
     </div>
-</body>
+  </body>
 </html>
 ```
````
```diff
@@ -1,4 +1,4 @@
-# Notifications
+# internal/notif

 The notif package provides a notification dispatching system for GoDoxy, supporting multiple providers (Webhook, Gotify, Ntfy) with retry logic and exponential backoff.
```
```diff
@@ -1,4 +1,4 @@
-# Proxmox
+# internal/proxmox

 The proxmox package provides Proxmox VE integration for GoDoxy, enabling management of Proxmox LXC containers.
```
```diff
@@ -1,4 +1,4 @@
-# Route
+# internal/route

 Provides HTTP routing, reverse proxy, file serving, and TCP/UDP stream proxying for GoDoxy.
```
```diff
@@ -1,4 +1,4 @@
-# Route Provider
+# internal/route/provider

 Discovers and loads routes from Docker containers, YAML files, and remote agents.
```
```diff
@@ -1,4 +1,4 @@
-# Route Rules
+# internal/route/rules

 Implements a rule engine for HTTP request/response processing, enabling conditional routing, header manipulation, authentication, and more.
```
```diff
@@ -1,4 +1,4 @@
-# Rule Presets
+# internal/route/rules/presets

 Provides embedded, pre-configured rule sets for common routing patterns.
```
```diff
@@ -1,4 +1,4 @@
-# Stream Handling
+# internal/route/stream

 Implements TCP and UDP stream proxying for non-HTTP protocols.
```
```diff
@@ -3,9 +3,9 @@ package route

 import (
 	"errors"
 	"strconv"
+	"strings"

 	gperr "github.com/yusing/goutils/errs"
-	strutils "github.com/yusing/goutils/strings"
 )

 type Port struct {
@@ -20,14 +20,18 @@ var (

 // Parse implements strutils.Parser.
 func (p *Port) Parse(v string) (err error) {
-	parts := strutils.SplitRune(v, ':')
+	parts := strings.Split(v, ":")
 	switch len(parts) {
 	case 1:
 		p.Listening = 0
 		p.Proxy, err = strconv.Atoi(v)
 	case 2:
 		var err2 error
-		p.Listening, err = strconv.Atoi(parts[0])
+		if parts[0] == "" {
+			p.Listening = 0
+		} else {
+			p.Listening, err = strconv.Atoi(parts[0])
+		}
 		p.Proxy, err2 = strconv.Atoi(parts[1])
 		err = gperr.Join(err, err2)
 	default:
```
```diff
@@ -10,7 +10,6 @@ var invalidPorts = []string{
 	"",
 	"123:",
 	"0:",
-	":1234",
 	"qwerty",
 	"asdfgh:asdfgh",
 	"1234:asdfgh",
@@ -78,6 +77,14 @@ func TestPortValid(t *testing.T) {
 			Proxy: 5678,
 		},
 	},
+	{
+		name:   "valid_lp_empty",
+		inputs: ":1234",
+		expect: Port{
+			Listening: 0,
+			Proxy:     1234,
+		},
+	},
 	{
 		name:   "valid_p",
 		inputs: "5678",
```
```diff
@@ -1,4 +1,4 @@
-# Serialization Package
+# internal/serialization

 Flexible, type-safe serialization/deserialization with validation support for GoDoxy configuration.
```
```diff
@@ -1,4 +1,4 @@
-# Watcher
+# internal/watcher

 Provides file and Docker event watching capabilities for GoDoxy, enabling dynamic configuration updates.
```
Binary file not shown. Size: 4.0 MiB → 284 KiB
Binary file not shown. Size: 1.6 MiB → 191 KiB
Binary file not shown. Size: 476 KiB → 292 KiB
scripts/update-wiki/api-md2mdx.ts (new file, +114 lines)
```ts
export function md2mdx(md: string) {
  const indexFirstH2 = md.indexOf("## ");
  if (indexFirstH2 === -1) {
    console.error("## section not found in the file");
    process.exit(1);
  }

  const h1 = md.slice(0, indexFirstH2);
  const h1Lines = h1.split("\n");
  const keptH1Lines: string[] = [];
  const callouts: string[] = [];

  for (let i = 0; i < h1Lines.length; i++) {
    const line = h1Lines[i] ?? "";
    const calloutStart = line.match(/^>\s*\[!([a-z0-9_-]+)\]\s*$/i);
    if (calloutStart) {
      const rawCalloutType = (calloutStart[1] ?? "note").toLowerCase();
      const calloutType =
        rawCalloutType === "note"
          ? "info"
          : rawCalloutType === "warning"
            ? "warn"
            : rawCalloutType;
      const contentLines: string[] = [];

      i++;
      for (; i < h1Lines.length; i++) {
        const blockLine = h1Lines[i] ?? "";
        if (!blockLine.startsWith(">")) {
          i--;
          break;
        }
        contentLines.push(blockLine.replace(/^>\s?/, ""));
      }

      while (contentLines[0] === "") {
        contentLines.shift();
      }
      while (contentLines[contentLines.length - 1] === "") {
        contentLines.pop();
      }

      if (contentLines.length > 0) {
        callouts.push(
          `<Callout type="${calloutType}">\n${contentLines.join("\n")}\n</Callout>`,
        );
      }
      continue;
    }

    keptH1Lines.push(line);
  }

  const h1WithoutCallout = keptH1Lines.join("\n");
  const titleMatchResult = h1WithoutCallout.match(
    new RegExp(/^\s*#\s+([^\n]+)/, "im"),
  );
  const title = titleMatchResult?.[1]?.trim() ?? "";
  let description = h1WithoutCallout
    .replace(new RegExp(/^\s*#\s+[^\n]+\n?/, "im"), "")
    .replaceAll(new RegExp(/^\s*>.+$/, "gm"), "")
    .trim();
  // remove trailing full stop
  if (description.endsWith(".")) {
    description = description.slice(0, -1);
  }

  let header = `---\ntitle: ${title}`;
  if (description) {
    header += `\ndescription: ${description}`;
  }
  header += "\n---";

  md = md.slice(indexFirstH2);
  const calloutsBlock = callouts.join("\n\n");
  if (calloutsBlock) {
    md = `${header}\n\n${calloutsBlock}\n\n${md}`;
  } else {
    md = `${header}\n\n${md}`;
  }

  md = md.replaceAll("</br>", "<br/>");
  md = md.replaceAll("<0", "\\<0");

  return md;
}

async function main() {
  const Parser = await import("argparse").then((m) => m.ArgumentParser);

  const parser = new Parser({
    description: "Convert API markdown to VitePress MDX",
  });
  parser.add_argument("-i", "--input", {
    help: "Input markdown file",
    required: true,
  });
  parser.add_argument("-o", "--output", {
    help: "Output VitePress MDX file",
    required: true,
  });

  const args = parser.parse_args();
  const inMdFile = args.input;
  const outMdxFile = args.output;

  const md = await Bun.file(inMdFile).text();
  const mdx = md2mdx(md);
  await Bun.write(outMdxFile, mdx);
}

if (import.meta.main) {
  await main();
}
```
```diff
@@ -4,20 +4,28 @@
   "workspaces": {
     "": {
       "name": "update-wiki",
+      "dependencies": {
+        "argparse": "^2.0.1",
+      },
       "devDependencies": {
-        "@types/bun": "latest",
+        "@types/argparse": "^2.0.17",
+        "@types/bun": "^1.3.9",
       },
       "peerDependencies": {
-        "typescript": "^5",
+        "typescript": "^5.9.3",
       },
     },
   },
   "packages": {
-    "@types/bun": ["@types/bun@1.3.5", "", { "dependencies": { "bun-types": "1.3.5" } }, "sha512-RnygCqNrd3srIPEWBd5LFeUYG7plCoH2Yw9WaZGyNmdTEei+gWaHqydbaIRkIkcbXwhBT94q78QljxN0Sk838w=="],
+    "@types/argparse": ["@types/argparse@2.0.17", "", {}, "sha512-fueJssTf+4dW4HODshEGkIZbkLKHzgu1FvCI4cTc/MKum/534Euo3SrN+ilq8xgyHnOjtmg33/hee8iXLRg1XA=="],
+
+    "@types/bun": ["@types/bun@1.3.9", "", { "dependencies": { "bun-types": "1.3.9" } }, "sha512-KQ571yULOdWJiMH+RIWIOZ7B2RXQGpL1YQrBtLIV3FqDcCu6FsbFUBwhdKUlCKUpS3PJDsHlJ1QKlpxoVR+xtw=="],

     "@types/node": ["@types/node@25.0.3", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA=="],

-    "bun-types": ["bun-types@1.3.5", "", { "dependencies": { "@types/node": "*" } }, "sha512-inmAYe2PFLs0SUbFOWSVD24sg1jFlMPxOjOSSCYqUgn4Hsc3rDc7dFvfVYjFPNHtov6kgUeulV4SxbuIV/stPw=="],
+    "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
+
+    "bun-types": ["bun-types@1.3.9", "", { "dependencies": { "@types/node": "*" } }, "sha512-+UBWWOakIP4Tswh0Bt0QD0alpTY8cb5hvgiYeWCMet9YukHbzuruIEeXC2D7nMJPB12kbh8C7XJykSexEqGKJg=="],

     "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
```
```diff
@@ -1,393 +1,338 @@
 import { mkdir, readdir, readFile, rm, writeFile } from "node:fs/promises";
 import path from "node:path";
 import { Glob } from "bun";
+import { md2mdx } from "./api-md2mdx";

 type ImplDoc = {
   /** Directory path relative to this repo, e.g. "internal/health/check" */
   pkgPath: string;
   /** File name in wiki `src/impl/`, e.g. "internal-health-check.md" */
   docFileName: string;
   /** VitePress route path (extensionless), e.g. "/impl/internal-health-check" */
   docRoute: string;
   /** Absolute source README path */
   srcPathAbs: string;
   /** Absolute destination doc path */
   dstPathAbs: string;
 };

 const START_MARKER = "// GENERATED-IMPL-SIDEBAR-START";
 const END_MARKER = "// GENERATED-IMPL-SIDEBAR-END";

 const skipSubmodules = [
   "internal/go-oidc/",
   "internal/gopsutil/",
   "internal/go-proxmox/",
 ];

 function escapeRegex(s: string) {
   return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
 }

 function escapeSingleQuotedTs(s: string) {
   return s.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
 }

 function normalizeRepoUrl(raw: string) {
   let url = (raw ?? "").trim();
   if (!url) return "";
   // Common typo: "https://https://github.com/..."
   url = url.replace(/^https?:\/\/https?:\/\//i, "https://");
   if (!/^https?:\/\//i.test(url)) url = `https://${url}`;
   url = url.replace(/\/+$/, "");
   return url;
 }

 function sanitizeFileStemFromPkgPath(pkgPath: string) {
   // Convert a package path into a stable filename.
   // Example: "internal/go-oidc/example" -> "internal-go-oidc-example"
   // Keep it readable and unique (uses full path).
   const parts = pkgPath
     .split("/")
     .filter(Boolean)
     .map((p) => p.replace(/[^A-Za-z0-9._-]+/g, "-"));
   const joined = parts.join("-");
   return joined.replace(/-+/g, "-").replace(/^-|-$/g, "");
 }

 function splitUrlAndFragment(url: string): {
   urlNoFragment: string;
   fragment: string;
 } {
   const i = url.indexOf("#");
   if (i === -1) return { urlNoFragment: url, fragment: "" };
   return { urlNoFragment: url.slice(0, i), fragment: url.slice(i) };
 }

 function isExternalOrAbsoluteUrl(url: string) {
   // - absolute site links: "/foo"
   // - pure fragments: "#bar"
   // - external schemes: "https:", "mailto:", "vscode:", etc.
   // IMPORTANT: don't treat "config.go:29" as a scheme.
   if (url.startsWith("/") || url.startsWith("#")) return true;
   if (url.includes("://")) return true;
   return /^(https?|mailto|tel|vscode|file|data|ssh|git):/i.test(url);
 }

 function isRepoSourceFilePath(filePath: string) {
   // Conservative allow-list: avoid rewriting .md (non-README) which may be VitePress docs.
   return /\.(go|ts|tsx|js|jsx|py|sh|yml|yaml|json|toml|env|css|html|txt)$/i.test(
     filePath,
   );
 }

 function parseFileLineSuffix(urlNoFragment: string): {
   filePath: string;
   line?: string;
 } {
   // Match "file.ext:123" (line suffix), while leaving "file.ext" untouched.
   const m = urlNoFragment.match(/^(.*?):(\d+)$/);
   if (!m) return { filePath: urlNoFragment };
   return { filePath: m[1] ?? urlNoFragment, line: m[2] };
 }

 function rewriteMarkdownLinksOutsideFences(
   md: string,
   rewriteInline: (url: string) => string,
 ) {
   const lines = md.split("\n");
   let inFence = false;

   for (let i = 0; i < lines.length; i++) {
     const line = lines[i] ?? "";
     const trimmed = line.trimStart();
     if (trimmed.startsWith("```")) {
       inFence = !inFence;
       continue;
     }
     if (inFence) continue;

     // Inline markdown links/images: [text](url "title") / ![alt](url)
     lines[i] = line.replace(
       /\]\(([^)\s]+)(\s+"[^"]*")?\)/g,
       (_full, urlRaw: string, maybeTitle: string | undefined) => {
         const rewritten = rewriteInline(urlRaw);
         return `](${rewritten}${maybeTitle ?? ""})`;
       },
     );
   }

   return lines.join("\n");
 }

 function rewriteImplMarkdown(params: {
   md: string;
   pkgPath: string;
   readmeRelToDocRoute: Map<string, string>;
   dirPathToDocRoute: Map<string, string>;
   repoUrl: string;
 }) {
   const { md, pkgPath, readmeRelToDocRoute, dirPathToDocRoute, repoUrl } =
     params;

   return rewriteMarkdownLinksOutsideFences(md, (urlRaw) => {
     // Handle angle-bracketed destinations: (<./foo/README.md>)
     const angleWrapped =
       urlRaw.startsWith("<") && urlRaw.endsWith(">")
         ? urlRaw.slice(1, -1)
         : urlRaw;

     const { urlNoFragment, fragment } = splitUrlAndFragment(angleWrapped);
     if (!urlNoFragment) return urlRaw;
     if (isExternalOrAbsoluteUrl(urlNoFragment)) return urlRaw;

     // 1) Directory links like "common" or "common/" that have a README
     const dirPathNormalized = urlNoFragment.replace(/\/+$/, "");
     let rewritten: string | undefined;
     // First try exact match
     if (dirPathToDocRoute.has(dirPathNormalized)) {
       rewritten = `${dirPathToDocRoute.get(dirPathNormalized)}${fragment}`;
     } else {
       // Fallback: check parent directories for a README
       // This handles paths like "internal/watcher/events" where only the parent has a README
       let parentPath = dirPathNormalized;
       while (parentPath.includes("/")) {
         parentPath = parentPath.slice(0, parentPath.lastIndexOf("/"));
         if (dirPathToDocRoute.has(parentPath)) {
           rewritten = `${dirPathToDocRoute.get(parentPath)}${fragment}`;
           break;
         }
       }
     }
     if (rewritten) {
       return angleWrapped === urlRaw ? rewritten : `<${rewritten}>`;
     }

     // 2) Intra-repo README links -> VitePress impl routes
     if (/(^|\/)README\.md$/.test(urlNoFragment)) {
       const targetReadmeRel = path.posix.normalize(
         path.posix.join(pkgPath, urlNoFragment),
       );
       const route = readmeRelToDocRoute.get(targetReadmeRel);
       if (route) {
         const rewritten = `${route}${fragment}`;
         return angleWrapped === urlRaw ? rewritten : `<${rewritten}>`;
       }
       return urlRaw;
     }

     // 3) Local source-file references like "config.go:29" -> GitHub blob link
     if (repoUrl) {
       const { filePath, line } = parseFileLineSuffix(urlNoFragment);
       if (isRepoSourceFilePath(filePath)) {
         const repoRel = path.posix.normalize(
           path.posix.join(pkgPath, filePath),
         );
         const githubUrl = `${repoUrl}/blob/main/${repoRel}${
           line ? `#L${line}` : ""
         }`;
         const rewritten = `${githubUrl}${fragment}`;
         return angleWrapped === urlRaw ? rewritten : `<${rewritten}>`;
       }
     }

     return urlRaw;
   });
 }

 async function listRepoReadmes(repoRootAbs: string): Promise<string[]> {
   const glob = new Glob("**/README.md");
   const readmes: string[] = [];

   for await (const rel of glob.scan({
     cwd: repoRootAbs,
     onlyFiles: true,
     dot: false,
   })) {
     // Bun returns POSIX-style rel paths.
     if (rel === "README.md") continue; // exclude root README
     if (rel.startsWith(".git/") || rel.includes("/.git/")) continue;
     if (rel.startsWith("node_modules/") || rel.includes("/node_modules/"))
       continue;
     let skip = false;
     for (const submodule of skipSubmodules) {
       if (rel.startsWith(submodule)) {
         skip = true;
         break;
       }
     }
     if (skip) continue;
     readmes.push(rel);
   }

   // Deterministic order.
   readmes.sort((a, b) => a.localeCompare(b));
   return readmes;
 }

 async function writeImplDocCopy(params: {
   srcAbs: string;
   dstAbs: string;
   pkgPath: string;
   readmeRelToDocRoute: Map<string, string>;
   dirPathToDocRoute: Map<string, string>;
   repoUrl: string;
 }) {
   const {
     srcAbs,
     dstAbs,
     pkgPath,
     readmeRelToDocRoute,
     dirPathToDocRoute,
     repoUrl,
   } = params;
   await mkdir(path.dirname(dstAbs), { recursive: true });
   await rm(dstAbs, { force: true });

   const original = await readFile(srcAbs, "utf8");
   const rewritten = rewriteImplMarkdown({
     md: original,
     pkgPath,
     readmeRelToDocRoute,
     dirPathToDocRoute,
     repoUrl,
   });
-  await writeFile(dstAbs, rewritten);
+  await writeFile(dstAbs, md2mdx(rewritten));
 }

 async function syncImplDocs(
   repoRootAbs: string,
   wikiRootAbs: string,
 ): Promise<ImplDoc[]> {
-  const implDirAbs = path.join(wikiRootAbs, "src", "impl");
+  const implDirAbs = path.join(wikiRootAbs, "content", "docs", "impl");
   await mkdir(implDirAbs, { recursive: true });

   const readmes = await listRepoReadmes(repoRootAbs);
   const docs: ImplDoc[] = [];
   const expectedFileNames = new Set<string>();
-  expectedFileNames.add("introduction.md");
+  expectedFileNames.add("index.mdx");
+  expectedFileNames.add("meta.json");

   const repoUrl = normalizeRepoUrl(
     Bun.env.REPO_URL ?? "https://github.com/yusing/godoxy",
   );

   // Precompute mapping from repo-relative README path -> VitePress route.
   // This lets us rewrite intra-repo README links when copying content.
   const readmeRelToDocRoute = new Map<string, string>();

   // Also precompute mapping from directory path -> VitePress route.
   // This handles links like "[`common/`](common)" that point to directories with READMEs.
   const dirPathToDocRoute = new Map<string, string>();

   for (const readmeRel of readmes) {
     const pkgPath = path.posix.dirname(readmeRel);
     if (!pkgPath || pkgPath === ".") continue;

     const docStem = sanitizeFileStemFromPkgPath(pkgPath);
     if (!docStem) continue;
     const route = `/impl/${docStem}`;
     readmeRelToDocRoute.set(readmeRel, route);
     dirPathToDocRoute.set(pkgPath, route);
   }

   for (const readmeRel of readmes) {
     const pkgPath = path.posix.dirname(readmeRel);
     if (!pkgPath || pkgPath === ".") continue;

     const docStem = sanitizeFileStemFromPkgPath(pkgPath);
     if (!docStem) continue;
-    const docFileName = `${docStem}.md`;
+    const docFileName = `${docStem}.mdx`;
     const docRoute = `/impl/${docStem}`;

     const srcPathAbs = path.join(repoRootAbs, readmeRel);
     const dstPathAbs = path.join(implDirAbs, docFileName);

     await writeImplDocCopy({
       srcAbs: srcPathAbs,
       dstAbs: dstPathAbs,
       pkgPath,
       readmeRelToDocRoute,
       dirPathToDocRoute,
       repoUrl,
     });

     docs.push({ pkgPath, docFileName, docRoute, srcPathAbs, dstPathAbs });
     expectedFileNames.add(docFileName);
   }

   // Clean orphaned impl docs.
   const existing = await readdir(implDirAbs, { withFileTypes: true });
   for (const ent of existing) {
     if (!ent.isFile()) continue;
     if (!ent.name.endsWith(".md")) continue;
     if (expectedFileNames.has(ent.name)) continue;
     await rm(path.join(implDirAbs, ent.name), { force: true });
   }

   // Deterministic for sidebar.
   docs.sort((a, b) => a.pkgPath.localeCompare(b.pkgPath));
   return docs;
 }

-function renderSidebarItems(docs: ImplDoc[], indent: string) {
-  // link: '/impl/<stem>' (extensionless) because VitePress `srcDir = "src"`.
-  if (docs.length === 0) return "";
-  return (
-    docs
-      .map((d) => {
-        const text = escapeSingleQuotedTs(d.pkgPath);
-        const link = escapeSingleQuotedTs(d.docRoute);
-        return `${indent}{ text: '${text}', link: '${link}' },`;
-      })
-      .join("\n") + "\n"
-  );
-}
-
-async function updateVitepressSidebar(wikiRootAbs: string, docs: ImplDoc[]) {
-  const configPathAbs = path.join(wikiRootAbs, ".vitepress", "config.mts");
-  if (!(await Bun.file(configPathAbs).exists())) {
-    throw new Error(`vitepress config not found: ${configPathAbs}`);
-  }
-
-  const original = await readFile(configPathAbs, "utf8");
-
-  // Replace between markers with generated items.
-  // We keep indentation based on the marker line.
-  const markerRe = new RegExp(
-    `(^[\\t ]*)${escapeRegex(START_MARKER)}[\\s\\S]*?\\n\\1${escapeRegex(
-      END_MARKER,
-    )}`,
-    "m",
-  );
-
-  const m = original.match(markerRe);
-  if (!m) {
-    throw new Error(
-      `sidebar markers not found in ${configPathAbs}. Expected lines: ${START_MARKER} ... ${END_MARKER}`,
-    );
-  }
-  const indent = m[1] ?? "";
-  const generated = `${indent}${START_MARKER}\n${renderSidebarItems(
-    docs,
-    indent,
-  )}${indent}${END_MARKER}`;
-
-  const updated = original.replace(markerRe, generated);
-  if (updated !== original) {
-    await writeFile(configPathAbs, updated);
-  }
-}
-
 async function main() {
   // This script lives in `scripts/update-wiki/`, so repo root is two levels up.
-  const repoRootAbs = path.resolve(import.meta.dir, "../..");
+  const repoRootAbs = path.resolve(import.meta.dir);

   // Required by task, but allow overriding via env for convenience.
-  const wikiRootAbs = Bun.env.DOCS_DIR
-    ? path.resolve(repoRootAbs, Bun.env.DOCS_DIR)
-    : path.resolve(repoRootAbs, "..", "godoxy-webui", "wiki");
+  const wikiRootAbs = Bun.env.DOCS_DIR
+    ? path.resolve(repoRootAbs, Bun.env.DOCS_DIR)
+    : undefined;

-  const docs = await syncImplDocs(repoRootAbs, wikiRootAbs);
-  await updateVitepressSidebar(wikiRootAbs, docs);
+  if (!wikiRootAbs) {
+    throw new Error("DOCS_DIR is not set");
+  }
+
+  await syncImplDocs(repoRootAbs, wikiRootAbs);
 }

 await main();
```
```diff
@@ -2,9 +2,13 @@
   "name": "update-wiki",
   "private": true,
   "devDependencies": {
-    "@types/bun": "latest"
+    "@types/argparse": "^2.0.17",
+    "@types/bun": "^1.3.9"
   },
   "peerDependencies": {
-    "typescript": "^5"
+    "typescript": "^5.9.3"
   },
+  "dependencies": {
+    "argparse": "^2.0.1"
+  }
 }
```
```diff
@@ -1,4 +1,4 @@
-# Socket Proxy Reverse Proxy
+# socket-proxy/pkg/reverseproxy

 This package provides an HTTP reverse proxy implementation for proxying requests to Unix sockets (typically Docker sockets). It is based on Go's `net/http/httputil.ReverseProxy` with simplifications for socket proxying use cases.
```
```diff
@@ -16,7 +16,6 @@ This package provides an HTTP reverse proxy implementation for proxying requests
 1. **Director only**: Only the `Director` function is supported. The stdlib's `Rewrite` type and `ModifyResponse` hook are removed.

 2. **Context-aware body copying**: Uses `ioutils.CopyCloseWithContext` which:

    - Respects request context for cancellation
    - Uses `Content-Length` for optimal copying when available
    - Properly handles trailer headers
```
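A rough sketch of what context-aware copying means here — a hypothetical simplification of ioutils.CopyCloseWithContext; the real helper additionally uses Content-Length for optimal copying and handles trailer headers:

```go
package reverseproxy

import (
	"context"
	"io"
)

// copyWithContext is a plain copy loop that aborts once ctx is
// cancelled, instead of blocking until the body is drained.
func copyWithContext(ctx context.Context, dst io.Writer, src io.Reader) (int64, error) {
	buf := make([]byte, 32*1024)
	var written int64
	for {
		if err := ctx.Err(); err != nil {
			return written, err
		}
		n, err := src.Read(buf)
		if n > 0 {
			wn, werr := dst.Write(buf[:n])
			written += int64(wn)
			if werr != nil {
				return written, werr
			}
		}
		if err == io.EOF {
			return written, nil
		}
		if err != nil {
			return written, err
		}
	}
}
```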