3 Commits

Author          SHA1        Message                                                                            Date
Gianni Carafa   8b3551659f  run make file in build job                                                         2023-12-05 15:07:12 +01:00
Kevin Pham      f78c958334  Merge pull request #57 from jgillies/change-form-type (Change form type to url)   2023-12-03 21:33:11 -06:00
Jesse Gillies   d6925e53c2  change form type to url                                                            2023-12-03 21:10:43 -05:00
106 changed files with 601 additions and 6659 deletions

View File

@@ -7,21 +7,21 @@ tmp_dir = "tmp"
bin = "./tmp/main"
cmd = "go build -o ./tmp/main ./cmd"
delay = 1000
exclude_dir = ["assets", "tmp", "vendor", "testdata",]
exclude_file = ["proxychain/ruleset/rule_resmod_types.gen.go", "proxychain/ruleset/rule_reqmod_types.gen.go"]
exclude_dir = ["assets", "tmp", "vendor", "testdata"]
exclude_file = []
exclude_regex = ["_test.go"]
exclude_unchanged = false
follow_symlink = false
full_bin = "./tmp/main --ruleset ./ruleset.yaml"
full_bin = "RULESET=./ruleset.yaml ./tmp/main"
include_dir = []
include_ext = ["go", "tpl", "tmpl", "yaml", "html", "js"]
include_ext = ["go", "tpl", "tmpl", "yaml", "html"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
poll = false
poll_interval = 0
post_cmd = []
pre_cmd = ["git submodule update --init --recursive; git rev-parse --short HEAD > handlers/VERSION; git rev-parse --short HEAD > cmd/VERSION; cd proxychain/codegen && go run codegen.go"]
pre_cmd = ["echo 'dev' > handlers/VERSION"]
rerun = false
rerun_delay = 500
send_interrupt = false

View File

@@ -4,7 +4,6 @@ on:
push:
paths:
- "handlers/form.html"
- "proxychain/responsemodifiers/generate_readable_outline.html"
workflow_dispatch:
jobs:

3 .gitignore vendored
View File

@@ -2,5 +2,4 @@
ladder
VERSION
output.css
.aider*
output.css

6 .gitmodules vendored
View File

@@ -1,6 +0,0 @@
[submodule "proxychain/responsemodifiers/vendor/ddg-tracker-surrogates"]
path = proxychain/responsemodifiers/vendor/ddg-tracker-surrogates
url = https://github.com/duckduckgo/tracker-surrogates
[submodule "proxychain/requestmodifiers/vendor/ua-parser-js"]
path = proxychain/requestmodifiers/vendor/ua-parser-js
url = https://github.com/faisalman/ua-parser-js.git

View File

@@ -7,7 +7,7 @@ COPY . .
RUN go mod download
RUN CGO_ENABLED=0 GOOS=linux go build -o ladder cmd/main.go
RUN make build
FROM debian:12-slim as release
@@ -18,8 +18,4 @@ RUN chmod +x /app/ladder
RUN apt update && apt install -y ca-certificates && rm -rf /var/lib/apt/lists/*
#EXPOSE 8080
#ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["sh", "-c", "/app/ladder"]

View File

@@ -1,10 +1,3 @@
build:
cd proxychain/codegen && go run codegen.go
git submodule update --init --recursive
git rev-parse --short HEAD > handlers/VERSION
git rev-parse --short HEAD > cmd/VERSION
go build -o ladder -ldflags="-s -w" cmd/main.go
lint:
gofumpt -l -w .
golangci-lint run -c .golangci-lint.yaml --fix
@@ -14,7 +7,4 @@ lint:
install-linters:
go install mvdan.cc/gofumpt@latest
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2
run:
go run ./cmd/.
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2

View File

@@ -189,10 +189,7 @@ There is a basic ruleset available in a separate repository [ruleset.yaml](https
To run a development server at http://localhost:8080:
```bash
git clone git@github.com-ladddder:everywall/ladder.git
git submodule update --init --recursive
echo "dev " > handlers/VERSION
echo "dev " > cmd/VERSION
echo "dev" > handlers/VERSION
RULESET="./ruleset.yaml" go run cmd/main.go
```

View File

Image diff: 15 KiB before, 15 KiB after (dimensions not shown).

View File

@@ -1,23 +1,26 @@
package main
import (
_ "embed"
"embed"
"fmt"
"html/template"
"log"
"os"
"strings"
"ladder/handlers"
"ladder/internal/cli"
"ladder/proxychain/requestmodifiers/bot"
"ladder/handlers/cli"
"github.com/akamensky/argparse"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/template/html/v2"
"github.com/gofiber/fiber/v2/middleware/basicauth"
"github.com/gofiber/fiber/v2/middleware/favicon"
)
//go:embed VERSION
var version string
//go:embed favicon.ico
var faviconData string
//go:embed styles.css
var cssData embed.FS
func main() {
parser := argparse.NewParser("ladder", "Every Wall needs a Ladder")
@@ -38,23 +41,6 @@ func main() {
Help: "This will spawn multiple processes listening",
})
verbose := parser.Flag("v", "verbose", &argparse.Options{
Required: false,
Help: "Adds verbose logging",
})
randomGoogleBot := parser.Flag("", "random-googlebot", &argparse.Options{
Required: false,
Help: "Update the list of trusted Googlebot IPs, and use a random one for each masqueraded request",
})
randomBingBot := parser.Flag("", "random-bingbot", &argparse.Options{
Required: false,
Help: "Update the list of trusted Bingbot IPs, and use a random one for each masqueraded request",
})
// TODO: add version flag that reads from handers/VERSION
ruleset := parser.String("r", "ruleset", &argparse.Options{
Required: false,
Help: "File, Directory or URL to a ruleset.yaml. Overrides RULESET environment variable.",
@@ -80,29 +66,13 @@ func main() {
fmt.Print(parser.Usage(err))
}
if *randomGoogleBot {
err := bot.GoogleBot.UpdatePool("https://developers.google.com/static/search/apis/ipranges/googlebot.json")
if err != nil {
fmt.Println("error while retrieving list of Googlebot IPs: " + err.Error())
fmt.Println("defaulting to known trusted Googlebot identity")
}
}
if *randomBingBot {
err := bot.BingBot.UpdatePool("https://www.bing.com/toolbox/bingbot.json")
if err != nil {
fmt.Println("error while retrieving list of Bingbot IPs: " + err.Error())
fmt.Println("defaulting to known trusted Bingbot identity")
}
}
// utility cli flag to compile ruleset directory into single ruleset.yaml
if *mergeRulesets || *mergeRulesetsGzip {
output := os.Stdout
if *mergeRulesetsOutput != "" {
output, err = os.Create(*mergeRulesetsOutput)
if err != nil {
fmt.Println(err)
os.Exit(1)
@@ -121,26 +91,28 @@ func main() {
*prefork = true
}
engine := html.New("./handlers", ".html")
engine.AddFunc(
// add unescape function
"unescape", func(s string) template.HTML {
return template.HTML(s)
},
)
app := fiber.New(
fiber.Config{
Prefork: *prefork,
GETOnly: false,
ReadBufferSize: 4096 * 4, // increase max header size
DisableStartupMessage: true,
Views: engine,
Prefork: *prefork,
GETOnly: true,
},
)
app.Use(handlers.Auth())
app.Use(handlers.Favicon())
userpass := os.Getenv("USERPASS")
if userpass != "" {
userpass := strings.Split(userpass, ":")
app.Use(basicauth.New(basicauth.Config{
Users: map[string]string{
userpass[0]: userpass[1],
},
}))
}
app.Use(favicon.New(favicon.Config{
Data: []byte(faviconData),
URL: "/favicon.ico",
}))
if os.Getenv("NOLOGS") != "true" {
app.Use(func(c *fiber.Ctx) error {
@@ -152,22 +124,21 @@ func main() {
app.Get("/", handlers.Form)
app.Get("styles.css", handlers.Styles)
app.Get("script.js", handlers.Script)
app.Get("/styles.css", func(c *fiber.Ctx) error {
cssData, err := cssData.ReadFile("styles.css")
if err != nil {
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
}
c.Set("Content-Type", "text/css")
return c.Send(cssData)
})
app.Get("ruleset", handlers.Ruleset)
app.Get("raw/*", handlers.Raw)
app.Get("api/*", handlers.Api)
app.Get("/*", handlers.ProxySite(*ruleset))
proxyOpts := &handlers.ProxyOptions{
Verbose: *verbose,
RulesetPath: *ruleset,
}
app.Get("api/content/*", handlers.NewAPIContentHandler("api/outline/*", proxyOpts))
app.Get("outline/*", handlers.NewOutlineHandler("outline/*", proxyOpts))
app.All("/*", handlers.NewProxySiteHandler(proxyOpts))
fmt.Println(cli.StartupMessage(version, *port, *ruleset))
log.Fatal(app.Listen(":" + *port))
}

1 cmd/styles.css Normal file

File diff suppressed because one or more lines are too long

View File

@@ -3,7 +3,7 @@ services:
ladder:
image: ghcr.io/everywall/ladder:latest
container_name: ladder
#build: .
build: .
#restart: always
#command: sh -c ./ladder
environment:

44 go.mod
View File

@@ -3,51 +3,19 @@ module ladder
go 1.21.1
require (
github.com/PuerkitoBio/goquery v1.8.1
github.com/akamensky/argparse v1.4.0
github.com/bogdanfinn/fhttp v0.5.24
github.com/bogdanfinn/tls-client v1.6.1
github.com/go-shiori/dom v0.0.0-20230515143342-73569d674e1c
github.com/gofiber/fiber/v2 v2.50.0
github.com/markusmobius/go-trafilatura v1.5.1
github.com/stretchr/testify v1.8.4
gopkg.in/yaml.v3 v3.0.1
)
require (
github.com/abadojack/whatlanggo v1.0.1 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/bogdanfinn/utls v1.5.16 // indirect
github.com/elliotchance/pie/v2 v2.8.0 // indirect
github.com/forPelevin/gomoji v1.1.8 // indirect
github.com/go-shiori/go-readability v0.0.0-20231029095239-6b97d5aba789 // indirect
github.com/gofiber/template v1.8.2 // indirect
github.com/gofiber/utils v1.1.0 // indirect
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect
github.com/hablullah/go-hijri v1.0.2 // indirect
github.com/hablullah/go-juliandays v1.0.0 // indirect
github.com/jalaali/go-jalaali v0.0.0-20210801064154-80525e88d958 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/markusmobius/go-dateparser v1.2.1 // indirect
github.com/markusmobius/go-domdistiller v0.0.0-20230515154422-71af71939ff3 // indirect
github.com/markusmobius/go-htmldate v1.2.2 // indirect
github.com/rs/zerolog v1.31.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 // indirect
github.com/tetratelabs/wazero v1.5.0 // indirect
github.com/wasilibs/go-re2 v1.4.1 // indirect
github.com/yosssi/gohtml v0.0.0-20201013000340-ee4748c638f4 // indirect
golang.org/x/crypto v0.16.0 // indirect
golang.org/x/exp v0.0.0-20231127185646-65229373498e // indirect
golang.org/x/text v0.14.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)
require (
github.com/andybalholm/brotli v1.0.6 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/gofiber/template/html/v2 v2.0.5
github.com/google/uuid v1.4.0 // indirect
github.com/klauspost/compress v1.17.3 // indirect
github.com/klauspost/compress v1.17.2 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
@@ -56,7 +24,7 @@ require (
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasthttp v1.50.0 // indirect
github.com/valyala/tcplisten v1.0.0 // indirect
golang.org/x/net v0.19.0
golang.org/x/sys v0.15.0 // indirect
golang.org/x/term v0.15.0
golang.org/x/net v0.18.0 // indirect
golang.org/x/sys v0.14.0 // indirect
golang.org/x/term v0.14.0
)

105 go.sum
View File

@@ -1,160 +1,87 @@
github.com/abadojack/whatlanggo v1.0.1 h1:19N6YogDnf71CTHm3Mp2qhYfkRdyvbgwWdd2EPxJRG4=
github.com/abadojack/whatlanggo v1.0.1/go.mod h1:66WiQbSbJBIlOZMsvbKe5m6pzQovxCH9B/K8tQB2uoc=
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
github.com/akamensky/argparse v1.4.0 h1:YGzvsTqCvbEZhL8zZu2AiA5nq805NZh75JNj4ajn1xc=
github.com/akamensky/argparse v1.4.0/go.mod h1:S5kwC7IuDcEr5VeXtGPRVZ5o/FdhcMlQz4IZQuw64xA=
github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/bogdanfinn/fhttp v0.5.24 h1:OlyBKjvJp6a3TotN3wuj4mQHHRbfK7QUMrzCPOZGhRc=
github.com/bogdanfinn/fhttp v0.5.24/go.mod h1:brqi5woc5eSCVHdKYBV8aZLbO7HGqpwyDLeXW+fT18I=
github.com/bogdanfinn/tls-client v1.6.1 h1:GTIqQssFoIvLaDf4btoYRzDhUzudLqYD4axvfUCXl3I=
github.com/bogdanfinn/tls-client v1.6.1/go.mod h1:FtwQ3DndVZ0xAOO704v4iNAgbHOcEc5kPk9tjICTNQ0=
github.com/bogdanfinn/utls v1.5.16 h1:NhhWkegEcYETBMj9nvgO4lwvc6NcLH+znrXzO3gnw4M=
github.com/bogdanfinn/utls v1.5.16/go.mod h1:mHeRCi69cUiEyVBkKONB1cAbLjRcZnlJbGzttmiuK4o=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/elliotchance/pie/v2 v2.8.0 h1://QS43W8sEha8XV/fjngO5iMudN3XARJV5cpBayAcVY=
github.com/elliotchance/pie/v2 v2.8.0/go.mod h1:18t0dgGFH006g4eVdDtWfgFZPQEgl10IoEO8YWEq3Og=
github.com/forPelevin/gomoji v1.1.8 h1:JElzDdt0TyiUlecy6PfITDL6eGvIaxqYH1V52zrd0qQ=
github.com/forPelevin/gomoji v1.1.8/go.mod h1:8+Z3KNGkdslmeGZBC3tCrwMrcPy5GRzAD+gL9NAwMXg=
github.com/go-shiori/dom v0.0.0-20230515143342-73569d674e1c h1:wpkoddUomPfHiOziHZixGO5ZBS73cKqVzZipfrLmO1w=
github.com/go-shiori/dom v0.0.0-20230515143342-73569d674e1c/go.mod h1:oVDCh3qjJMLVUSILBRwrm+Bc6RNXGZYtoh9xdvf1ffM=
github.com/go-shiori/go-readability v0.0.0-20231029095239-6b97d5aba789 h1:G6wSuUyCoLB9jrUokipsmFuRi8aJozt3phw/g9Sl4Xs=
github.com/go-shiori/go-readability v0.0.0-20231029095239-6b97d5aba789/go.mod h1:2DpZlTJO/ycxp/vsc/C11oUyveStOgIXB88SYV1lncI=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofiber/fiber/v2 v2.50.0 h1:ia0JaB+uw3GpNSCR5nvC5dsaxXjRU5OEu36aytx+zGw=
github.com/gofiber/fiber/v2 v2.50.0/go.mod h1:21eytvay9Is7S6z+OgPi7c7n4++tnClWmhpimVHMimw=
github.com/gofiber/template v1.8.2 h1:PIv9s/7Uq6m+Fm2MDNd20pAFFKt5wWs7ZBd8iV9pWwk=
github.com/gofiber/template v1.8.2/go.mod h1:bs/2n0pSNPOkRa5VJ8zTIvedcI/lEYxzV3+YPXdBvq8=
github.com/gofiber/template/html/v2 v2.0.5 h1:BKLJ6Qr940NjntbGmpO3zVa4nFNGDCi/IfUiDB9OC20=
github.com/gofiber/template/html/v2 v2.0.5/go.mod h1:RCF14eLeQDCSUPp0IGc2wbSSDv6yt+V54XB/+Unz+LM=
github.com/gofiber/utils v1.1.0 h1:vdEBpn7AzIUJRhe+CiTOJdUcTg4Q9RK+pEa0KPbLdrM=
github.com/gofiber/utils v1.1.0/go.mod h1:poZpsnhBykfnY1Mc0KeEa6mSHrS3dV0+oBWyeQmb2e0=
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f h1:3BSP1Tbs2djlpprl7wCLuiqMaUh5SJkkzI2gDs+FgLs=
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14=
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/hablullah/go-hijri v1.0.2 h1:drT/MZpSZJQXo7jftf5fthArShcaMtsal0Zf/dnmp6k=
github.com/hablullah/go-hijri v1.0.2/go.mod h1:OS5qyYLDjORXzK4O1adFw9Q5WfhOcMdAKglDkcTxgWQ=
github.com/hablullah/go-juliandays v1.0.0 h1:A8YM7wIj16SzlKT0SRJc9CD29iiaUzpBLzh5hr0/5p0=
github.com/hablullah/go-juliandays v1.0.0/go.mod h1:0JOYq4oFOuDja+oospuc61YoX+uNEn7Z6uHYTbBzdGc=
github.com/jalaali/go-jalaali v0.0.0-20210801064154-80525e88d958 h1:qxLoi6CAcXVzjfvu+KXIXJOAsQB62LXjsfbOaErsVzE=
github.com/jalaali/go-jalaali v0.0.0-20210801064154-80525e88d958/go.mod h1:Wqfu7mjUHj9WDzSSPI5KfBclTTEnLveRUFr/ujWnTgE=
github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA=
github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
github.com/markusmobius/go-dateparser v1.2.1 h1:mYRRdu3TzpAeE6fSl2Gn3arfxEtoTRvFOKlumlVsUtg=
github.com/markusmobius/go-dateparser v1.2.1/go.mod h1:5xYsZ1h7iB3sE1BSu8bkjYpbFST7EU1/AFxcyO3mgYg=
github.com/markusmobius/go-domdistiller v0.0.0-20230515154422-71af71939ff3 h1:D83RvMz1lQ0ilKlJt6DWc65+Q77CXGRFmfihR0bfQvc=
github.com/markusmobius/go-domdistiller v0.0.0-20230515154422-71af71939ff3/go.mod h1:n1AYw0wiJDT3YXnIsElJPiDR63YGXT2yv3uq0CboGmU=
github.com/markusmobius/go-htmldate v1.2.2 h1:tp1IxhefCYpEoL9CM1LiU6l+2YayTpuTjkkdnik6hXE=
github.com/markusmobius/go-htmldate v1.2.2/go.mod h1:26VRz16sCosuiv42MNRW9iPBGnGLo+q/Z6TWitt8uzs=
github.com/markusmobius/go-trafilatura v1.5.1 h1:EXhZY2AVRyepUlLZHeuZUme3v7Ms9G8lDOLl4u+Jp5M=
github.com/markusmobius/go-trafilatura v1.5.1/go.mod h1:FhuBBPZ9ph4ufpGBKAkuq5oQwEhg0KKnIOUlv5h7EHg=
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 h1:YqAladjX7xpA6BM04leXMWAEjS0mTZ5kUU9KRBriQJc=
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5/go.mod h1:2JjD2zLQYH5HO74y5+aE3remJQvl6q4Sn6aWA2wD1Ng=
github.com/tetratelabs/wazero v1.5.0 h1:Yz3fZHivfDiZFUXnWMPUoiW7s8tC1sjdBtlJn08qYa0=
github.com/tetratelabs/wazero v1.5.0/go.mod h1:0U0G41+ochRKoPKCJlh0jMg1CHkyfK8kDqiirMmKY8A=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.50.0 h1:H7fweIlBm0rXLs2q0XbalvJ6r0CUPFWK3/bB4N13e9M=
github.com/valyala/fasthttp v1.50.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
github.com/wasilibs/go-re2 v1.4.1 h1:E5+9O1M8UoGeqLB2A9omeoaWImqpuYDs9cKwvTJq/Oo=
github.com/wasilibs/go-re2 v1.4.1/go.mod h1:ynB8eCwd9JsqUnsk8WlPDk6cEeme8BguZmnqOSURE4Y=
github.com/wasilibs/nottinygc v0.4.0 h1:h1TJMihMC4neN6Zq+WKpLxgd9xCFMw7O9ETLwY2exJQ=
github.com/wasilibs/nottinygc v0.4.0/go.mod h1:oDcIotskuYNMpqMF23l7Z8uzD4TC0WXHK8jetlB3HIo=
github.com/yosssi/gohtml v0.0.0-20201013000340-ee4748c638f4 h1:0sw0nJM544SpsihWx1bkXdYLQDlzRflMgFJQ4Yih9ts=
github.com/yosssi/gohtml v0.0.0-20201013000340-ee4748c638f4/go.mod h1:+ccdNT0xMY1dtc5XBxumbYfOUhmduiGudqaDgD2rVRE=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY=
golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/exp v0.0.0-20231127185646-65229373498e h1:Gvh4YaCaXNs6dKTlfgismwWZKyjVZXwOPfIyUaqU3No=
golang.org/x/exp v0.0.0-20231127185646-65229373498e/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q=
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8=
golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U=
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -2,6 +2,7 @@ package handlers
import (
_ "embed"
"log"
"github.com/gofiber/fiber/v2"
)
@@ -11,5 +12,48 @@ import (
var version string
func Api(c *fiber.Ctx) error {
return nil
// Get the url from the URL
urlQuery := c.Params("*")
queries := c.Queries()
body, req, resp, err := fetchSite(urlQuery, queries)
if err != nil {
log.Println("ERROR:", err)
c.SendStatus(500)
return c.SendString(err.Error())
}
response := Response{
Version: version,
Body: body,
}
response.Request.Headers = make([]any, 0, len(req.Header))
for k, v := range req.Header {
response.Request.Headers = append(response.Request.Headers, map[string]string{
"key": k,
"value": v[0],
})
}
response.Response.Headers = make([]any, 0, len(resp.Header))
for k, v := range resp.Header {
response.Response.Headers = append(response.Response.Headers, map[string]string{
"key": k,
"value": v[0],
})
}
return c.JSON(response)
}
type Response struct {
Version string `json:"version"`
Body string `json:"body"`
Request struct {
Headers []interface{} `json:"headers"`
} `json:"request"`
Response struct {
Headers []interface{} `json:"headers"`
} `json:"response"`
}
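
Editor's note (not part of the diff): a minimal Go sketch of how a client might consume the JSON this reinstated Api handler returns, assuming a ladder instance on localhost:8080 with the handler mounted at api/* as in cmd/main.go; the target URL is a placeholder.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// apiResponse mirrors the Response struct from handlers/api.go above.
type apiResponse struct {
	Version string `json:"version"`
	Body    string `json:"body"`
	Request struct {
		Headers []map[string]string `json:"headers"`
	} `json:"request"`
	Response struct {
		Headers []map[string]string `json:"headers"`
	} `json:"response"`
}

func main() {
	// Placeholder URL: a local ladder instance proxying example.com via the API route.
	resp, err := http.Get("http://localhost:8080/api/https://example.com")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out apiResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println("ladder version:", out.Version)
	fmt.Println("proxied body bytes:", len(out.Body))
	for _, h := range out.Response.Headers {
		fmt.Println("upstream header:", h["key"], "=", h["value"])
	}
}
```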

View File

@@ -1,44 +0,0 @@
package handlers
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
tx "ladder/proxychain/responsemodifiers"
"github.com/gofiber/fiber/v2"
)
func NewAPIContentHandler(path string, opts *ProxyOptions) fiber.Handler {
// TODO: implement ruleset logic
/*
var rs ruleset.RuleSet
if opts.RulesetPath != "" {
r, err := ruleset.NewRuleset(opts.RulesetPath)
if err != nil {
panic(err)
}
rs = r
}
*/
return func(c *fiber.Ctx) error {
proxychain := proxychain.
NewProxyChain().
WithAPIPath(path).
SetDebugLogging(opts.Verbose).
SetRequestModifications(
rx.MasqueradeAsGoogleBot(),
rx.ForwardRequestHeaders(),
rx.SpoofReferrerFromGoogleSearch(),
).
AddResponseModifications(
tx.DeleteIncomingCookies(),
tx.RewriteHTMLResourceURLs(),
tx.APIContent(),
).
SetFiberCtx(c).
Execute()
return proxychain
}
}

View File

@@ -1,25 +0,0 @@
package handlers
import (
"os"
"strings"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/basicauth"
)
func Auth() fiber.Handler {
userpass := os.Getenv("USERPASS")
if userpass != "" {
userpass := strings.Split(userpass, ":")
return basicauth.New(basicauth.Config{
Users: map[string]string{
userpass[0]: userpass[1],
},
})
}
return func(c *fiber.Ctx) error {
return c.Next()
}
}

View File

@@ -1,18 +0,0 @@
package handlers
import (
_ "embed"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/favicon"
)
//go:embed favicon.ico
var faviconData string
func Favicon() fiber.Handler {
return favicon.New(favicon.Config{
Data: []byte(faviconData),
URL: "/favicon.ico",
})
}

View File

@@ -1,351 +1,79 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="/styles.css" />
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>ladder</title>
<script src="/script.js" defer></script>
<script>
const handleThemeChange = () => {
let theme = localStorage.getItem("theme");
if (theme === null) {
localStorage.setItem("theme", "system");
theme = "system";
}
if (
theme === "dark" ||
(theme === "system" &&
window.matchMedia("(prefers-color-scheme: dark)").matches)
) {
document.documentElement.classList.add("dark");
} else {
document.documentElement.classList.remove("dark");
}
};
handleThemeChange();
</script>
</head>
<link rel="stylesheet" href="/styles.css">
</head>
<body class="antialiased bg-white dark:bg-slate-900">
<div class="flex flex-col gap-4 max-w-3xl mx-auto pt-10">
<div class="place-self-end z-10">
<div class="relative" id="dropdown">
<button
aria-expanded="closed"
onclick="toggleDropdown()"
type="button"
class="inline-flex items-center justify-center whitespace-nowrap rounded-full h-12 px-4 py-2 text-sm font-medium text-slate-600 dark:text-slate-400 ring-offset-white dark:ring-offset-slate-900 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-white dark:bg-slate-900 hover:bg-slate-200 dark:hover:bg-slate-700 hover:text-slate-500 dark:hover:text-slate-200"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<path
d="M12.22 2h-.44a2 2 0 0 0-2 2v.18a2 2 0 0 1-1 1.73l-.43.25a2 2 0 0 1-2 0l-.15-.08a2 2 0 0 0-2.73.73l-.22.38a2 2 0 0 0 .73 2.73l.15.1a2 2 0 0 1 1 1.72v.51a2 2 0 0 1-1 1.74l-.15.09a2 2 0 0 0-.73 2.73l.22.38a2 2 0 0 0 2.73.73l.15-.08a2 2 0 0 1 2 0l.43.25a2 2 0 0 1 1 1.73V20a2 2 0 0 0 2 2h.44a2 2 0 0 0 2-2v-.18a2 2 0 0 1 1-1.73l.43-.25a2 2 0 0 1 2 0l.15.08a2 2 0 0 0 2.73-.73l.22-.39a2 2 0 0 0-.73-2.73l-.15-.08a2 2 0 0 1-1-1.74v-.5a2 2 0 0 1 1-1.74l.15-.09a2 2 0 0 0 .73-2.73l-.22-.38a2 2 0 0 0-2.73-.73l-.15.08a2 2 0 0 1-2 0l-.43-.25a2 2 0 0 1-1-1.73V4a2 2 0 0 0-2-2z"
/>
<circle cx="12" cy="12" r="3" />
</svg>
</button>
<div
id="dropdown_panel"
class="hidden absolute right-0 mt-2 w-52 rounded-md bg-white dark:bg-slate-900 text-slate-900 dark:text-slate-200 shadow-md border border-slate-400 dark:border-slate-700"
>
<div
class="flex flex-col gap-2 w-full first-of-type:rounded-t-md last-of-type:rounded-b-md px-4 py-2.5 text-left text-sm"
>
Appearance
<div class="grid grid-cols-4 gap-2">
<div>
<input
type="radio"
name="theme"
id="light"
value="light"
class="peer hidden"
/>
<label
for="light"
tabindex="0"
title="Light"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-sm text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<circle cx="12" cy="12" r="4" />
<path d="M12 2v2" />
<path d="M12 20v2" />
<path d="m4.93 4.93 1.41 1.41" />
<path d="m17.66 17.66 1.41 1.41" />
<path d="M2 12h2" />
<path d="M20 12h2" />
<path d="m6.34 17.66-1.41 1.41" />
<path d="m19.07 4.93-1.41 1.41" />
</svg>
</label>
</div>
<div>
<input
type="radio"
name="theme"
id="dark"
value="dark"
class="peer hidden"
/>
<label
for="dark"
tabindex="0"
title="Dark"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-base text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<path d="M12 3a6 6 0 0 0 9 9 9 9 0 1 1-9-9Z" />
</svg>
</label>
</div>
<div>
<input
type="radio"
name="theme"
id="system"
value="system"
class="peer hidden"
checked
/>
<label
for="system"
tabindex="0"
title="System preference"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-lg text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<path d="M12 8a2.83 2.83 0 0 0 4 4 4 4 0 1 1-4-4" />
<path d="M12 2v2" />
<path d="M12 20v2" />
<path d="m4.9 4.9 1.4 1.4" />
<path d="m17.7 17.7 1.4 1.4" />
<path d="M2 12h2" />
<path d="M20 12h2" />
<path d="m6.3 17.7-1.4 1.4" />
<path d="m19.1 4.9-1.4 1.4" />
</svg>
</label>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="mx-auto -mt-12">
<svg
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
viewBox="0 0 512 512"
class="h-[250px] hover:drop-shadow-[0_0px_10px_rgba(122,167,209,.3)] transition-colors duration-300"
>
<path
fill="#7AA7D1"
d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"
/>
<path
fill="#7AA7D1"
d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.680 111.990 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"
/>
<body class="antialiased text-slate-500 dark:text-slate-400 bg-white dark:bg-slate-900">
<div class="grid grid-cols-1 gap-4 max-w-3xl mx-auto pt-10">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="100%" height="250" viewBox="0 0 512 512">
<path fill="#7AA7D1" d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"/>
<path fill="#7AA7D1" d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.680 111.990 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"/>
</svg>
</div>
<header>
<h1
class="text-center text-3xl sm:text-4xl font-extrabold text-slate-900 tracking-tight dark:text-slate-200 cursor-default"
>
ladddddddder
</h1>
</header>
<form id="inputForm" method="get" class="flex flex-col gap-2 mx-4">
<div class="relative">
<input
type="url"
id="inputField"
placeholder="Enter URL"
name="inputField"
class="w-full text-sm leading-6 text-slate-400 rounded-md ring-1 ring-slate-900/10 shadow-sm py-1.5 pl-2 pr-3 hover:ring-slate-300 dark:bg-slate-800 dark:highlight-white/5 dark:hover:bg-slate-700"
autocomplete="off"
autofocus
/>
<button
id="clearButton"
type="reset"
aria-label="Clear Search"
title="Clear Search"
class="hidden absolute inset-y-0 right-0 items-center pr-2 text-slate-600 dark:text-slate-400 hover:text-slate-400 hover:dark:text-slate-300"
tabindex="-1"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-4 w-4"
>
<path d="M18 6 6 18" />
<path d="m6 6 12 12" />
</svg>
</button>
<p
id="errorContainer"
class="absolute ml-2 left-0 -bottom-6 text-red-700 dark:text-red-400 text-sm"
/>
</div>
<div
class="flex flex-wrap-reverse mt-5 gap-x-10 gap-y-4 justify-center"
>
<button
type="button"
id="outlineButton"
class="inline-flex items-center justify-center h-11 px-8 whitespace-nowrap rounded-md text-sm font-medium text-slate-900 dark:text-slate-200 ring-offset-white dark:ring-offset-slate-900 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-slate-200 dark:bg-slate-800 hover:bg-slate-200/90 dark:hover:bg-slate-800/90"
>
Create Outline
</button>
<button
type="submit"
class="inline-flex items-center justify-center h-11 px-8 whitespace-nowrap rounded-md text-sm font-medium text-slate-200 dark:text-slate-900 ring-offset-white dark:ring-offset-slate-900 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-slate-800 dark:bg-slate-200 hover:bg-slate-800/90 dark:hover:bg-slate-200/90"
>
Proxy Search
</button>
</div>
</form>
<footer class="mx-4 text-center text-slate-600 dark:text-slate-400">
<p>
Code Licensed Under GPL v3.0 |
<a
href="https://github.com/everywall/ladder"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>View Source</a
>
|
<a
href="https://github.com/everywall"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>Everywall</a
>
</p>
</footer>
<header>
<h1 class="text-center text-3xl sm:text-4xl font-extrabold text-slate-900 tracking-tight dark:text-slate-200">ladddddddder</h1>
</header>
<form id="inputForm" method="get" class="mx-4 relative">
<div>
<input type="url" id="inputField" placeholder="Proxy Search" name="inputField" class="w-full text-sm leading-6 text-slate-400 rounded-md ring-1 ring-slate-900/10 shadow-sm py-1.5 pl-2 pr-3 hover:ring-slate-300 dark:bg-slate-800 dark:highlight-white/5 dark:hover:bg-slate-700" required autofocus>
<button id="clearButton" type="button" aria-label="Clear Search" title="Clear Search" class="hidden absolute inset-y-0 right-0 items-center pr-2 hover:text-slate-400 hover:dark:text-slate-300" tabindex="-1">
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round""><path d="M18 6 6 18"/><path d="m6 6 12 12"/></svg>
</button>
</div>
</form>
<footer class="mt-10 mx-4 text-center text-slate-600 dark:text-slate-400">
<p>
Code Licensed Under GPL v3.0 |
<a href="https://github.com/everywall/ladder" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">View Source</a> |
<a href="https://github.com/everywall" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">Everywall</a>
</p>
</footer>
</div>
<script>
function validateAndRedirect(destination) {
let url = inputField.value;
let error = "";
if (!url || typeof url !== "string") {
error = "Please enter a valid URL.";
}
if (typeof url === "string" && url.indexOf("http") === -1) {
url = "https://" + url;
}
const urlPattern = /^(https?:\/\/)?([\w.-]+)\.([a-z]{2,})(\/\S*)?$/i;
if (!urlPattern.test(url)) {
error = "Please enter a valid URL.";
}
if (error) {
errorContainer.textContent = error;
return false;
}
const redirectUrl =
destination === "outline" ? "/outline/" + url : "/" + url;
window.location.href = redirectUrl;
return true;
}
function clearInput() {
inputField.value = "";
clearButton.style.display = "none";
errorContainer.textContent = "";
inputField.focus();
}
document
.getElementById("inputForm")
.addEventListener("submit", function (e) {
e.preventDefault();
validateAndRedirect("default");
});
document
.getElementById("outlineButton")
.addEventListener("click", function () {
validateAndRedirect("outline");
});
const inputField = document.getElementById("inputField");
const clearButton = document.getElementById("clearButton");
const errorContainer = document.getElementById("errorContainer");
if (inputField !== null && clearButton !== null) {
inputField.addEventListener("input", () => {
const clearButton = document.getElementById("clearButton");
if (clearButton !== null) {
if (inputField.value.trim().length > 0) {
clearButton.style.display = "block";
} else {
clearButton.style.display = "none";
document.getElementById('inputForm').addEventListener('submit', function (e) {
e.preventDefault();
let url = document.getElementById('inputField').value;
if (url.indexOf('http') === -1) {
url = 'https://' + url;
}
}
window.location.href = '/' + url;
return false;
});
inputField.addEventListener("keydown", (event) => {
if (event.code === "Escape") {
clearInput();
}
document.getElementById('inputField').addEventListener('input', function() {
const clearButton = document.getElementById('clearButton');
if (this.value.trim().length > 0) {
clearButton.style.display = 'block';
} else {
clearButton.style.display = 'none';
}
});
clearButton.addEventListener("click", () => {
clearInput();
document.getElementById('clearButton').addEventListener('click', function() {
document.getElementById('inputField').value = '';
this.style.display = 'none';
document.getElementById('inputField').focus();
});
}
</script>
</body>
<style>
@media (prefers-color-scheme: light) {
body {
background-color: #ffffff;
color: #333333;
}
}
@media (prefers-color-scheme: dark) {
body {
background-color: #1a202c;
color: #ffffff;
}
}
</style>
</body>
</html>

View File

@@ -1,30 +0,0 @@
package handlers
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
tx "ladder/proxychain/responsemodifiers"
"github.com/gofiber/fiber/v2"
)
func NewOutlineHandler(path string, opts *ProxyOptions) fiber.Handler {
return func(c *fiber.Ctx) error {
return proxychain.
NewProxyChain().
WithAPIPath(path).
SetDebugLogging(opts.Verbose).
SetRequestModifications(
rx.MasqueradeAsGoogleBot(),
rx.ForwardRequestHeaders(),
rx.SpoofReferrerFromGoogleSearch(),
).
AddResponseModifications(
tx.DeleteIncomingCookies(),
tx.RewriteHTMLResourceURLs(),
tx.GenerateReadableOutline(), // <-- this response modification does the outline rendering
).
SetFiberCtx(c).
Execute()
}
}

View File

@@ -1,59 +1,330 @@
package handlers
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
tx "ladder/proxychain/responsemodifiers"
"fmt"
"io"
"log"
"net/http"
"net/url"
"os"
"regexp"
"strings"
"ladder/pkg/ruleset"
"github.com/PuerkitoBio/goquery"
"github.com/gofiber/fiber/v2"
)
type ProxyOptions struct {
RulesetPath string
Verbose bool
}
var (
UserAgent = getenv("USER_AGENT", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
ForwardedFor = getenv("X_FORWARDED_FOR", "66.249.66.1")
rulesSet = ruleset.NewRulesetFromEnv()
allowedDomains = []string{}
)
func NewProxySiteHandler(opts *ProxyOptions) fiber.Handler {
/*
var rs ruleset.RuleSet
if opts.RulesetPath != "" {
r, err := ruleset.NewRuleset(opts.RulesetPath)
if err != nil {
panic(err)
}
rs = r
}
*/
return func(c *fiber.Ctx) error {
proxychain := proxychain.
NewProxyChain().
SetFiberCtx(c).
SetDebugLogging(opts.Verbose).
SetRequestModifications(
// rx.SpoofJA3fingerprint(ja3, "Googlebot"),
// rx.MasqueradeAsFacebookBot(),
// rx.MasqueradeAsGoogleBot(),
rx.DeleteOutgoingCookies(),
rx.ForwardRequestHeaders(),
// rx.SpoofReferrerFromGoogleSearch(),
rx.SpoofReferrerFromLinkedInPost(),
// rx.RequestWaybackMachine(),
// rx.RequestArchiveIs(),
).
AddResponseModifications(
tx.ForwardResponseHeaders(),
tx.BypassCORS(),
tx.BypassContentSecurityPolicy(),
// tx.DeleteIncomingCookies(),
tx.RewriteHTMLResourceURLs(),
tx.PatchTrackerScripts(),
tx.PatchDynamicResourceURLs(),
tx.BlockElementRemoval(".article-content"),
// tx.SetContentSecurityPolicy("default-src * 'unsafe-inline' 'unsafe-eval' data: blob:;"),
).
Execute()
return proxychain
func init() {
allowedDomains = strings.Split(os.Getenv("ALLOWED_DOMAINS"), ",")
if os.Getenv("ALLOWED_DOMAINS_RULESET") == "true" {
allowedDomains = append(allowedDomains, rulesSet.Domains()...)
}
}
// extracts a URL from the request ctx. If the URL in the request
// is a relative path, it reconstructs the full URL using the referer header.
func extractUrl(c *fiber.Ctx) (string, error) {
// try to extract url-encoded
reqUrl, err := url.QueryUnescape(c.Params("*"))
if err != nil {
// fallback
reqUrl = c.Params("*")
}
// Extract the actual path from req ctx
urlQuery, err := url.Parse(reqUrl)
if err != nil {
return "", fmt.Errorf("error parsing request URL '%s': %v", reqUrl, err)
}
isRelativePath := urlQuery.Scheme == ""
// eg: https://localhost:8080/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
if isRelativePath {
// Parse the referer URL from the request header.
refererUrl, err := url.Parse(c.Get("referer"))
if err != nil {
return "", fmt.Errorf("error parsing referer URL from req: '%s': %v", reqUrl, err)
}
// Extract the real url from referer path
realUrl, err := url.Parse(strings.TrimPrefix(refererUrl.Path, "/"))
if err != nil {
return "", fmt.Errorf("error parsing real URL from referer '%s': %v", refererUrl.Path, err)
}
// reconstruct the full URL using the referer's scheme, host, and the relative path / queries
fullUrl := &url.URL{
Scheme: realUrl.Scheme,
Host: realUrl.Host,
Path: urlQuery.Path,
RawQuery: urlQuery.RawQuery,
}
if os.Getenv("LOG_URLS") == "true" {
log.Printf("modified relative URL: '%s' -> '%s'", reqUrl, fullUrl.String())
}
return fullUrl.String(), nil
}
// default behavior:
// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
return urlQuery.String(), nil
}
func ProxySite(rulesetPath string) fiber.Handler {
if rulesetPath != "" {
rs, err := ruleset.NewRuleset(rulesetPath)
if err != nil {
panic(err)
}
rulesSet = rs
}
return func(c *fiber.Ctx) error {
// Get the url from the URL
url, err := extractUrl(c)
if err != nil {
log.Println("ERROR In URL extraction:", err)
}
queries := c.Queries()
body, _, resp, err := fetchSite(url, queries)
if err != nil {
log.Println("ERROR:", err)
c.SendStatus(fiber.StatusInternalServerError)
return c.SendString(err.Error())
}
c.Cookie(&fiber.Cookie{})
c.Set("Content-Type", resp.Header.Get("Content-Type"))
c.Set("Content-Security-Policy", resp.Header.Get("Content-Security-Policy"))
return c.SendString(body)
}
}
func modifyURL(uri string, rule ruleset.Rule) (string, error) {
newUrl, err := url.Parse(uri)
if err != nil {
return "", err
}
for _, urlMod := range rule.URLMods.Domain {
re := regexp.MustCompile(urlMod.Match)
newUrl.Host = re.ReplaceAllString(newUrl.Host, urlMod.Replace)
}
for _, urlMod := range rule.URLMods.Path {
re := regexp.MustCompile(urlMod.Match)
newUrl.Path = re.ReplaceAllString(newUrl.Path, urlMod.Replace)
}
v := newUrl.Query()
for _, query := range rule.URLMods.Query {
if query.Value == "" {
v.Del(query.Key)
continue
}
v.Set(query.Key, query.Value)
}
newUrl.RawQuery = v.Encode()
if rule.GoogleCache {
newUrl, err = url.Parse("https://webcache.googleusercontent.com/search?q=cache:" + newUrl.String())
if err != nil {
return "", err
}
}
return newUrl.String(), nil
}
func fetchSite(urlpath string, queries map[string]string) (string, *http.Request, *http.Response, error) {
urlQuery := "?"
if len(queries) > 0 {
for k, v := range queries {
urlQuery += k + "=" + v + "&"
}
}
urlQuery = strings.TrimSuffix(urlQuery, "&")
urlQuery = strings.TrimSuffix(urlQuery, "?")
u, err := url.Parse(urlpath)
if err != nil {
return "", nil, nil, err
}
if len(allowedDomains) > 0 && !StringInSlice(u.Host, allowedDomains) {
return "", nil, nil, fmt.Errorf("domain not allowed. %s not in %s", u.Host, allowedDomains)
}
if os.Getenv("LOG_URLS") == "true" {
log.Println(u.String() + urlQuery)
}
// Modify the URI according to ruleset
rule := fetchRule(u.Host, u.Path)
url, err := modifyURL(u.String()+urlQuery, rule)
if err != nil {
return "", nil, nil, err
}
// Fetch the site
client := &http.Client{}
req, _ := http.NewRequest("GET", url, nil)
if rule.Headers.UserAgent != "" {
req.Header.Set("User-Agent", rule.Headers.UserAgent)
} else {
req.Header.Set("User-Agent", UserAgent)
}
if rule.Headers.XForwardedFor != "" {
if rule.Headers.XForwardedFor != "none" {
req.Header.Set("X-Forwarded-For", rule.Headers.XForwardedFor)
}
} else {
req.Header.Set("X-Forwarded-For", ForwardedFor)
}
if rule.Headers.Referer != "" {
if rule.Headers.Referer != "none" {
req.Header.Set("Referer", rule.Headers.Referer)
}
} else {
req.Header.Set("Referer", u.String())
}
if rule.Headers.Cookie != "" {
req.Header.Set("Cookie", rule.Headers.Cookie)
}
resp, err := client.Do(req)
if err != nil {
return "", nil, nil, err
}
defer resp.Body.Close()
bodyB, err := io.ReadAll(resp.Body)
if err != nil {
return "", nil, nil, err
}
if rule.Headers.CSP != "" {
// log.Println(rule.Headers.CSP)
resp.Header.Set("Content-Security-Policy", rule.Headers.CSP)
}
// log.Print("rule", rule) TODO: Add a debug mode to print the rule
body := rewriteHtml(bodyB, u, rule)
return body, req, resp, nil
}
func rewriteHtml(bodyB []byte, u *url.URL, rule ruleset.Rule) string {
// Rewrite the HTML
body := string(bodyB)
// images
imagePattern := `<img\s+([^>]*\s+)?src="(/)([^"]*)"`
re := regexp.MustCompile(imagePattern)
body = re.ReplaceAllString(body, fmt.Sprintf(`<img $1 src="%s$3"`, "/https://"+u.Host+"/"))
// scripts
scriptPattern := `<script\s+([^>]*\s+)?src="(/)([^"]*)"`
reScript := regexp.MustCompile(scriptPattern)
body = reScript.ReplaceAllString(body, fmt.Sprintf(`<script $1 script="%s$3"`, "/https://"+u.Host+"/"))
// body = strings.ReplaceAll(body, "srcset=\"/", "srcset=\"/https://"+u.Host+"/") // TODO: Needs a regex to rewrite the URL's
body = strings.ReplaceAll(body, "href=\"/", "href=\"/https://"+u.Host+"/")
body = strings.ReplaceAll(body, "url('/", "url('/https://"+u.Host+"/")
body = strings.ReplaceAll(body, "url(/", "url(/https://"+u.Host+"/")
body = strings.ReplaceAll(body, "href=\"https://"+u.Host, "href=\"/https://"+u.Host+"/")
if os.Getenv("RULESET") != "" {
body = applyRules(body, rule)
}
return body
}
func getenv(key, fallback string) string {
value := os.Getenv(key)
if len(value) == 0 {
return fallback
}
return value
}
func fetchRule(domain string, path string) ruleset.Rule {
if len(rulesSet) == 0 {
return ruleset.Rule{}
}
rule := ruleset.Rule{}
for _, rule := range rulesSet {
domains := rule.Domains
if rule.Domain != "" {
domains = append(domains, rule.Domain)
}
for _, ruleDomain := range domains {
if ruleDomain == domain || strings.HasSuffix(domain, ruleDomain) {
if len(rule.Paths) > 0 && !StringInSlice(path, rule.Paths) {
continue
}
// return first match
return rule
}
}
}
return rule
}
func applyRules(body string, rule ruleset.Rule) string {
if len(rulesSet) == 0 {
return body
}
for _, regexRule := range rule.RegexRules {
re := regexp.MustCompile(regexRule.Match)
body = re.ReplaceAllString(body, regexRule.Replace)
}
for _, injection := range rule.Injections {
doc, err := goquery.NewDocumentFromReader(strings.NewReader(body))
if err != nil {
log.Fatal(err)
}
if injection.Replace != "" {
doc.Find(injection.Position).ReplaceWithHtml(injection.Replace)
}
if injection.Append != "" {
doc.Find(injection.Position).AppendHtml(injection.Append)
}
if injection.Prepend != "" {
doc.Find(injection.Position).PrependHtml(injection.Prepend)
}
body, err = doc.Html()
if err != nil {
log.Fatal(err)
}
}
return body
}
func StringInSlice(s string, list []string) bool {
for _, x := range list {
if strings.HasPrefix(s, x) {
return true
}
}
return false
}
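
Editor's aside (not part of the diff): to illustrate the URL-modification logic in modifyURL above without depending on the project's ruleset types, here is a self-contained Go sketch; the rule values (www-stripping, /amp prefix removal, amp query deletion) are hypothetical examples, not rules from the repository's ruleset.

```go
package main

import (
	"fmt"
	"net/url"
	"regexp"
)

// Hypothetical stand-ins for the ruleset's URLMods entries.
type urlMod struct{ Match, Replace string }
type queryMod struct{ Key, Value string }

func main() {
	u, _ := url.Parse("https://www.example.com/amp/article?amp=1&id=42")

	// Domain mod: strip a leading "www." (hypothetical rule).
	dom := urlMod{Match: `^www\.`, Replace: ""}
	u.Host = regexp.MustCompile(dom.Match).ReplaceAllString(u.Host, dom.Replace)

	// Path mod: drop an "/amp" prefix (hypothetical rule).
	path := urlMod{Match: `^/amp`, Replace: ""}
	u.Path = regexp.MustCompile(path.Match).ReplaceAllString(u.Path, path.Replace)

	// Query mod: an empty Value deletes the key, mirroring modifyURL.
	q := queryMod{Key: "amp", Value: ""}
	v := u.Query()
	if q.Value == "" {
		v.Del(q.Key)
	} else {
		v.Set(q.Key, q.Value)
	}
	u.RawQuery = v.Encode()

	fmt.Println(u.String()) // https://example.com/article?id=42
}
```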

60 handlers/proxy.test.go Normal file
View File

@@ -0,0 +1,60 @@
// BEGIN: 6f8b3f5d5d5d
package handlers
import (
"net/http"
"net/http/httptest"
"net/url"
"testing"
"ladder/pkg/ruleset"
"github.com/gofiber/fiber/v2"
"github.com/stretchr/testify/assert"
)
func TestProxySite(t *testing.T) {
app := fiber.New()
app.Get("/:url", ProxySite(""))
req := httptest.NewRequest("GET", "/https://example.com", nil)
resp, err := app.Test(req)
assert.NoError(t, err)
assert.Equal(t, http.StatusOK, resp.StatusCode)
}
func TestRewriteHtml(t *testing.T) {
bodyB := []byte(`
<html>
<head>
<title>Test Page</title>
</head>
<body>
<img src="/image.jpg">
<script src="/script.js"></script>
<a href="/about">About Us</a>
<div style="background-image: url('/background.jpg')"></div>
</body>
</html>
`)
u := &url.URL{Host: "example.com"}
expected := `
<html>
<head>
<title>Test Page</title>
</head>
<body>
<img src="/https://example.com/image.jpg">
<script src="/https://example.com/script.js"></script>
<a href="/https://example.com/about">About Us</a>
<div style="background-image: url('/https://example.com/background.jpg')"></div>
</body>
</html>
`
actual := rewriteHtml(bodyB, u, ruleset.Rule{})
assert.Equal(t, expected, actual)
}
// END: 6f8b3f5d5d5d

View File

@@ -1,9 +1,21 @@
package handlers
import (
"log"
"github.com/gofiber/fiber/v2"
)
func Raw(c *fiber.Ctx) error {
return nil
// Get the url from the URL
urlQuery := c.Params("*")
queries := c.Queries()
body, _, _, err := fetchSite(urlQuery, queries)
if err != nil {
log.Println("ERROR:", err)
c.SendStatus(500)
return c.SendString(err.Error())
}
return c.SendString(body)
}

View File

@@ -1,9 +1,23 @@
package handlers
import (
"os"
"github.com/gofiber/fiber/v2"
"gopkg.in/yaml.v3"
)
func Ruleset(c *fiber.Ctx) error {
return nil
if os.Getenv("EXPOSE_RULESET") == "false" {
c.SendStatus(fiber.StatusForbidden)
return c.SendString("Rules Disabled")
}
body, err := yaml.Marshal(rulesSet)
if err != nil {
c.SendStatus(fiber.StatusInternalServerError)
return c.SendString(err.Error())
}
return c.SendString(string(body))
}

View File

@@ -1,23 +0,0 @@
package handlers
import (
"embed"
"github.com/gofiber/fiber/v2"
)
//go:embed script.js
var scriptData embed.FS
func Script(c *fiber.Ctx) error {
scriptData, err := scriptData.ReadFile("script.js")
if err != nil {
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
}
c.Set("Content-Type", "text/javascript")
return c.Send(scriptData)
}

View File

@@ -1,296 +0,0 @@
const labels = document.querySelectorAll("label");
const inputs = document.querySelectorAll('input[type="radio"]');
const mainElement = document.querySelector("main");
const handleDOMContentLoaded = () => {
handleFontChange();
handleFontSizeChange();
inputs.forEach((input) => {
const storedValue = localStorage.getItem(input.name);
if (storedValue === input.value) {
input.checked = true;
}
});
window.removeEventListener("DOMContentLoaded", handleDOMContentLoaded);
};
function focusable_children(node) {
const nodes = Array.from(
node.querySelectorAll(
'a[href], button, input, textarea, select, details, [tabindex]:not([tabindex="-1"])'
)
).filter((s) => s.offsetParent !== null);
const index = nodes.indexOf(document.activeElement);
const update = (d) => {
let i = index + d;
i += nodes.length;
i %= nodes.length;
nodes[i].focus();
};
return {
next: (selector) => {
const reordered = [
...nodes.slice(index + 1),
...nodes.slice(0, index + 1),
];
for (let i = 0; i < reordered.length; i += 1) {
if (!selector || reordered[i].matches(selector)) {
reordered[i].focus();
return;
}
}
},
prev: (selector) => {
const reordered = [
...nodes.slice(index + 1),
...nodes.slice(0, index + 1),
];
for (let i = reordered.length - 2; i >= 0; i -= 1) {
if (!selector || reordered[i].matches(selector)) {
reordered[i].focus();
return;
}
}
},
update,
};
}
function trap(node) {
const handle_keydown = (e) => {
if (e.key === "Tab") {
e.preventDefault();
const group = focusable_children(node);
if (e.shiftKey) {
group.prev();
} else {
group.next();
}
}
};
node.addEventListener("keydown", handle_keydown);
return {
destroy: () => {
node.removeEventListener("keydown", handle_keydown);
},
};
}
const toggleDropdown = () => {
const dropdown = document.getElementById("dropdown");
const dropdown_panel = document.getElementById("dropdown_panel");
const focusTrap = trap(dropdown);
const closeDropdown = () => {
dropdown_panel.classList.add("hidden");
focusTrap.destroy();
dropdown.removeEventListener("keydown", handleEscapeKey);
document.removeEventListener("click", handleClickOutside);
inputs.forEach((input) => {
input.removeEventListener("change", handleInputChange);
});
labels.forEach((label) => {
label.removeEventListener("click", handleLabelSelection);
});
};
const handleClickOutside = (e) => {
if (!dropdown.contains(e.target)) {
closeDropdown();
}
};
const handleEscapeKey = (e) => {
if (e.key === "Escape") {
dropdown_panel.classList.add("hidden");
closeDropdown();
}
};
const handleInputChange = (e) => {
if (e.target.checked) {
localStorage.setItem(e.target.name, e.target.value);
switch (e.target.name) {
case "theme": {
handleThemeChange();
break;
}
case "font": {
handleFontChange();
break;
}
case "fontsize": {
handleFontSizeChange();
break;
}
default: {
console.error("Unknown event");
break;
}
}
}
};
const handleLabelSelection = (e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
const input = document.getElementById(e.target.getAttribute("for"));
input.checked = true;
input.dispatchEvent(new Event("change", { bubbles: true }));
}
};
if (dropdown_panel.classList.contains("hidden")) {
dropdown_panel.classList.remove("hidden");
dropdown.addEventListener("keydown", handleEscapeKey);
inputs.forEach((input) => {
input.addEventListener("change", handleInputChange);
});
labels.forEach((label) => {
label.addEventListener("keydown", handleLabelSelection);
});
document.addEventListener("click", handleClickOutside);
} else {
closeDropdown();
}
};
const handleFontChange = () => {
if (mainElement === null) {
return;
}
let font = localStorage.getItem("font");
if (font === null) {
localStorage.setItem("font", "sans-serif");
font = "sans-serif";
}
if (font === "serif") {
mainElement.classList.add("font-serif");
mainElement.classList.remove("font-sans");
} else {
mainElement.classList.add("font-sans");
mainElement.classList.remove("font-serif");
}
};
const changeFontSize = (node, classes) => {
const sizes = [
"text-xs",
"text-sm",
"text-base",
"text-lg",
"text-xl",
"text-2xl",
"text-3xl",
"text-4xl",
"text-5xl",
"lg:text-4xl",
"lg:text-5xl",
"lg:text-6xl",
];
const currentClasses = sizes.filter((size) => node.classList.contains(size));
node.classList.remove(...currentClasses);
node.classList.add(...classes);
};
const handleFontSizeChange = () => {
if (mainElement === null) {
return;
}
let fontSize = localStorage.getItem("fontsize");
if (fontSize === null) {
localStorage.setItem("fontsize", "text-base");
fontSize = "text-base";
}
if (fontSize === "text-sm") {
changeFontSize(document.querySelector("body"), ["text-sm"]);
} else if (fontSize === "text-lg") {
changeFontSize(document.querySelector("body"), ["text-lg"]);
} else {
changeFontSize(document.querySelector("body"), ["text-base"]);
}
const nodes = document.querySelectorAll(
"h1, h2, h3, h4, h5, h6, code, pre, kbd, table"
);
if (fontSize === "text-sm") {
changeFontSize(mainElement, ["text-sm"]);
} else if (fontSize === "text-lg") {
changeFontSize(mainElement, ["text-lg"]);
} else {
changeFontSize(mainElement, ["text-base"]);
}
nodes.forEach((node) => {
let classes = "";
switch (node.tagName) {
case "H1": {
if (fontSize === "text-sm") {
classes = ["text-3xl", "lg:text-4xl"];
} else if (fontSize === "text-lg") {
classes = ["text-5xl", "lg:text-6xl"];
} else {
classes = ["text-4xl", "lg:text-5xl"];
}
break;
}
case "H2": {
if (fontSize === "text-sm") {
classes = ["text-2xl"];
} else if (fontSize === "text-lg") {
classes = ["text-4xl"];
} else {
classes = ["text-3xl"];
}
break;
}
case "H3": {
if (fontSize === "text-sm") {
classes = ["text-xl"];
} else if (fontSize === "text-lg") {
classes = ["text-3xl"];
} else {
classes = ["text-2xl"];
}
break;
}
case "H4":
case "H5":
case "H6": {
if (fontSize === "text-sm") {
classes = ["text-lg"];
} else if (fontSize === "text-lg") {
classes = ["text-2xl"];
} else {
classes = ["text-xl"];
}
break;
}
case "CODE":
case "PRE":
case "KBD":
case "TABLE": {
if (fontSize === "text-sm") {
classes = ["text-xs"];
} else if (fontSize === "text-lg") {
classes = ["text-base"];
} else {
classes = ["text-sm"];
}
break;
}
default: {
if (fontSize === "text-sm") {
classes = ["text-sm"];
} else if (fontSize === "text-lg") {
classes = ["text-lg"];
} else {
classes = ["text-base"];
}
break;
}
}
changeFontSize(node, classes);
});
};
window.addEventListener("DOMContentLoaded", handleDOMContentLoaded);

File diff suppressed because one or more lines are too long

View File

@@ -1,23 +0,0 @@
package handlers
import (
"embed"
"github.com/gofiber/fiber/v2"
)
//go:embed styles.css
var cssData embed.FS
func Styles(c *fiber.Ctx) error {
cssData, err := cssData.ReadFile("styles.css")
if err != nil {
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
}
c.Set("Content-Type", "text/css")
return c.Send(cssData)
}

View File

@@ -1,82 +0,0 @@
package cli
import (
"fmt"
"os"
"strings"
"golang.org/x/term"
)
var art string = `
_____╬═╬____________________________________________
|_|__╬═╬___|___|___|___| EVERYWALL |___|___|___|___|
|___|╬═╬|___▄▄▌ ▄▄▄· ·▄▄▄▄ ·▄▄▄▄ ▄▄▄ .▄▄▄ __|_|
|_|__╬═╬___|██• ▐█ ▀█ ██▪ ██ ██▪ ██ ▀▄.▀·▀▄ █·|___|
|___|╬═╬|___██▪ ▄█▀▀█ ▐█· ▐█▌▐█· ▐█▌▐▀▀▪▄▐▀▀▄ __|_|
|_|__╬═╬___|▐█▌▐▌▐█ ▪▐▌██. ██ ██. ██ ▐█▄▄▌▐█•█▌|___|
|___|╬═╬|___.▀▀▀ ▀ ▀ ▀▀▀▀▀• ▀▀▀▀▀• ▀▀▀ .▀ ▀__|_|
|_|__╬═╬___|___|___|_ VERSION %-7s__|___|___|___|
|___|╬═╬|____|___|___|___|___|___|___|___|___|___|_|
╬═╬
╬═╬ %s
`
func StartupMessage(version string, port string, ruleset string) string {
isTerm := term.IsTerminal(int(os.Stdout.Fd()))
version = strings.Trim(version, " ")
version = strings.Trim(version, "\n")
var link string
if isTerm {
link = createHyperlink("http://localhost:" + port)
} else {
link = "http://localhost:" + port
}
buf := fmt.Sprintf(art, version, link)
if isTerm {
buf = blinkChars(buf, '.', '•', '·', '▪')
}
if ruleset == "" {
buf += "\n [!] no ruleset specified.\n [!] for better performance, specify a ruleset with --ruleset\n"
}
if isTerm {
buf = colorizeNonASCII(buf)
}
return buf
}
func createHyperlink(url string) string {
return fmt.Sprintf("\033[4m%s\033[0m", url)
}
func colorizeNonASCII(input string) string {
result := ""
for _, r := range input {
if r > 127 {
// If the character is non-ASCII, color it blue
result += fmt.Sprintf("\033[34m%c\033[0m", r)
} else {
// ASCII characters remain unchanged
result += string(r)
}
}
return result
}
func blinkChars(input string, chars ...rune) string {
result := ""
MAIN:
for _, x := range input {
for _, y := range chars {
if x == y {
result += fmt.Sprintf("\033[5m%s\033[0m", string(x))
continue MAIN
}
}
result += string(x)
}
return result
}

View File

@@ -1,6 +1,6 @@
{
"scripts": {
"build": "pnpx tailwindcss -i ./styles/input.css -o ./styles/output.css --build && pnpx minify ./styles/output.css > ./handlers/styles.css"
"build": "pnpx tailwindcss -i ./styles/input.css -o ./styles/output.css --build && pnpx minify ./styles/output.css > ./cmd/styles.css"
},
"devDependencies": {
"minify": "^10.5.2",

View File

@@ -308,40 +308,3 @@ func debugPrintRule(rule string, err error) {
fmt.Println(rule)
fmt.Println("------------------------------ END DEBUG RULESET -------------------------------")
}
// ======================= RuleSetMap implementation =================================================
// RuleSetMap: A map with domain names as keys and pointers to the corresponding Rules as values.
// This type is used to efficiently access rules based on domain names.
type RuleSetMap map[string]*Rule
// ToMap converts a RuleSet into a RuleSetMap. It transforms each Rule in the RuleSet
// into a map entry where the key is the Rule's domain (lowercase)
// and the value is a pointer to the Rule. This method is used to
// efficiently access rules based on domain names.
// The RuleSetMap may be accessed with or without a "www." prefix in the domain.
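// For example, a Rule whose Domain is "Example.com" becomes reachable as both
// rsm["example.com"] and rsm["www.example.com"].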
func (rs *RuleSet) ToMap() RuleSetMap {
rsm := make(RuleSetMap)
addMapEntry := func(d string, rule *Rule) {
d = strings.ToLower(d)
rsm[d] = rule
if strings.HasPrefix(d, "www.") {
d = strings.TrimPrefix(d, "www.")
rsm[d] = rule
} else {
d = fmt.Sprintf("www.%s", d)
rsm[d] = rule
}
}
for i, rule := range *rs {
rulePtr := &(*rs)[i]
addMapEntry(rule.Domain, rulePtr)
for _, domain := range rule.Domains {
addMapEntry(domain, rulePtr)
}
}
return rsm
}

View File

@@ -171,55 +171,3 @@ func TestLoadRulesFromLocalDir(t *testing.T) {
assert.Equal(t, rule.RegexRules[0].Replace, "https:")
}
}
func TestToMap(t *testing.T) {
// Prepare a ruleset with multiple rules, including "www." prefixed domains
rules := RuleSet{
{
Domain: "Example.com",
RegexRules: []Regex{{Match: "match1", Replace: "replace1"}},
},
{
Domain: "www.AnotherExample.com",
RegexRules: []Regex{{Match: "match2", Replace: "replace2"}},
},
{
Domain: "www.foo.bAr.baz.bOol.quX.com",
RegexRules: []Regex{{Match: "match3", Replace: "replace3"}},
},
}
// Convert to RuleSetMap
rsm := rules.ToMap()
// Test for correct number of entries
if len(rsm) != 6 {
t.Errorf("Expected 6 entries in RuleSetMap, got %d", len(rsm))
}
// Test for correct mapping
testDomains := []struct {
domain string
expectedMatch string
}{
{"example.com", "match1"},
{"www.example.com", "match1"},
{"anotherexample.com", "match2"},
{"www.anotherexample.com", "match2"},
{"foo.bar.baz.bool.qux.com", "match3"},
{"no.ruleset.domain.com", ""},
}
for _, test := range testDomains {
if test.domain == "no.ruleset.domain.com" {
assert.Empty(t, test.expectedMatch)
continue
}
rule, exists := rsm[test.domain]
if !exists {
t.Errorf("Expected domain %s to exist in RuleSetMap", test.domain)
} else if rule.RegexRules[0].Match != test.expectedMatch {
t.Errorf("Expected match for %s to be %s, got %s", test.domain, test.expectedMatch, rule.RegexRules[0].Match)
}
}
}

View File

@@ -1,16 +0,0 @@
## TLDR
- If you create, delete or rename any request/response modifier, run `go run codegen.go`, so that ruleset unmarshaling will work properly.
## Overview
The `codegen.go` file is a ruleset utility that automatically generates Go code mapping the names of the functional options found in the response/request modifiers to their corresponding factory functions. This generation is needed to deserialize rulesets from JSON or YAML into functional options suitable for use in proxychains. The tool processes the Go files containing modifier functions and generates the necessary mappings.
- The generated mappings are written to `proxychain/ruleset/rule_reqmod_types.gen.go` and `proxychain/ruleset/rule_resmod_types.gen.go`.
- These files are used by the UnmarshalJSON and UnmarshalYAML methods of the rule type, found in `proxychain/ruleset/rule.go`.
## Usage
```sh
go run codegen.go
```
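For reference, the generated entries are plain factory-map assignments. A one-parameter response modifier such as `InjectScriptAfterDOMContentLoaded` would produce an entry roughly of this shape (an illustrative sketch based on the templates in `codegen.go`, not the literal generated file):
```go
// Sketch of a generated entry in rule_resmod_types.gen.go (illustrative).
rsmModMap["InjectScriptAfterDOMContentLoaded"] = func(params ...string) proxychain.ResponseModification {
	return tx.InjectScriptAfterDOMContentLoaded(params[0])
}
```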

View File

@@ -1,205 +0,0 @@
package main
import (
"fmt"
"go/ast"
"go/parser"
"go/token"
"io"
"io/fs"
//"io/fs"
"os"
"path/filepath"
"strings"
//"strings"
)
func responseModToFactoryMap(fn *ast.FuncDecl) (modMap string) {
paramCount := len(fn.Type.Params.List)
name := fn.Name.Name
var x string
switch paramCount {
case 0:
x = fmt.Sprintf(" rsmModMap[\"%s\"] = func(_ ...string) proxychain.ResponseModification {\n return tx.%s()\n }\n", name, name)
default:
p := []string{}
for i := 0; i < paramCount; i++ {
p = append(p, fmt.Sprintf("params[%d]", i))
}
params := strings.Join(p, ", ")
x = fmt.Sprintf(" rsmModMap[\"%s\"] = func(params ...string) proxychain.ResponseModification {\n return tx.%s(%s)\n }\n", name, name, params)
}
return x
}
func responseModCodeGen(dir string) (code string, err error) {
fset := token.NewFileSet()
files, err := os.ReadDir(dir)
if err != nil {
panic(err)
}
factoryMaps := []string{}
for _, file := range files {
if !shouldGenCodeFor(file) {
continue
}
// Parse each Go file
node, err := parser.ParseFile(fset, filepath.Join(dir, file.Name()), nil, parser.ParseComments)
if err != nil {
return "", err
}
ast.Inspect(node, func(n ast.Node) bool {
fn, ok := n.(*ast.FuncDecl)
if ok && fn.Recv == nil && fn.Name.IsExported() {
factoryMaps = append(factoryMaps, responseModToFactoryMap(fn))
}
return true
})
}
code = fmt.Sprintf(`
package ruleset_v2
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/proxychain/codegen/codegen.go
// The purpose of this is serialization of rulesets from JSON or YAML into functional options suitable
// for use in proxychains.
import (
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
)
type ResponseModifierFactory func(params ...string) proxychain.ResponseModification
var rsmModMap map[string]ResponseModifierFactory
func init() {
rsmModMap = make(map[string]ResponseModifierFactory)
%s
}`, strings.Join(factoryMaps, "\n"))
// fmt.Println(code)
return code, nil
}
func requestModToFactoryMap(fn *ast.FuncDecl) (modMap string) {
paramCount := len(fn.Type.Params.List)
name := fn.Name.Name
var x string
switch paramCount {
case 0:
x = fmt.Sprintf(" rqmModMap[\"%s\"] = func(_ ...string) proxychain.RequestModification {\n return rx.%s()\n }\n", name, name)
default:
p := []string{}
for i := 0; i < paramCount; i++ {
p = append(p, fmt.Sprintf("params[%d]", i))
}
params := strings.Join(p, ", ")
x = fmt.Sprintf(" rqmModMap[\"%s\"] = func(params ...string) proxychain.RequestModification {\n return rx.%s(%s)\n }\n", name, name, params)
}
return x
}
func requestModCodeGen(dir string) (code string, err error) {
fset := token.NewFileSet()
files, err := os.ReadDir(dir)
if err != nil {
panic(err)
}
factoryMaps := []string{}
for _, file := range files {
if !shouldGenCodeFor(file) {
continue
}
// Parse each Go file
node, err := parser.ParseFile(fset, filepath.Join(dir, file.Name()), nil, parser.ParseComments)
if err != nil {
return "", err
}
ast.Inspect(node, func(n ast.Node) bool {
fn, ok := n.(*ast.FuncDecl)
if ok && fn.Recv == nil && fn.Name.IsExported() {
factoryMaps = append(factoryMaps, requestModToFactoryMap(fn))
}
return true
})
}
code = fmt.Sprintf(`
package ruleset_v2
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/proxychain/codegen/codegen.go
// The purpose of this is serialization of rulesets from JSON or YAML into functional options suitable
// for use in proxychains.
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
)
type RequestModifierFactory func(params ...string) proxychain.RequestModification
var rqmModMap map[string]RequestModifierFactory
func init() {
rqmModMap = make(map[string]RequestModifierFactory)
%s
}`, strings.Join(factoryMaps, "\n"))
// fmt.Println(code)
return code, nil
}
func shouldGenCodeFor(file fs.DirEntry) bool {
if file.IsDir() {
return false
}
if filepath.Ext(file.Name()) != ".go" {
return false
}
if strings.HasSuffix(file.Name(), "_test.go") {
return false
}
return true
}
func main() {
rqmCode, err := requestModCodeGen("../requestmodifiers/")
if err != nil {
panic(err)
}
// fmt.Println(rqmCode)
fq, err := os.Create("../ruleset/rule_reqmod_types.gen.go")
if err != nil {
panic(err)
}
_, err = io.WriteString(fq, rqmCode)
if err != nil {
panic(err)
}
rsmCode, err := responseModCodeGen("../responsemodifiers/")
if err != nil {
panic(err)
}
// fmt.Println(rsmCode)
fs, err := os.Create("../ruleset/rule_resmod_types.gen.go")
if err != nil {
panic(err)
}
_, err = io.WriteString(fs, rsmCode)
if err != nil {
panic(err)
}
}

View File

@@ -1,517 +0,0 @@
package proxychain
import (
"errors"
"fmt"
"io"
"log"
"net/url"
"strings"
//"time"
//"net/http"
//"github.com/Danny-Dasilva/CycleTLS/cycletls"
//http "github.com/Danny-Dasilva/fhttp"
http "github.com/bogdanfinn/fhttp"
tls_client "github.com/bogdanfinn/tls-client"
//"github.com/bogdanfinn/tls-client/profiles"
"ladder/pkg/ruleset"
"github.com/gofiber/fiber/v2"
)
/*
ProxyChain manages the process of forwarding an HTTP request to an upstream server,
applying request and response modifications along the way.
- It accepts incoming HTTP requests (as a Fiber *ctx), and applies
request modifiers (ReqMods) and response modifiers (ResMods) before passing the
upstream response back to the client.
- ProxyChains can be reused to avoid memory allocations. However, they are not concurrent-safe
so a ProxyChainPool should be used with mutexes to avoid memory errors.
---
# EXAMPLE
```
import (
rx "ladder/pkg/proxychain/requestmodifiers"
tx "ladder/pkg/proxychain/responsemodifiers"
"ladder/pkg/proxychain/responsemodifiers/rewriters"
"ladder/internal/proxychain"
)
proxychain.NewProxyChain().
SetFiberCtx(c).
SetRequestModifications(
rx.BlockOutgoingCookies(),
rx.SpoofOrigin(),
rx.SpoofReferrer(),
).
AddResponseModifications(
tx.BlockIncomingCookies(),
tx.RewriteHTMLResourceURLs(),
).
Execute()
```
client ladder service upstream
┌─────────┐ ┌────────────────────────┐ ┌─────────┐
│ │GET │ │ │ │
│ req────┼───► ProxyChain │ │ │
│ │ │ │ │ │ │
│ │ │ ▼ │ │ │
│ │ │ apply │ │ │
│ │ │ RequestModifications │ │ │
│ │ │ │ │ │ │
│ │ │ ▼ │ │ │
│ │ │ send GET │ │ │
│ │ │ Request req────────┼─► │ │
│ │ │ │ │ │
│ │ │ 200 OK │ │ │
│ │ │ ┌────────────────┼─response │
│ │ │ ▼ │ │ │
│ │ │ apply │ │ │
│ │ │ ResultModifications │ │ │
│ │ │ │ │ │ │
│ │◄───┼───────┘ │ │ │
│ │ │ 200 OK │ │ │
│ │ │ │ │ │
└─────────┘ └────────────────────────┘ └─────────┘
*/
type ProxyChain struct {
Context *fiber.Ctx
Client HTTPClient
onceClient HTTPClient
Request *http.Request
Response *http.Response
requestModifications []RequestModification
onceRequestModifications []RequestModification
onceResponseModifications []ResponseModification
responseModifications []ResponseModification
Ruleset *ruleset.RuleSet
debugMode bool
abortErr error
APIPrefix string
}
// a ProxyStrategy is a pre-built proxychain with purpose-built defaults
type ProxyStrategy ProxyChain
// A RequestModification is a function that should operate on the
// ProxyChain Req or Client field, using the fiber ctx as needed.
type RequestModification func(*ProxyChain) error
// A ResponseModification is a function that should operate on the
// ProxyChain Res (http result) & Body (buffered http response body) field
type ResponseModification func(*ProxyChain) error
// abstraction over HTTPClient
type HTTPClient interface {
GetCookies(u *url.URL) []*http.Cookie
SetCookies(u *url.URL, cookies []*http.Cookie)
SetCookieJar(jar http.CookieJar)
GetCookieJar() http.CookieJar
SetProxy(proxyURL string) error
GetProxy() string
SetFollowRedirect(followRedirect bool)
GetFollowRedirect() bool
CloseIdleConnections()
Do(req *http.Request) (*http.Response, error)
Get(url string) (resp *http.Response, err error)
Head(url string) (resp *http.Response, err error)
Post(url, contentType string, body io.Reader) (resp *http.Response, err error)
}
// SetRequestModifications sets the ProxyChain's request modifiers
// the modifier will not fire until ProxyChain.Execute() is run.
func (chain *ProxyChain) SetRequestModifications(mods ...RequestModification) *ProxyChain {
chain.requestModifications = mods
return chain
}
// AddRequestModifications adds more request modifiers to the ProxyChain
// the modifier will not fire until ProxyChain.Execute() is run.
func (chain *ProxyChain) AddRequestModifications(mods ...RequestModification) *ProxyChain {
chain.requestModifications = append(chain.requestModifications, mods...)
return chain
}
// AddOnceRequestModifications adds a request modifier to the ProxyChain that should only fire once
// the modifier will not fire until ProxyChain.Execute() is run and will be removed after it has been applied.
func (chain *ProxyChain) AddOnceRequestModifications(mods ...RequestModification) *ProxyChain {
chain.onceRequestModifications = append(chain.onceRequestModifications, mods...)
return chain
}
// AddOnceResponseModifications adds a response modifier to the ProxyChain that should only fire once
// the modifier will not fire until ProxyChain.Execute() is run and will be removed after it has been applied.
func (chain *ProxyChain) AddOnceResponseModifications(mods ...ResponseModification) *ProxyChain {
chain.onceResponseModifications = append(chain.onceResponseModifications, mods...)
return chain
}
// AddResponseModifications sets the ProxyChain's response modifiers
// the modifier will not fire until ProxyChain.Execute() is run.
func (chain *ProxyChain) AddResponseModifications(mods ...ResponseModification) *ProxyChain {
chain.responseModifications = mods
return chain
}
// WithAPIPath trims the path during URL extraction.
// example: using path = "api/outline/", a path like "http://localhost:8080/api/outline/https://example.com" becomes "https://example.com"
func (chain *ProxyChain) WithAPIPath(path string) *ProxyChain {
chain.APIPrefix = path
chain.APIPrefix = strings.TrimSuffix(chain.APIPrefix, "*")
return chain
}
// Adds a ruleset to ProxyChain
func (chain *ProxyChain) AddRuleset(rs *ruleset.RuleSet) *ProxyChain {
chain.Ruleset = rs
// TODO: add _applyRuleset method
return chain
}
func (chain *ProxyChain) _initializeRequest() (*http.Request, error) {
if chain.Context == nil {
chain.abortErr = chain.abort(errors.New("no context set"))
return nil, chain.abortErr
}
// initialize a request (without url)
req, err := http.NewRequest(chain.Context.Method(), "", nil)
if err != nil {
return nil, err
}
chain.Request = req
switch chain.Context.Method() {
case "GET", "DELETE", "HEAD", "OPTIONS":
// no request body to forward
case "POST", "PUT", "PATCH":
// stream content of body from client request to upstream request
// (Go switch cases do not fall through, so these methods must share one case)
chain.Request.Body = io.NopCloser(chain.Context.Request().BodyStream())
default:
return nil, fmt.Errorf("unsupported request method from client: '%s'", chain.Context.Method())
}
/*
// copy client request headers to upstream request headers
forwardHeaders := func(key []byte, val []byte) {
req.Header.Set(string(key), string(val))
}
clientHeaders := &chain.Context.Request().Header
clientHeaders.VisitAll(forwardHeaders)
*/
return req, nil
}
// reconstructURLFromReferer reconstructs the URL using the referer's scheme, host, and the relative path / queries
func reconstructURLFromReferer(referer *url.URL, relativeURL *url.URL) (*url.URL, error) {
// Extract the real url from referer path
realURL, err := url.Parse(strings.TrimPrefix(referer.Path, "/"))
if err != nil {
return nil, fmt.Errorf("error parsing real URL from referer '%s': %v", referer.Path, err)
}
if realURL.Scheme == "" || realURL.Host == "" {
return nil, fmt.Errorf("invalid referer URL: '%s' on request '%s", referer.String(), relativeURL.String())
}
log.Printf("rewrite relative URL using referer: '%s' -> '%s'\n", relativeURL.String(), realURL.String())
return &url.URL{
Scheme: referer.Scheme,
Host: referer.Host,
Path: realURL.Path,
RawQuery: realURL.RawQuery,
}, nil
}
// prevents calls like: http://localhost:8080/http://localhost:8080
func preventRecursiveProxyRequest(urlQuery *url.URL, baseProxyURL string) *url.URL {
u := urlQuery.String()
isRecursive := strings.HasPrefix(u, baseProxyURL) || u == baseProxyURL
if !isRecursive {
return urlQuery
}
fixedURL, err := url.Parse(strings.TrimPrefix(strings.TrimPrefix(urlQuery.String(), baseProxyURL), "/"))
if err != nil {
log.Printf("proxychain: failed to fix recursive request: '%s' -> '%s\n'", baseProxyURL, u)
return urlQuery
}
return preventRecursiveProxyRequest(fixedURL, baseProxyURL)
}
// extractURL extracts a URL from the request ctx. If the URL in the request
// is a relative path, it reconstructs the full URL using the referer header.
func (chain *ProxyChain) extractURL() (*url.URL, error) {
reqURL := chain.Context.Params("*")
fmt.Println("XXXXXXXXXXXXXXXX")
fmt.Println(reqURL)
fmt.Println(chain.APIPrefix)
reqURL = strings.TrimPrefix(reqURL, chain.APIPrefix)
// sometimes client requests doubleroot '//'
// there is a bug somewhere else, but this is a workaround until we find it
if strings.HasPrefix(reqURL, "/") || strings.HasPrefix(reqURL, `%2F`) {
reqURL = strings.TrimPrefix(reqURL, "/")
reqURL = strings.TrimPrefix(reqURL, `%2F`)
}
// unescape url query
uReqURL, err := url.QueryUnescape(reqURL)
if err == nil {
reqURL = uReqURL
}
urlQuery, err := url.Parse(reqURL)
if err != nil {
return nil, fmt.Errorf("error parsing request URL '%s': %v", reqURL, err)
}
// prevent recursive proxy requests
fullURL := chain.Context.Request().URI()
proxyURL := fmt.Sprintf("%s://%s", fullURL.Scheme(), fullURL.Host())
urlQuery = preventRecursiveProxyRequest(urlQuery, proxyURL)
// Handle standard paths
// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
isRelativePath := urlQuery.Scheme == ""
if !isRelativePath {
return urlQuery, nil
}
// Handle relative URLs
// eg: https://localhost:8080/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
referer, err := url.Parse(chain.Context.Get("referer"))
relativePath := urlQuery
if err != nil {
return nil, fmt.Errorf("error parsing referer URL from req: '%s': %v", relativePath, err)
}
return reconstructURLFromReferer(referer, relativePath)
}
// SetFiberCtx takes the request ctx from the client
// for the modifiers and execute function to use.
// it must be set every time a new request comes through
// if the upstream request url cannot be extracted from the ctx,
// a 500 error will be sent back to the client
func (chain *ProxyChain) SetFiberCtx(ctx *fiber.Ctx) *ProxyChain {
chain.Context = ctx
// initialize the request and prepare it for modification
req, err := chain._initializeRequest()
if err != nil {
chain.abortErr = chain.abort(err)
}
chain.Request = req
// extract the URL for the request and add it to the new request
url, err := chain.extractURL()
if err != nil {
chain.abortErr = chain.abort(err)
}
chain.Request.URL = url
fmt.Printf("extracted URL: %s\n", chain.Request.URL)
return chain
}
func (chain *ProxyChain) validateCtxIsSet() error {
if chain.Context != nil {
return nil
}
err := errors.New("proxyChain was called without setting a fiber Ctx. Use ProxyChain.SetCtx()")
chain.abortErr = chain.abort(err)
return chain.abortErr
}
// SetHTTPClient sets a new upstream http client transport
// useful for modifying TLS
func (chain *ProxyChain) SetHTTPClient(httpClient HTTPClient) *ProxyChain {
chain.Client = httpClient
return chain
}
// SetOnceHTTPClient sets a new upstream http client transport temporarily
// and clears it once it is used.
func (chain *ProxyChain) SetOnceHTTPClient(httpClient HTTPClient) *ProxyChain {
chain.onceClient = httpClient
return chain
}
// SetDebugLogging changes the logging behavior to print
// the modification steps and applied rulesets for debugging
func (chain *ProxyChain) SetDebugLogging(isDebugMode bool) *ProxyChain {
if isDebugMode {
log.Println("DEBUG MODE ENABLED")
}
chain.debugMode = isDebugMode
return chain
}
// abort proxychain and return 500 error to client
// this will prevent Execute from firing and reset the state
// returns the initial error enriched with context
func (chain *ProxyChain) abort(err error) error {
// defer chain._reset()
chain.abortErr = err
chain.Context.Response().SetStatusCode(500)
e := fmt.Errorf("ProxyChain error for '%s': %s", chain.Request.URL.String(), err.Error())
chain.Context.SendString(e.Error())
log.Println(e.Error())
return e
}
// internal function to reset state of ProxyChain for reuse
func (chain *ProxyChain) _reset() {
chain.abortErr = nil
chain.Request = nil
// chain.Response = nil
chain.Context = nil
chain.onceResponseModifications = []ResponseModification{}
chain.onceRequestModifications = []RequestModification{}
// chain.onceClient = nil
}
// NewProxyChain initializes a new ProxyChain
func NewProxyChain() *ProxyChain {
chain := new(ProxyChain)
options := []tls_client.HttpClientOption{
tls_client.WithTimeoutSeconds(20),
tls_client.WithRandomTLSExtensionOrder(),
// tls_client.WithClientProfile(profiles.Chrome_117),
// tls_client.WithNotFollowRedirects(),
// tls_client.WithCookieJar(jar), // create cookieJar instance and pass it as argument
}
client, err := tls_client.NewHttpClient(tls_client.NewNoopLogger(), options...)
if err != nil {
panic(err)
}
chain.Client = client
return chain
}
/// ========================================================================================================
// _execute sends the request for the ProxyChain and returns the raw body only
// the caller is responsible for returning a response back to the requestor
// the caller is also responsible for calling chain._reset() when they are done with the body
func (chain *ProxyChain) _execute() (io.Reader, error) {
// ================== PREFLIGHT CHECKS =============================
if chain.validateCtxIsSet() != nil || chain.abortErr != nil {
return nil, chain.abortErr
}
if chain.Request == nil {
return nil, errors.New("proxychain request not yet initialized")
}
if chain.Request.URL.Scheme == "" {
return nil, errors.New("request url not set or invalid. Check ProxyChain ReqMods for issues")
}
// ======== REQUEST MODIFICATIONS :: [client -> ladder] -> upstream -> ladder -> client =============================
// Apply requestModifications to proxychain
for _, applyRequestModificationsTo := range chain.requestModifications {
err := applyRequestModificationsTo(chain)
if err != nil {
return nil, chain.abort(err)
}
}
// Apply onceRequestModifications to proxychain and clear them
for _, applyOnceRequestModificationsTo := range chain.onceRequestModifications {
err := applyOnceRequestModificationsTo(chain)
if err != nil {
return nil, chain.abort(err)
}
}
chain.onceRequestModifications = []RequestModification{}
// ======== SEND REQUEST UPSTREAM :: client -> [ladder -> upstream] -> ladder -> client =============================
// Send Request Upstream
if chain.onceClient != nil {
// if chain.SetOnceClient() is used, use that client instead of the
// default http client temporarily.
resp, err := chain.onceClient.Do(chain.Request)
if err != nil {
return nil, chain.abort(err)
}
chain.Response = resp
// chain.onceClient = nil
} else {
resp, err := chain.Client.Do(chain.Request)
if err != nil {
return nil, chain.abort(err)
}
chain.Response = resp
}
// ======== APPLY RESPONSE MODIFIERS :: client -> ladder -> [upstream -> ladder] -> client =============================
// Apply ResponseModifiers to proxychain
for _, applyResultModificationsTo := range chain.responseModifications {
err := applyResultModificationsTo(chain)
if err != nil {
return nil, chain.abort(err)
}
}
// Apply onceResponseModifications to proxychain and clear them
for _, applyOnceResponseModificationsTo := range chain.onceResponseModifications {
err := applyOnceResponseModificationsTo(chain)
if err != nil {
return nil, chain.abort(err)
}
}
chain.onceResponseModifications = []ResponseModification{}
// ======== RETURN BODY TO CLIENT :: client -> ladder -> upstream -> [ladder -> client] =============================
return chain.Response.Body, nil
}
// Execute sends the request for the ProxyChain and returns the request to the sender
// and resets the fields so that the ProxyChain can be reused.
// if any step in the ProxyChain fails, the request will abort and a 500 error will
// be returned to the client
func (chain *ProxyChain) Execute() error {
defer chain._reset()
body, err := chain._execute()
if err != nil {
log.Println(err)
return err
}
if chain.Context == nil {
return errors.New("no context set")
}
// in case the api user did not set or forward a response content-type, we do it for them
if chain.Context.GetRespHeader("content-type") == "" {
chain.Context.Set("content-type", chain.Response.Header.Get("content-type"))
}
// Return request back to client
return chain.Context.SendStream(body)
// return chain.Context.SendStream(body)
}

View File

@@ -1,11 +0,0 @@
package proxychain
import (
"net/url"
)
type Pool map[url.URL]ProxyChain
func NewPool() Pool {
return map[url.URL]ProxyChain{}
}

View File

@@ -1,142 +0,0 @@
package bot
import (
"encoding/json"
"fmt"
"io"
"math/big"
"math/bits"
"math/rand"
"net"
"net/http"
"time"
)
type Bot interface {
UpdatePool(url string) error
GetRandomIP() string
}
type bot struct {
UserAgent string
Fingerprint string
IPPool botPool
}
type botPool struct {
Timestamp string `json:"creationTime"`
Prefixes []botPrefix `json:"prefixes"`
}
type botPrefix struct {
IPv6 string `json:"ipv6Prefix,omitempty"`
IPv4 string `json:"ipv4Prefix,omitempty"`
}
// TODO: move pointers around, not global variables
var GoogleBot = bot{
UserAgent: "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; Googlebot/2.1; http://www.google.com/bot.html) Chrome/79.0.3945.120 Safari/537.36",
// https://github.com/trisulnsm/trisul-scripts/blob/master/lua/frontend_scripts/reassembly/ja3/prints/ja3fingerprint.json
Fingerprint: "769,49195-49199-49196-49200-52393-52392-52244-52243-49161-49171-49162-49172-156-157-47-53-10,65281-0-23-35-13-5-18-16-11-10-21,29-23-24,0",
IPPool: botPool{
Timestamp: "2023-11-28T23:00:56.000000",
Prefixes: []botPrefix{
{
IPv4: "34.100.182.96/28",
},
},
},
}
var BingBot = bot{
UserAgent: "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm) Chrome/79.0.3945.120 Safari/537.36",
IPPool: botPool{
Timestamp: "2023-03-08T10:00:00.121331",
Prefixes: []botPrefix{
{
IPv4: "207.46.13.0/24",
},
},
},
}
func (b *bot) UpdatePool(url string) error {
client := &http.Client{Timeout: 10 * time.Second}
resp, err := client.Get(url)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("failed to update bot IP pool: status code %s", resp.Status)
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return err
}
err = json.Unmarshal(body, &b.IPPool)
return err
}
func (b *bot) GetRandomIP() string {
count := len(b.IPPool.Prefixes)
var prefix botPrefix
if count == 1 {
prefix = b.IPPool.Prefixes[0]
} else {
idx := rand.Intn(count)
prefix = b.IPPool.Prefixes[idx]
}
if prefix.IPv4 != "" {
ip, err := randomIPFromSubnet(prefix.IPv4)
if err == nil {
return ip.String()
}
}
if prefix.IPv6 != "" {
ip, err := randomIPFromSubnet(prefix.IPv6)
if err == nil {
return ip.String()
}
}
// fallback to default IP which is known to work
ip, _ := randomIPFromSubnet(b.IPPool.Prefixes[0].IPv4)
return ip.String()
}
func randomIPFromSubnet(c string) (net.IP, error) {
ip, ipnet, err := net.ParseCIDR(c)
if err != nil {
return nil, err
}
// int representation of the byte mask (only its last byte matters below)
mask := big.NewInt(0).SetBytes(ipnet.Mask).Uint64()
// how many unset (host) bits there are at the end of the mask's last byte
offset := bits.TrailingZeros8(byte(mask))
// number of addresses covered by those host bits (2^offset, capped at 256)
count := 1 << offset
toAdd := rand.Intn(count)
last := len(ip) - 1
ip[last] = ip[last] + byte(toAdd)
return ip, nil
}

View File

@@ -1,36 +0,0 @@
package bot
import (
"net"
"testing"
)
func TestRandomIPFromSubnet(t *testing.T) {
err := GoogleBot.UpdatePool("https://developers.google.com/static/search/apis/ipranges/googlebot.json")
if err != nil {
t.Error(err)
}
for _, prefix := range GoogleBot.IPPool.Prefixes {
subnet := prefix.IPv4
if prefix.IPv6 != "" {
subnet = prefix.IPv6
}
t.Run(subnet, func(t *testing.T) {
_, ipnet, err := net.ParseCIDR(subnet)
if err != nil {
t.Error(err)
}
ip, err := randomIPFromSubnet(subnet)
if err != nil {
t.Error(err)
}
if !ipnet.Contains(ip) {
t.Fail()
}
})
}
}

View File

@@ -1,45 +0,0 @@
package requestmodifiers
import (
"strings"
//"fmt"
"ladder/proxychain"
)
var forwardBlacklist map[string]bool
func init() {
forwardBlacklist = map[string]bool{
"host": true,
"connection": true,
"keep-alive": true,
"content-length": true,
"content-encoding": true,
"transfer-encoding": true,
"referer": true,
"x-forwarded-for": true,
"x-real-ip": true,
"forwarded": true,
"accept-encoding": true,
}
}
// ForwardRequestHeaders forwards the requests headers sent from the client to the upstream server
func ForwardRequestHeaders() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
forwardHeaders := func(key, value []byte) {
k := strings.ToLower(string(key))
v := string(value)
if forwardBlacklist[k] {
return
}
// fmt.Println(k, v)
chain.Request.Header.Set(k, v)
}
chain.Context.Request().
Header.VisitAll(forwardHeaders)
return nil
}
}

View File

@@ -1,126 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
"ladder/proxychain/requestmodifiers/bot"
)
// MasqueradeAsGoogleBot modifies user agent and x-forwarded for
// to appear to be a Google Bot
func MasqueradeAsGoogleBot() proxychain.RequestModification {
ip := bot.GoogleBot.GetRandomIP()
return masqueradeAsTrustedBot(bot.GoogleBot.UserAgent, ip, bot.GoogleBot.Fingerprint)
}
// MasqueradeAsBingBot modifies user agent and x-forwarded for
// to appear to be a Bing Bot
func MasqueradeAsBingBot() proxychain.RequestModification {
ip := bot.BingBot.GetRandomIP()
return masqueradeAsTrustedBot(bot.BingBot.UserAgent, ip, "")
}
// MasqueradeAsWaybackMachineBot modifies user agent and x-forwarded for
// to appear to be a archive.org (wayback machine) Bot
func MasqueradeAsWaybackMachineBot() proxychain.RequestModification {
const botUA string = "Mozilla/5.0 (compatible; archive.org_bot +http://www.archive.org/details/archive.org_bot)"
const botIP string = "207.241.235.164"
return masqueradeAsTrustedBot(botUA, botIP, "")
}
// MasqueradeAsFacebookBot modifies user agent and x-forwarded for
// to appear to be a Facebook Bot (link previews?)
func MasqueradeAsFacebookBot() proxychain.RequestModification {
const botUA string = "facebookexternalhit/1.1 (+http://www.facebook.com/externalhit_uatext.php)"
// 31.13.97.0/24, 31.13.99.0/24, 31.13.100.0/24, 66.220.144.0/20, 69.63.189.0/24, 69.63.190.0/24, 69.171.224.0/20, 69.171.240.0/21, 69.171.248.0/24, 173.252.73.0/24, 173.252.74.0/24, 173.252.77.0/24, 173.252.100.0/22, 173.252.104.0/21, 173.252.112.0/24, 2a03:2880:10::/48, 2a03:2880:10ff::/48, 2a03:2880:11::/48, 2a03:2880:11ff::/48, 2a03:2880:20::/48, 2a03:2880:20ff::/48, 2a03:2880:21ff::/48, 2a03:2880:30ff::/48, 2a03:2880:31ff::/48, 2a03:2880:1010::/48, 2a03:2880:1020::/48, 2a03:2880:2020::/48, 2a03:2880:2050::/48, 2a03:2880:2040::/48, 2a03:2880:2110::/48, 2a03:2880:2130::/48, 2a03:2880:3010::/48, 2a03:2880:3020::/48
const botIP string = "31.13.99.8"
const ja3 string = "771,49199-49195-49171-49161-49200-49196-49172-49162-51-57-50-49169-49159-47-53-10-5-4-255,0-11-10-13-13172-16,23-25-28-27-24-26-22-14-13-11-12-9-10,0-1-2"
return masqueradeAsTrustedBot(botUA, botIP, ja3)
}
// MasqueradeAsYandexBot modifies user agent and x-forwarded for
// to appear to be a Yandex Spider Bot
func MasqueradeAsYandexBot() proxychain.RequestModification {
const botUA string = "Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)"
// 100.43.90.0/24, 37.9.115.0/24, 37.140.165.0/24, 77.88.22.0/25, 77.88.29.0/24, 77.88.31.0/24, 77.88.59.0/24, 84.201.146.0/24, 84.201.148.0/24, 84.201.149.0/24, 87.250.243.0/24, 87.250.253.0/24, 93.158.147.0/24, 93.158.148.0/24, 93.158.151.0/24, 93.158.153.0/32, 95.108.128.0/24, 95.108.138.0/24, 95.108.150.0/23, 95.108.158.0/24, 95.108.156.0/24, 95.108.188.128/25, 95.108.234.0/24, 95.108.248.0/24, 100.43.80.0/24, 130.193.62.0/24, 141.8.153.0/24, 178.154.165.0/24, 178.154.166.128/25, 178.154.173.29, 178.154.200.158, 178.154.202.0/24, 178.154.205.0/24, 178.154.239.0/24, 178.154.243.0/24, 37.9.84.253, 199.21.99.99, 178.154.162.29, 178.154.203.251, 178.154.211.250, 178.154.171.0/24, 178.154.200.0/24, 178.154.244.0/24, 178.154.246.0/24, 95.108.181.0/24, 95.108.246.252, 5.45.254.0/24, 5.255.253.0/24, 37.140.141.0/24, 37.140.188.0/24, 100.43.81.0/24, 100.43.85.0/24, 100.43.91.0/24, 199.21.99.0/24, 2a02:6b8:b000::/32, 2a02:6b8:b010::/32, 2a02:6b8:b011::/32, 2a02:6b8:c0e::/32
const botIP string = "37.9.115.9"
const ja3 string = "769,49200-49196-49192-49188-49172-49162-165-163-161-159-107-106-105-104-57-56-55-54-136-135-134-133-49202-49198-49194-49190-49167-49157-157-61-53-132-49199-49195-49191-49187-49171-49161-164-162-160-158-103-64-63-62-51-50-49-48-154-153-152-151-69-68-67-66-49201-49197-49193-49189-49166-49156-156-60-47-150-65-7-49169-49159-49164-49154-5-4-49170-49160-22-19-16-13-49165-49155-10-255,0-11-10-35-13-15,23-25-28-27-24-26-22-14-13-11-12-9-10,0-1-2"
return masqueradeAsTrustedBot(botUA, botIP, ja3)
}
// MasqueradeAsBaiduBot modifies user agent and x-forwarded for
// to appear to be a Baidu Spider Bot
func MasqueradeAsBaiduBot() proxychain.RequestModification {
const botUA string = "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
// 180.76.15.0/24, 119.63.196.0/24, 115.239.212./24, 119.63.199.0/24, 122.81.208.0/22, 123.125.71.0/24, 180.76.4.0/24, 180.76.5.0/24, 180.76.6.0/24, 185.10.104.0/24, 220.181.108.0/24, 220.181.51.0/24, 111.13.102.0/24, 123.125.67.144/29, 123.125.67.152/31, 61.135.169.0/24, 123.125.68.68/30, 123.125.68.72/29, 123.125.68.80/28, 123.125.68.96/30, 202.46.48.0/20, 220.181.38.0/24, 123.125.68.80/30, 123.125.68.84/31, 123.125.68.0/24
const botIP string = "180.76.15.7"
return masqueradeAsTrustedBot(botUA, botIP, "")
}
// MasqueradeAsDuckDuckBot modifies user agent and x-forwarded for
// to appear to be a DuckDuckGo Bot
func MasqueradeAsDuckDuckBot() proxychain.RequestModification {
const botUA string = "DuckDuckBot/1.0; (+http://duckduckgo.com/duckduckbot.html)"
// 46.51.197.88, 46.51.197.89, 50.18.192.250, 50.18.192.251, 107.21.1.61, 176.34.131.233, 176.34.135.167, 184.72.106.52, 184.72.115.86
const botIP string = "46.51.197.88"
return masqueradeAsTrustedBot(botUA, botIP, "")
}
// MasqueradeAsYahooBot modifies user agent and x-forwarded for
// to appear to be a Yahoo Bot
func MasqueradeAsYahooBot() proxychain.RequestModification {
const botUA string = "Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)"
// 5.255.250.0/24, 37.9.87.0/24, 67.195.37.0/24, 67.195.50.0/24, 67.195.110.0/24, 67.195.111.0/24, 67.195.112.0/23, 67.195.114.0/24, 67.195.115.0/24, 68.180.224.0/21, 72.30.132.0/24, 72.30.142.0/24, 72.30.161.0/24, 72.30.196.0/24, 72.30.198.0/24, 74.6.254.0/24, 74.6.8.0/24, 74.6.13.0/24, 74.6.17.0/24, 74.6.18.0/24, 74.6.22.0/24, 74.6.27.0/24, 74.6.168.0/24, 77.88.5.0/24, 77.88.47.0/24, 93.158.161.0/24, 98.137.72.0/24, 98.137.206.0/24, 98.137.207.0/24, 98.139.168.0/24, 114.111.95.0/24, 124.83.159.0/24, 124.83.179.0/24, 124.83.223.0/24, 141.8.144.0/24, 183.79.63.0/24, 183.79.92.0/24, 203.216.255.0/24, 211.14.11.0/24
const ja3 = "769,49200-49196-49192-49188-49172-49162-163-159-107-106-57-56-136-135-49202-49198-49194-49190-49167-49157-157-61-53-132-49199-49195-49191-49187-49171-49161-162-158-103-64-51-50-49170-49160-154-153-69-68-22-19-49201-49197-49193-49189-49166-49156-49165-49155-156-60-47-150-65-10-7-49169-49159-49164-49154-5-4-255,0-11-10-13-15,25-24-23,0-1-2"
const botIP string = "37.9.87.5"
return masqueradeAsTrustedBot(botUA, botIP, ja3)
}
func masqueradeAsTrustedBot(botUA string, botIP string, ja3 string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofUserAgent(botUA),
// general / nginx
SetRequestHeader("X-Forwarded-For", botIP),
SetRequestHeader("X-Real-IP", botIP),
SetRequestHeader("True-Client-IP", botIP),
SetRequestHeader("WL-Proxy-Client-IP", botIP),
SetRequestHeader("X-Cluster-Client-IP", botIP),
/*
// akamai
SetRequestHeader("True-Client-IP", botIP),
// cloudflare
// TODO: this seems to cause issues with CF... figure out workaround or remove
Error 1000
Ray ID: xxxxxxxxxxxxxxxx •
2023-12-01 20:09:22 UTC
DNS points to prohibited IP
What happened?
You've requested a page on a website (xxxxxxxxxxxxxxxxxxx) that is on the Cloudflare network. Unfortunately, it is resolving to an IP address that is creating a conflict within Cloudflare's system
SetRequestHeader("CF-Connecting-IP", botIP),
// weblogic
SetRequestHeader("WL-Proxy-Client-IP", botIP),
// azure
SetRequestHeader("X-Cluster-Client-IP", botIP),
*/
DeleteRequestHeader("referrer"),
DeleteRequestHeader("origin"),
)
/*
if ja3 != "" {
chain.AddOnceRequestModifications(
SpoofJA3fingerprint(ja3, botUA),
)
}
*/
return nil
}
}

View File

@@ -1,19 +0,0 @@
package requestmodifiers
import (
"fmt"
"regexp"
"ladder/proxychain"
)
func ModifyDomainWithRegex(matchRegex string, replacement string) proxychain.RequestModification {
match, err := regexp.Compile(matchRegex)
return func(px *proxychain.ProxyChain) error {
if err != nil {
return fmt.Errorf("RequestModification :: ModifyDomainWithRegex error => invalid match regex: %s - %s", matchRegex, err.Error())
}
px.Request.URL.Host = match.ReplaceAllString(px.Request.URL.Host, replacement)
return nil
}
}

View File

@@ -1,100 +0,0 @@
package requestmodifiers
import (
//"net/http"
//http "github.com/Danny-Dasilva/fhttp"
http "github.com/bogdanfinn/fhttp"
"ladder/proxychain"
)
// SetOutgoingCookie modifes a specific cookie name
// by modifying the request cookie headers going to the upstream server.
// If the cookie name does not already exist, it is created.
func SetOutgoingCookie(name string, val string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
// mutating the cookies returned by Request.Cookies() does not change the
// outgoing Cookie header, so the header is rebuilt with the modified value
cookies := chain.Request.Cookies()
chain.Request.Header.Del("Cookie")
hasCookie := false
for _, cookie := range cookies {
if cookie.Name == name {
cookie.Value = val
hasCookie = true
}
chain.Request.AddCookie(cookie)
}
if hasCookie {
return nil
}
chain.Request.AddCookie(&http.Cookie{
Domain: chain.Request.URL.Host,
Name: name,
Value: val,
})
return nil
}
}
// SetOutgoingCookies modifies a client request's cookie header
// to a raw Cookie string, overwriting existing cookies
func SetOutgoingCookies(cookies string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.Request.Header.Set("Cookies", cookies)
return nil
}
}
// DeleteOutgoingCookie modifies the http request's cookies header to
// delete a specific request cookie going to the upstream server.
// If the cookie does not exist, it does not do anything.
func DeleteOutgoingCookie(name string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
cookies := chain.Request.Cookies()
chain.Request.Header.Del("Cookies")
for _, cookie := range cookies {
if cookie.Name == name {
chain.Request.AddCookie(cookie)
}
}
return nil
}
}
// DeleteOutgoingCookies removes the cookie header entirely,
// preventing any cookies from reaching the upstream server.
func DeleteOutgoingCookies() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Del("Cookie")
return nil
}
}
// DeleteOutGoingCookiesExcept prevents non-whitelisted cookies from being sent from the client
// to the upstream proxy server. Cookies whose names are in the whitelist are not removed.
func DeleteOutgoingCookiesExcept(whitelist ...string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
// Convert whitelist slice to a map for efficient lookups
whitelistMap := make(map[string]struct{})
for _, cookieName := range whitelist {
whitelistMap[cookieName] = struct{}{}
}
// Get all cookies from the request header
cookies := px.Request.Cookies()
// Clear the original Cookie header
px.Request.Header.Del("Cookie")
// Re-add cookies that are in the whitelist
for _, cookie := range cookies {
if _, found := whitelistMap[cookie.Name]; found {
px.Request.AddCookie(cookie)
}
}
return nil
}
}

View File

@@ -1,19 +0,0 @@
package requestmodifiers
import (
"fmt"
"regexp"
"ladder/proxychain"
)
func ModifyPathWithRegex(matchRegex string, replacement string) proxychain.RequestModification {
match, err := regexp.Compile(matchRegex)
return func(px *proxychain.ProxyChain) error {
if err != nil {
return fmt.Errorf("RequestModification :: ModifyPathWithRegex error => invalid match regex: %s - %s", matchRegex, err.Error())
}
px.Request.URL.Path = match.ReplaceAllString(px.Request.URL.Path, replacement)
return nil
}
}

View File

@@ -1,26 +0,0 @@
package requestmodifiers
import (
"net/url"
"ladder/proxychain"
)
// ModifyQueryParams replaces query parameter values in URL's query params in a ProxyChain's URL.
// If the query param key doesn't exist, it is created.
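// e.g. ModifyQueryParams("amp", "1") turns "?page=2" into "?amp=1&page=2";
// an empty value deletes the key instead.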
func ModifyQueryParams(key string, value string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
q := chain.Request.URL.Query()
chain.Request.URL.RawQuery = modifyQueryParams(key, value, q)
return nil
}
}
func modifyQueryParams(key string, value string, q url.Values) string {
if value == "" {
q.Del(key)
return q.Encode()
}
q.Set(key, value)
return q.Encode()
}

View File

@@ -1,23 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SetRequestHeader modifies a specific outgoing header
// This is the header that the upstream server will see.
func SetRequestHeader(name string, val string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set(name, val)
return nil
}
}
// DeleteRequestHeader modifies a specific outgoing header
// This is the header that the upstream server will see.
func DeleteRequestHeader(name string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Del(name)
return nil
}
}

View File

@@ -1,47 +0,0 @@
package requestmodifiers
import (
"fmt"
"net/url"
"regexp"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
)
const archivistUrl string = "https://archive.is/latest"
// RequestArchiveIs modifies a ProxyChain's URL to request an archived version from archive.is
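// e.g. https://example.com/article -> https://archive.is/latest/https://example.com/article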
func RequestArchiveIs() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
rURL := preventRecursiveArchivistURLs(chain.Request.URL.String())
chain.Request.URL.RawQuery = ""
newURL, err := url.Parse(fmt.Sprintf("%s/%s", archivistUrl, rURL))
if err != nil {
return err
}
// archivist seems to sabotage requests from cloudflare's DNS
// bypass this just in case
chain.AddOnceRequestModifications(ResolveWithGoogleDoH())
chain.Request.URL = newURL
// clean up the archive.is banner elements
script := `[...document.querySelector("body > center").childNodes].filter(e => e.id != "SOLID").forEach(e => e.remove())`
chain.AddOnceResponseModifications(
tx.InjectScriptAfterDOMContentLoaded(script),
)
return nil
}
}
// https://archive.is/20200421201055/https://rt.live/ -> http://rt.live/
func preventRecursiveArchivistURLs(url string) string {
re := regexp.MustCompile(`https?:\/\/archive\.is\/\d+\/(https?:\/\/.*)`)
match := re.FindStringSubmatch(url)
if match != nil {
return match[1]
}
return url
}

View File

@@ -1,22 +0,0 @@
package requestmodifiers
import (
"net/url"
"ladder/proxychain"
)
const googleCacheUrl string = "https://webcache.googleusercontent.com/search?q=cache:"
// RequestGoogleCache modifies a ProxyChain's URL to request its Google Cache version.
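// e.g. https://example.com/article -> https://webcache.googleusercontent.com/search?q=cache:https%3A%2F%2Fexample.com%2Farticle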
func RequestGoogleCache() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
encodedURL := url.QueryEscape(px.Request.URL.String())
newURL, err := url.Parse(googleCacheUrl + encodedURL)
if err != nil {
return err
}
px.Request.URL = newURL
return nil
}
}

View File

@@ -1,43 +0,0 @@
package requestmodifiers
import (
"net/url"
"regexp"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
)
const waybackUrl string = "https://web.archive.org/web/"
// RequestWaybackMachine modifies a ProxyChain's URL to request the wayback machine (archive.org) version.
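// e.g. https://example.com/article -> https://web.archive.org/web/https://example.com/article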
func RequestWaybackMachine() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.Request.URL.RawQuery = ""
rURL := preventRecursiveWaybackURLs(chain.Request.URL.String())
newURLString := waybackUrl + rURL
newURL, err := url.Parse(newURLString)
if err != nil {
return err
}
chain.Request.URL = newURL
// cleanup wayback headers
script := `["wm-ipp-print", "wm-ipp-base"].forEach(id => { try { document.getElementById(id).remove() } catch{ } })`
chain.AddOnceResponseModifications(
tx.InjectScriptAfterDOMContentLoaded(script),
)
return nil
}
}
func preventRecursiveWaybackURLs(url string) string {
re := regexp.MustCompile(`https:\/\/web\.archive\.org\/web\/\d+\/\*(https?:\/\/.*)`)
match := re.FindStringSubmatch(url)
if match != nil {
return match[1]
}
return url
}

View File

@@ -1,94 +0,0 @@
package requestmodifiers
import (
"context"
"encoding/json"
"fmt"
"net"
"time"
http "github.com/bogdanfinn/fhttp"
/*
tls_client "github.com/bogdanfinn/tls-client"
//"net/http"
*/
"ladder/proxychain"
)
// resolveWithGoogleDoH resolves DNS using Google's DNS-over-HTTPS
func resolveWithGoogleDoH(host string) (string, error) {
url := "https://dns.google/resolve?name=" + host + "&type=A"
resp, err := http.Get(url)
if err != nil {
return "", err
}
defer resp.Body.Close()
var result struct {
Answer []struct {
Data string `json:"data"`
} `json:"Answer"`
}
err = json.NewDecoder(resp.Body).Decode(&result)
if err != nil {
return "", err
}
// Get the first A record
if len(result.Answer) > 0 {
return result.Answer[0].Data, nil
}
return "", fmt.Errorf("no DoH DNS record found for %s", host)
}
type CustomDialer struct {
*net.Dialer
}
func newCustomDialer(timeout, keepAlive time.Duration) *CustomDialer {
return &CustomDialer{
Dialer: &net.Dialer{
Timeout: timeout,
KeepAlive: keepAlive,
},
}
}
func (cd *CustomDialer) DialContext(ctx context.Context, network, addr string) (net.Conn, error) {
host, port, err := net.SplitHostPort(addr)
if err != nil {
// addr had no port; treat the whole string as the host and default to 443
host = addr
port = "443"
}
resolvedHost, err := resolveWithGoogleDoH(host)
if err != nil {
return nil, err
}
return cd.Dialer.DialContext(ctx, network, net.JoinHostPort(resolvedHost, port))
}
// ResolveWithGoogleDoH modifies a ProxyChain's client to make the request by resolving the URL
// using Google's DNS over HTTPS service
func ResolveWithGoogleDoH() proxychain.RequestModification {
// customDialer := newCustomDialer(10*time.Second, 10*time.Second)
return func(chain *proxychain.ProxyChain) error {
/*
options := []tls_client.HttpClientOption{
tls_client.WithTimeoutSeconds(30),
tls_client.WithRandomTLSExtensionOrder(),
tls_client.WithDialer(*customDialer.Dialer),
//tls_client.WithClientProfile(profiles.Chrome_105),
}
client, err := tls_client.NewHttpClient(tls_client.NewNoopLogger(), options...)
if err != nil {
return err
}
chain.SetOnceHTTPClient(client)
*/
return nil
}
}
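
For context, a minimal sketch of the JSON answer shape that resolveWithGoogleDoH expects from the dns.google endpoint; the payload below is a trimmed, hypothetical example rather than a captured response:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hypothetical, trimmed dns.google resolve response.
	payload := []byte(`{"Status":0,"Answer":[{"name":"example.com.","type":1,"data":"93.184.216.34"}]}`)
	var result struct {
		Answer []struct {
			Data string `json:"data"`
		} `json:"Answer"`
	}
	if err := json.Unmarshal(payload, &result); err != nil {
		panic(err)
	}
	if len(result.Answer) > 0 {
		fmt.Println(result.Answer[0].Data) // first A record, e.g. 93.184.216.34
	}
}
```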

View File

@@ -1,52 +0,0 @@
package requestmodifiers
// removed due to using a different TLS spoofing technique
/*
import (
//"github.com/Danny-Dasilva/CycleTLS/cycletls"
//http "github.com/Danny-Dasilva/fhttp"
//http "github.com/bogdanfinn/fhttp"
"golang.org/x/net/proxy"
"ladder/proxychain"
)
// SpoofJA3fingerprint modifies the TLS client and user agent to spoof a particular JA3 fingerprint
// Some anti-bot WAFs such as cloudflare can fingerprint the fields of the TLS hello packet, and the order in which they appear
// https://web.archive.org/web/20231126224326/https://engineering.salesforce.com/tls-fingerprinting-with-ja3-and-ja3s-247362855967/
// https://web.archive.org/web/20231119065253/https://developers.cloudflare.com/bots/concepts/ja3-fingerprint/
func SpoofJA3fingerprint(ja3 string, userAgent string) proxychain.RequestModification {
//fmt.Println(ja3)
return func(chain *proxychain.ProxyChain) error {
// deep copy existing client while modifying http transport
ja3SpoofClient := &http.Client{
Transport: cycletls.NewTransport(ja3, userAgent),
Timeout: chain.Client.Timeout,
CheckRedirect: chain.Client.CheckRedirect,
}
chain.SetOnceHTTPClient(ja3SpoofClient)
return nil
}
}
// SpoofJA3fingerprintWithProxy modifies the TLS client and user agent to spoof a particular JA3 fingerprint and use a proxy.ContextDialer from the "golang.org/x/net/proxy"
// Some anti-bot WAFs such as cloudflare can fingerprint the fields of the TLS hello packet, and the order in which they appear
// https://web.archive.org/web/20231126224326/https://engineering.salesforce.com/tls-fingerprinting-with-ja3-and-ja3s-247362855967/
// https://web.archive.org/web/20231119065253/https://developers.cloudflare.com/bots/concepts/ja3-fingerprint/
func SpoofJA3fingerprintWithProxy(ja3 string, userAgent string, proxy proxy.ContextDialer) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
// deep copy existing client while modifying http transport
ja3SpoofClient := &http.Client{
Transport: cycletls.NewTransportWithProxy(ja3, userAgent, proxy),
Timeout: chain.Client.Timeout,
CheckRedirect: chain.Client.CheckRedirect,
}
chain.SetOnceHTTPClient(ja3SpoofClient)
return nil
}
}
*/

View File

@@ -1,24 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofOrigin modifies the Origin header.
// If the upstream server returns a Vary header that includes Origin,
// changing this value may produce a different response.
func SpoofOrigin(url string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("origin", url)
return nil
}
}
// HideOrigin modifies the Origin header
// so that it points to the proxied site rather than the proxy itself
func HideOrigin() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("origin", px.Request.URL.String())
return nil
}
}

View File

@@ -1,38 +0,0 @@
package requestmodifiers
import (
"fmt"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
)
// SpoofReferrer modifies the referrer header.
// It is useful if the page can be accessed from a search engine
// or social media site, but not by browsing the website itself.
// if url is "", then the referrer header is removed.
func SpoofReferrer(url string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
// override document.referrer on the client side
// (document.referrer is read-only, so shadow it with a getter)
script := fmt.Sprintf(`Object.defineProperty(document, "referrer", {get: () => "%s"})`, url)
chain.AddOnceResponseModifications(
tx.InjectScriptBeforeDOMContentLoaded(script),
)
if url == "" {
chain.Request.Header.Del("referrer")
return nil
}
chain.Request.Header.Set("referrer", url)
return nil
}
}
// HideReferrer modifies the referrer header
// so that it is the original referrer, not the proxy
func HideReferrer() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("referrer", px.Request.URL.String())
return nil
}
}

View File

@@ -1,43 +0,0 @@
package requestmodifiers
import (
"fmt"
"math/rand"
"strings"
"time"
"ladder/proxychain"
)
// SpoofReferrerFromBaiduSearch modifies the referrer header
// pretending to be from a BaiduSearch
func SpoofReferrerFromBaiduSearch() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
// https://www.baidu.com/link?url=5biIeDvUIihawf3Zbbysach2Xn4H3w3FzO6LZKgSs-B5Yt4M4RUFikokOk5zetf2&wd=&eqid=9da80d8208009b8480000706655d5ed6
referrer := fmt.Sprintf("https://baidu.com/link?url=%s", generateRandomBaiduURL())
chain.Request.Header.Set("referrer", referrer)
chain.Request.Header.Set("sec-fetch-site", "cross-site")
chain.Request.Header.Set("sec-fetch-dest", "document")
chain.Request.Header.Set("sec-fetch-mode", "navigate")
return nil
}
}
// utility functions ==================
func generateRandomString(charset string, length int) string {
var seededRand *rand.Rand = rand.New(rand.NewSource(time.Now().UnixNano()))
var stringBuilder strings.Builder
for i := 0; i < length; i++ {
stringBuilder.WriteByte(charset[seededRand.Intn(len(charset))])
}
return stringBuilder.String()
}
func generateRandomBaiduURL() string {
const alphanumericCharset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
const hexCharset = "0123456789abcdef"
randomAlphanumeric := generateRandomString(alphanumericCharset, 30) // Length before "-"
randomHex := generateRandomString(hexCharset, 16) // Length of eqid
return randomAlphanumeric + "-" + "&wd=&eqid=" + randomHex
}
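
A runnable sketch of the referrer format these helpers produce; it copies generateRandomString in trimmed form, and the output is random rather than a real Baidu link ID:

```go
package main

import (
	"fmt"
	"math/rand"
	"strings"
)

// randomString is a trimmed copy of generateRandomString above
// (the global rand source is auto-seeded on modern Go).
func randomString(charset string, length int) string {
	var b strings.Builder
	for i := 0; i < length; i++ {
		b.WriteByte(charset[rand.Intn(len(charset))])
	}
	return b.String()
}

func main() {
	const alnum = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	const hex = "0123456789abcdef"
	link := randomString(alnum, 30) + "-" + "&wd=&eqid=" + randomString(hex, 16)
	fmt.Println("https://baidu.com/link?url=" + link)
}
```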

View File

@@ -1,20 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromBingSearch modifies the referrer header
// pretending to be from a bing search site
func SpoofReferrerFromBingSearch() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofReferrer("https://www.bing.com/"),
SetRequestHeader("sec-fetch-site", "cross-site"),
SetRequestHeader("sec-fetch-dest", "document"),
SetRequestHeader("sec-fetch-mode", "navigate"),
ModifyQueryParams("utm_source", "bing"),
)
return nil
}
}

View File

@@ -1,20 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromGoogleSearch modifies the referrer header
// pretending to be from a google search site
func SpoofReferrerFromGoogleSearch() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofReferrer("https://www.google.com"),
SetRequestHeader("sec-fetch-site", "cross-site"),
SetRequestHeader("sec-fetch-dest", "document"),
SetRequestHeader("sec-fetch-mode", "navigate"),
ModifyQueryParams("utm_source", "google"),
)
return nil
}
}

View File

@@ -1,21 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromLinkedInPost modifies the referrer header
// pretending to be from a linkedin post
func SpoofReferrerFromLinkedInPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofReferrer("https://www.linkedin.com/"),
SetRequestHeader("sec-fetch-site", "cross-site"),
SetRequestHeader("sec-fetch-dest", "document"),
SetRequestHeader("sec-fetch-mode", "navigate"),
ModifyQueryParams("utm_campaign", "post"),
ModifyQueryParams("utm_medium", "web"),
)
return nil
}
}

View File

@@ -1,23 +0,0 @@
package requestmodifiers
import (
"fmt"
"ladder/proxychain"
)
// SpoofReferrerFromNaverSearch modifies the referrer header
// pretending to be from a Naver search (popular in South Korea)
func SpoofReferrerFromNaverSearch() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
referrer := fmt.Sprintf(
"https://search.naver.com/search.naver?where=nexearch&sm=top_hty&fbm=0&ie=utf8&query=%s",
chain.Request.URL.Host,
)
chain.Request.Header.Set("referrer", referrer)
chain.Request.Header.Set("sec-fetch-site", "cross-site")
chain.Request.Header.Set("sec-fetch-dest", "document")
chain.Request.Header.Set("sec-fetch-mode", "navigate")
return nil
}
}

View File

@@ -1,17 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromPinterestPost modifies the referrer header
// pretending to be from a pinterest post
func SpoofReferrerFromPinterestPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.Request.Header.Set("referrer", "https://www.pinterest.com/")
chain.Request.Header.Set("sec-fetch-site", "cross-site")
chain.Request.Header.Set("sec-fetch-dest", "document")
chain.Request.Header.Set("sec-fetch-mode", "navigate")
return nil
}
}

View File

@@ -1,16 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromQQPost modifies the referrer header
// pretending to be from a QQ post (popular social media in China)
func SpoofReferrerFromQQPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.Request.Header.Set("referrer", "https://new.qq.com/")
chain.Request.Header.Set("sec-fetch-site", "cross-site")
chain.Request.Header.Set("sec-fetch-dest", "document")
return nil
}
}

View File

@@ -1,17 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromRedditPost modifies the referrer header
// pretending to be from a reddit post
func SpoofReferrerFromRedditPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.Request.Header.Set("referrer", "https://www.reddit.com/")
chain.Request.Header.Set("sec-fetch-site", "cross-site")
chain.Request.Header.Set("sec-fetch-dest", "document")
chain.Request.Header.Set("sec-fetch-mode", "navigate")
return nil
}
}

View File

@@ -1,19 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromTumblrPost modifies the referrer header
// pretending to be from a tumblr post
func SpoofReferrerFromTumblrPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofReferrer("https://www.tumblr.com/"),
SetRequestHeader("sec-fetch-site", "cross-site"),
SetRequestHeader("sec-fetch-dest", "document"),
SetRequestHeader("sec-fetch-mode", "navigate"),
)
return nil
}
}

View File

@@ -1,19 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromTwitterPost modifies the referrer header
// pretending to be from a twitter post
func SpoofReferrerFromTwitterPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofReferrer("https://t.co/"),
SetRequestHeader("sec-fetch-site", "cross-site"),
SetRequestHeader("sec-fetch-dest", "document"),
SetRequestHeader("sec-fetch-mode", "navigate"),
)
return nil
}
}

View File

@@ -1,19 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofReferrerFromVkontaktePost modifies the referrer header
// pretending to be from a vkontakte post (popular in Russia)
func SpoofReferrerFromVkontaktePost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceRequestModifications(
SpoofReferrer("https://away.vk.com/"),
SetRequestHeader("sec-fetch-site", "cross-site"),
SetRequestHeader("sec-fetch-dest", "document"),
SetRequestHeader("sec-fetch-mode", "navigate"),
)
return nil
}
}

View File

@@ -1,21 +0,0 @@
package requestmodifiers
import (
"fmt"
"math/rand"
"ladder/proxychain"
)
// SpoofReferrerFromWeiboPost modifies the referrer header
// pretending to be from a Weibo post (popular in China)
func SpoofReferrerFromWeiboPost() proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
referrer := fmt.Sprintf("http://weibo.com/u/%d", rand.Intn(90001))
chain.Request.Header.Set("referrer", referrer)
chain.Request.Header.Set("sec-fetch-site", "cross-site")
chain.Request.Header.Set("sec-fetch-dest", "document")
chain.Request.Header.Set("sec-fetch-mode", "navigate")
return nil
}
}

View File

@@ -1,40 +0,0 @@
package requestmodifiers
import (
_ "embed"
"strings"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
)
// https://github.com/faisalman/ua-parser-js/tree/master
// update using:
// git submodule update --remote --merge
//
//go:embed vendor/ua-parser-js/dist/ua-parser.min.js
var UAParserJS string
// note: spoof_user_agent.js has a dependency on ua-parser.min.js
// ua-parser.min.js should be loaded first.
//
//go:embed spoof_user_agent.js
var spoofUserAgentJS string
// SpoofUserAgent modifies the user agent
func SpoofUserAgent(ua string) proxychain.RequestModification {
return func(chain *proxychain.ProxyChain) error {
// modify ua headers
chain.AddOnceRequestModifications(
SetRequestHeader("user-agent", ua),
)
script := strings.ReplaceAll(spoofUserAgentJS, "{{USER_AGENT}}", ua)
chain.AddOnceResponseModifications(
tx.InjectScriptBeforeDOMContentLoaded(script),
tx.InjectScriptBeforeDOMContentLoaded(UAParserJS),
)
return nil
}
}

View File

@@ -1,100 +0,0 @@
(() => {
const UA = "{{USER_AGENT}}";
// monkey-patch navigator.userAgent
{
const { get } = Object.getOwnPropertyDescriptor(
Navigator.prototype,
"userAgent",
);
Object.defineProperty(Navigator.prototype, "userAgent", {
get: new Proxy(get, {
apply() {
return UA;
},
}),
});
}
// monkey-patch navigator.appVersion
{
const { get } = Object.getOwnPropertyDescriptor(
Navigator.prototype,
"appVersion",
);
Object.defineProperty(Navigator.prototype, "appVersion", {
get: new Proxy(get, {
apply() {
return UA.replace("Mozilla/", "");
},
}),
});
}
// monkey-patch navigator.UserAgentData
// Assuming UAParser is already loaded and available
function spoofUserAgentData(uaString) {
// Parse the user-agent string
const parser = new UAParser(uaString);
const parsedData = parser.getResult();
// Extracted data
const platform = parsedData.os.name;
const browserName = parsedData.browser.name;
const browserMajorVersion = parsedData.browser.major;
const isMobile =
/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(
uaString,
);
// Overwrite navigator.userAgentData
self.NavigatorUAData = self.NavigatorUAData || class NavigatorUAData {
brands = [{
brand: browserName,
version: browserMajorVersion,
}];
mobile = isMobile;
platform = platform;
toJSON() {
return {
brands: this.brands,
mobile: this.mobile,
platform: this.platform,
};
}
getHighEntropyValues(hints) {
const result = this.toJSON();
// Add additional high entropy values based on hints
// Modify these as per your requirements
if (hints.includes("architecture")) {
result.architecture = "x86";
}
if (hints.includes("bitness")) {
result.bitness = "64";
}
if (hints.includes("model")) {
result.model = "";
}
if (hints.includes("platformVersion")) {
result.platformVersion = "10.0.0"; // Example value
}
if (hints.includes("uaFullVersion")) {
result.uaFullVersion = browserMajorVersion;
}
if (hints.includes("fullVersionList")) {
result.fullVersionList = this.brands;
}
return Promise.resolve(result);
}
};
// Apply the monkey patch
Object.defineProperty(navigator, "userAgentData", {
value: new self.NavigatorUAData(),
writable: false,
});
}
spoofUserAgentData(UA);
// TODO: use hideMonkeyPatch to hide overrides
})();

View File

@@ -1,14 +0,0 @@
package requestmodifiers
import (
"ladder/proxychain"
)
// SpoofXForwardedFor modifies the X-Forwarded-For header
// in some cases, a forward proxy may interpret this as the source IP
func SpoofXForwardedFor(ip string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("X-FORWARDED-FOR", ip)
return nil
}
}

View File

@@ -1,56 +0,0 @@
package api
import (
"bytes"
"encoding/json"
"errors"
"io"
"reflect"
)
type Error struct {
Success bool `json:"success"`
Error ErrorDetails `json:"error"`
}
type ErrorDetails struct {
Message string `json:"message"`
Type string `json:"type"`
Cause string `json:"cause"`
}
func CreateAPIErrReader(err error) io.ReadCloser {
if err == nil {
return io.NopCloser(bytes.NewBufferString(`{"success":false, "error": "No error provided"}`))
}
baseErr := getBaseError(err)
apiErr := Error{
Success: false,
Error: ErrorDetails{
Message: err.Error(),
Type: reflect.TypeOf(err).String(),
Cause: baseErr.Error(),
},
}
// Serialize the APIError into JSON
jsonData, jsonErr := json.Marshal(apiErr)
if jsonErr != nil {
return io.NopCloser(bytes.NewBufferString(`{"success":false, "error": "Failed to serialize error"}`))
}
// Return the JSON data as an io.ReadCloser
return io.NopCloser(bytes.NewBuffer(jsonData))
}
func getBaseError(err error) error {
for {
unwrapped := errors.Unwrap(err)
if unwrapped == nil {
return err
}
err = unwrapped
}
}

View File

@@ -1,174 +0,0 @@
package api
import (
"github.com/go-shiori/dom"
"github.com/markusmobius/go-trafilatura"
"golang.org/x/net/html"
)
// =======================================================================================
// credit @joncrangle https://github.com/everywall/ladder/issues/38#issuecomment-1831252934
type ImageContent struct {
Type string `json:"type"`
URL string `json:"url"`
Alt string `json:"alt"`
Caption string `json:"caption"`
}
type LinkContent struct {
Type string `json:"type"`
Href string `json:"href"`
Data string `json:"data"`
}
type TextContent struct {
Type string `json:"type"`
Data string `json:"data"`
}
type ListContent struct {
Type string `json:"type"`
ListItems []ListItemContent `json:"listItems"`
}
type ListItemContent struct {
Data string `json:"data"`
}
type JSONDocument struct {
Success bool `json:"success"`
Error ErrorDetails `json:"error"`
Metadata struct {
Title string `json:"title"`
Author string `json:"author"`
URL string `json:"url"`
Hostname string `json:"hostname"`
Image string `json:"image"`
Description string `json:"description"`
Sitename string `json:"sitename"`
Date string `json:"date"`
Categories []string `json:"categories"`
Tags []string `json:"tags"`
License string `json:"license"`
} `json:"metadata"`
Content []interface{} `json:"content"`
Comments string `json:"comments"`
}
func ExtractResultToAPIResponse(extract *trafilatura.ExtractResult) *JSONDocument {
jsonDoc := &JSONDocument{}
// Populate success
jsonDoc.Success = true
// Populate metadata
jsonDoc.Metadata.Title = extract.Metadata.Title
jsonDoc.Metadata.Author = extract.Metadata.Author
jsonDoc.Metadata.URL = extract.Metadata.URL
jsonDoc.Metadata.Hostname = extract.Metadata.Hostname
jsonDoc.Metadata.Description = extract.Metadata.Description
jsonDoc.Metadata.Image = extract.Metadata.Image
jsonDoc.Metadata.Sitename = extract.Metadata.Sitename
jsonDoc.Metadata.Date = extract.Metadata.Date.Format("2006-01-02")
jsonDoc.Metadata.Categories = extract.Metadata.Categories
jsonDoc.Metadata.Tags = extract.Metadata.Tags
jsonDoc.Metadata.License = extract.Metadata.License
// Populate content
if extract.ContentNode != nil {
jsonDoc.Content = parseContent(extract.ContentNode)
}
// Populate comments
if extract.CommentsNode != nil {
jsonDoc.Comments = dom.OuterHTML(extract.CommentsNode)
}
return jsonDoc
}
func parseContent(node *html.Node) []interface{} {
var content []interface{}
for child := node.FirstChild; child != nil; child = child.NextSibling {
switch child.Data {
case "img":
image := ImageContent{
Type: "img",
URL: dom.GetAttribute(child, "src"),
Alt: dom.GetAttribute(child, "alt"),
Caption: dom.GetAttribute(child, "caption"),
}
content = append(content, image)
case "a":
link := LinkContent{
Type: "a",
Href: dom.GetAttribute(child, "href"),
Data: dom.InnerText(child),
}
content = append(content, link)
case "h1":
text := TextContent{
Type: "h1",
Data: dom.InnerText(child),
}
content = append(content, text)
case "h2":
text := TextContent{
Type: "h2",
Data: dom.InnerText(child),
}
content = append(content, text)
case "h3":
text := TextContent{
Type: "h3",
Data: dom.InnerText(child),
}
content = append(content, text)
case "h4":
text := TextContent{
Type: "h4",
Data: dom.InnerText(child),
}
content = append(content, text)
case "h5":
text := TextContent{
Type: "h5",
Data: dom.InnerText(child),
}
content = append(content, text)
case "ul", "ol":
list := ListContent{
Type: child.Data,
ListItems: []ListItemContent{},
}
for listItem := child.FirstChild; listItem != nil; listItem = listItem.NextSibling {
if listItem.Data == "li" {
listItemContent := ListItemContent{
Data: dom.InnerText(listItem),
}
list.ListItems = append(list.ListItems, listItemContent)
}
}
content = append(content, list)
default:
text := TextContent{
Type: "p",
Data: dom.InnerText(child),
}
content = append(content, text)
}
}
return content
}
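
To make the emitted shape concrete, a small sketch that marshals a hypothetical content list using local stand-ins for TextContent and LinkContent; the article data is invented for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Local stand-ins mirroring the TextContent and LinkContent structs above.
	type textContent struct {
		Type string `json:"type"`
		Data string `json:"data"`
	}
	type linkContent struct {
		Type string `json:"type"`
		Href string `json:"href"`
		Data string `json:"data"`
	}
	content := []interface{}{
		textContent{Type: "h2", Data: "Example heading"},
		textContent{Type: "p", Data: "Example paragraph."},
		linkContent{Type: "a", Href: "https://example.com", Data: "example link"},
	}
	out, err := json.MarshalIndent(content, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```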

View File

@@ -1,47 +0,0 @@
package responsemodifiers
import (
"bytes"
"encoding/json"
"io"
"github.com/markusmobius/go-trafilatura"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/api"
)
// APIContent creates a JSON representation of the article and returns it as an API response.
func APIContent() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// we set content-type twice here, in case another response modifier
// tries to forward over the original headers
chain.Context.Set("content-type", "application/json")
chain.Response.Header.Set("content-type", "application/json")
// extract dom contents
opts := trafilatura.Options{
IncludeImages: true,
IncludeLinks: true,
// FavorPrecision: true,
FallbackCandidates: nil, // TODO: https://github.com/markusmobius/go-trafilatura/blob/main/examples/chained/main.go
// implement fallbacks from "github.com/markusmobius/go-domdistiller" and "github.com/go-shiori/go-readability"
OriginalURL: chain.Request.URL,
}
result, err := trafilatura.Extract(chain.Response.Body, opts)
if err != nil {
chain.Response.Body = api.CreateAPIErrReader(err)
return nil
}
res := api.ExtractResultToAPIResponse(result)
jsonData, err := json.MarshalIndent(res, "", " ")
if err != nil {
return err
}
chain.Response.Body = io.NopCloser(bytes.NewReader(jsonData))
return nil
}
}

View File

@@ -1,70 +0,0 @@
package responsemodifiers
import (
"encoding/json"
"fmt"
"io"
"net/url"
"testing"
"ladder/proxychain/responsemodifiers/api"
)
func TestCreateAPIErrReader(t *testing.T) {
_, baseErr := url.Parse("://this is an invalid url")
wrappedErr := fmt.Errorf("wrapped error: %w", baseErr)
readCloser := api.CreateAPIErrReader(wrappedErr)
defer readCloser.Close()
// Read and unmarshal the JSON output
data, err := io.ReadAll(readCloser)
if err != nil {
t.Fatalf("Failed to read from ReadCloser: %v", err)
}
fmt.Println(string(data))
var apiErr api.Error
err = json.Unmarshal(data, &apiErr)
if err != nil {
t.Fatalf("Failed to unmarshal JSON: %v", err)
}
// Verify the structure of the APIError
if apiErr.Success {
t.Errorf("Expected Success to be false, got true")
}
if apiErr.Error.Message != wrappedErr.Error() {
t.Errorf("Expected error message to be '%v', got '%v'", wrappedErr.Error(), apiErr.Error.Message)
}
}
func TestCreateAPIErrReader2(t *testing.T) {
_, baseErr := url.Parse("://this is an invalid url")
readCloser := api.CreateAPIErrReader(baseErr)
defer readCloser.Close()
// Read and unmarshal the JSON output
data, err := io.ReadAll(readCloser)
if err != nil {
t.Fatalf("Failed to read from ReadCloser: %v", err)
}
fmt.Println(string(data))
var apiErr api.Error
err = json.Unmarshal(data, &apiErr)
if err != nil {
t.Fatalf("Failed to unmarshal JSON: %v", err)
}
// Verify the structure of the APIError
if apiErr.Success {
t.Errorf("Expected Success to be false, got true")
}
if apiErr.Error.Message != baseErr.Error() {
t.Errorf("Expected error message to be '%v', got '%v'", baseErr.Error(), apiErr.Error.Message)
}
}

View File

@@ -1,42 +0,0 @@
package responsemodifiers
import (
_ "embed"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
)
//go:embed block_element_removal.js
var blockElementRemoval string
// BlockElementRemoval prevents paywall javascript from removing a
// particular element by detecting the removal, then immediately reinserting it.
// This is useful when a page will return a "fake" 404, after flashing the content briefly.
// If the /outline/ API works, but the regular API doesn't, try this modifier.
func BlockElementRemoval(cssSelector string) proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// don't add rewriter if it's not even html
ct := chain.Response.Header.Get("content-type")
if !strings.HasPrefix(ct, "text/html") {
return nil
}
params := map[string]string{
// ie: "div.article-content"
"{{CSS_SELECTOR}}": cssSelector,
}
rr := rewriters.NewScriptInjectorRewriterWithParams(
blockElementRemoval,
rewriters.BeforeDOMContentLoaded,
params,
)
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
chain.Response.Body = htmlRewriter
return nil
}
}

View File

@@ -1,35 +0,0 @@
/**
* Monitors and restores specific DOM elements if they are removed.
*
* This self-invoking function creates a MutationObserver to watch for removal of elements matching
* "{{CSS_SELECTOR}}". If such an element is removed, it logs the event and attempts to restore the
* element after a 50ms delay. The restored element is reinserted at its original location or prepended
* to the document body if the original location is unavailable.
*/
(function() {
function handleMutation(mutationList) {
for (const mutation of mutationList) {
if (mutation.type === "childList") {
for (const node of Array.from(mutation.removedNodes)) {
if (node.outerHTML && node.querySelector("{{CSS_SELECTOR}}")) {
console.log(
"proxychain: prevented removal of element containing 'article-content'",
);
console.log(node.outerHTML);
setTimeout(() => {
let e = document.querySelector("{{CSS_SELECTOR}}");
if (e != null) {
e.replaceWith(node);
} else {
document.body.prepend(node);
}
}, 50);
}
}
}
}
}
const observer = new MutationObserver(handleMutation);
observer.observe(document, { childList: true, subtree: true });
})();

View File

@@ -1,21 +0,0 @@
package responsemodifiers
import (
"ladder/proxychain"
)
// BypassCORS modifies response headers to prevent the browser
// from enforcing any CORS restrictions. This should run at the end of the chain.
func BypassCORS() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceResponseModifications(
SetResponseHeader("Access-Control-Allow-Origin", "*"),
SetResponseHeader("Access-Control-Expose-Headers", "*"),
SetResponseHeader("Access-Control-Allow-Credentials", "true"),
SetResponseHeader("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, HEAD, OPTIONS, PATCH"),
SetResponseHeader("Access-Control-Allow-Headers", "*"),
DeleteResponseHeader("X-Frame-Options"),
)
return nil
}
}

View File

@@ -1,30 +0,0 @@
package responsemodifiers
import (
"ladder/proxychain"
)
// TODO: handle edge case where CSP is specified in meta tag:
// <meta http-equiv="Content-Security-Policy" content="default-src 'self'">
// BypassContentSecurityPolicy modifies response headers to prevent the browser
// from enforcing any CSP restrictions. This should run at the end of the chain.
func BypassContentSecurityPolicy() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddOnceResponseModifications(
DeleteResponseHeader("Content-Security-Policy"),
DeleteResponseHeader("Content-Security-Policy-Report-Only"),
DeleteResponseHeader("X-Content-Security-Policy"),
DeleteResponseHeader("X-WebKit-CSP"),
)
return nil
}
}
// SetContentSecurityPolicy modifies response headers to a specific CSP
func SetContentSecurityPolicy(csp string) proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
chain.Response.Header.Set("Content-Security-Policy", csp)
return nil
}
}

View File

@@ -1,53 +0,0 @@
package responsemodifiers
import (
"fmt"
"net/url"
"strings"
"ladder/proxychain"
)
var forwardBlacklist map[string]bool
func init() {
forwardBlacklist = map[string]bool{
"content-length": true,
"content-encoding": true,
"transfer-encoding": true,
"strict-transport-security": true,
"connection": true,
"keep-alive": true,
}
}
// ForwardResponseHeaders forwards the response headers from the upstream server to the client
func ForwardResponseHeaders() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// fmt.Println(chain.Response.Header)
for uname, headers := range chain.Response.Header {
name := strings.ToLower(uname)
if forwardBlacklist[name] {
continue
}
// patch location header to forward to proxy instead
if name == "location" {
u, err := url.Parse(chain.Context.BaseURL())
if err != nil {
return err
}
newLocation := fmt.Sprintf("%s://%s/%s", u.Scheme, u.Host, headers[0])
chain.Context.Set("location", newLocation)
}
// forward headers
for _, value := range headers {
chain.Context.Set(name, value)
}
}
return nil
}
}
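
A minimal sketch of the Location rewrite above, with hypothetical proxy and upstream URLs:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	base, err := url.Parse("http://localhost:8080") // hypothetical proxy base URL
	if err != nil {
		panic(err)
	}
	upstreamLocation := "https://example.com/login" // hypothetical upstream Location header
	// Point the redirect back through the proxy, as ForwardResponseHeaders does.
	newLocation := fmt.Sprintf("%s://%s/%s", base.Scheme, base.Host, upstreamLocation)
	fmt.Println(newLocation) // http://localhost:8080/https://example.com/login
}
```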

View File

@@ -1,189 +0,0 @@
package responsemodifiers
import (
"bytes"
"embed"
"fmt"
"html/template"
"io"
"log"
"net/url"
"strings"
"ladder/proxychain"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
//"github.com/go-shiori/dom"
"github.com/markusmobius/go-trafilatura"
)
//go:embed generate_readable_outline.html
var templateFS embed.FS
// GenerateReadableOutline creates a reader-friendly, distilled representation of the article.
// This is a reliable way of bypassing soft-paywalled articles, where the content is hidden, but still present in the DOM.
func GenerateReadableOutline() proxychain.ResponseModification {
// parse the template only once, and reuse it for subsequent calls
f := "generate_readable_outline.html"
tmpl, err := template.ParseFS(templateFS, f)
if err != nil {
panic(fmt.Errorf("tx.GenerateReadableOutline Error: %s not found", f))
}
return func(chain *proxychain.ProxyChain) error {
// ===========================================================
// 1. extract dom contents using reading mode algo
// ===========================================================
opts := trafilatura.Options{
IncludeImages: false,
IncludeLinks: true,
FavorRecall: true,
Deduplicate: true,
FallbackCandidates: nil, // TODO: https://github.com/markusmobius/go-trafilatura/blob/main/examples/chained/main.go
// implement fallbacks from "github.com/markusmobius/go-domdistiller" and "github.com/go-shiori/go-readability"
OriginalURL: chain.Request.URL,
}
extract, err := trafilatura.Extract(chain.Response.Body, opts)
if err != nil {
return err
}
// ============================================================================
// 2. render generate_readable_outline.html template using metadata from step 1
// ============================================================================
// render DOM to string without H1 title
removeFirstH1(extract.ContentNode)
// rewrite all links to stay on /outline/ path
rewriteHrefLinks(extract.ContentNode, chain.Context.BaseURL(), chain.APIPrefix)
var b bytes.Buffer
html.Render(&b, extract.ContentNode)
distilledHTML := b.String()
// populate template parameters
data := map[string]interface{}{
"Success": true,
"Image": extract.Metadata.Image,
"Description": extract.Metadata.Description,
"Sitename": extract.Metadata.Sitename,
"Hostname": extract.Metadata.Hostname,
"Url": "/" + chain.Request.URL.String(),
"Title": extract.Metadata.Title, // todo: modify CreateReadableDocument so we don't have <h1> titles duplicated?
"Date": extract.Metadata.Date.String(),
"Author": createWikipediaSearchLinks(extract.Metadata.Author),
//"Author": extract.Metadata.Author,
"Body": distilledHTML,
}
// ============================================================================
// 3. queue sending the response back to the client by replacing the response body
// (the response body will be read as a stream in proxychain.Execute() later on.)
// ============================================================================
pr, pw := io.Pipe() // pipe io.writer contents into io.reader
// Use a goroutine for writing to the pipe so we don't deadlock the request
go func() {
defer pw.Close()
err := tmpl.Execute(pw, data) // <- render template
if err != nil {
log.Printf("WARN: GenerateReadableOutline template rendering error: %s\n", err)
}
}()
chain.Context.Set("content-type", "text/html")
chain.Response.Body = pr // <- replace response body reader with our new reader from pipe
return nil
}
}
// =============================================
// DOM Rendering helpers
// =============================================
func removeFirstH1(n *html.Node) {
var recurse func(*html.Node) bool
recurse = func(n *html.Node) bool {
if n.Type == html.ElementNode && n.DataAtom == atom.H1 {
return true // Found the first H1, return true to stop
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
if recurse(c) {
n.RemoveChild(c)
return false // Removed first H1, no need to continue
}
}
return false
}
recurse(n)
}
func rewriteHrefLinks(n *html.Node, baseURL string, apiPath string) {
u, err := url.Parse(baseURL)
if err != nil {
log.Printf("GenerateReadableOutline :: rewriteHrefLinks error - %s\n", err)
}
apiPath = strings.Trim(apiPath, "/")
proxyURL := fmt.Sprintf("%s://%s", u.Scheme, u.Host)
newProxyURL := fmt.Sprintf("%s/%s", proxyURL, apiPath)
var recurse func(*html.Node) bool
recurse = func(n *html.Node) bool {
if n.Type == html.ElementNode && n.DataAtom == atom.A {
for i := range n.Attr {
attr := n.Attr[i]
if attr.Key != "href" {
continue
}
// rewrite url on a.href: http://localhost:8080/https://example.com -> http://localhost:8080/outline/https://example.com
attr.Val = strings.Replace(attr.Val, proxyURL, newProxyURL, 1)
// rewrite relative URLs too
if strings.HasPrefix(attr.Val, "/") {
attr.Val = fmt.Sprintf("/%s%s", apiPath, attr.Val)
}
n.Attr[i].Val = attr.Val
log.Println(attr.Val)
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
recurse(c)
}
return false
}
recurse(n)
}
// createWikipediaSearchLinks takes in comma or semicolon separated terms,
// then turns them into <a> links searching for the term.
func createWikipediaSearchLinks(searchTerms string) string {
semiColonSplit := strings.Split(searchTerms, ";")
var links []string
for i, termGroup := range semiColonSplit {
commaSplit := strings.Split(termGroup, ",")
for _, term := range commaSplit {
trimmedTerm := strings.TrimSpace(term)
if trimmedTerm == "" {
continue
}
encodedTerm := url.QueryEscape(trimmedTerm)
wikiURL := fmt.Sprintf("https://en.wikipedia.org/w/index.php?search=%s", encodedTerm)
link := fmt.Sprintf("<a href=\"%s\">%s</a>", wikiURL, trimmedTerm)
links = append(links, link)
}
// If it's not the last element in semiColonSplit, add a comma to the last link
if i < len(semiColonSplit)-1 {
links[len(links)-1] = links[len(links)-1] + ","
}
}
return strings.Join(links, " ")
}
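
A standalone sketch of the per-term link construction used by createWikipediaSearchLinks; the author name is hypothetical:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	term := "Jane Doe" // hypothetical author name
	wikiURL := fmt.Sprintf("https://en.wikipedia.org/w/index.php?search=%s", url.QueryEscape(term))
	fmt.Printf("<a href=%q>%s</a>\n", wikiURL, term)
	// <a href="https://en.wikipedia.org/w/index.php?search=Jane+Doe">Jane Doe</a>
}
```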

View File

@@ -1,380 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="/styles.css" />
<script src="/script.js" defer></script>
<script>
const handleThemeChange = () => {
let theme = localStorage.getItem("theme");
if (theme === null) {
localStorage.setItem("theme", "system");
theme = "system";
}
if (
theme === "dark" ||
(theme === "system" &&
window.matchMedia("(prefers-color-scheme: dark)").matches)
) {
document.documentElement.classList.add("dark");
} else {
document.documentElement.classList.remove("dark");
}
};
handleThemeChange();
</script>
<title>ladder | {{.Title}}</title>
</head>
<body
class="antialiased bg-white dark:bg-slate-900 text-slate-900 dark:text-slate-200"
>
<div class="flex flex-col gap-4 max-w-3xl mx-4 lg:mx-auto pt-10">
<div class="flex justify-between place-items-center">
<div
class="hover:drop-shadow-[0_0px_4px_rgba(122,167,209,.3)] transition-colors duration-300 focus:outline-none focus:ring focus:border-[#7AA7D1] ring-offset-2"
>
<div class="flex">
<a
href="/"
class="flex -ml-2 h-8 font-extrabold tracking-tight hover:no-underline focus:outline-none focus:ring focus:border-[#7AA7D1] ring-offset-2"
>
<svg
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
viewBox="0 0 512 512"
class="h-8 focus:outline-none focus:ring focus:border-[#7AA7D1] ring-offset-2"
>
<path
fill="#7AA7D1"
d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"
/>
<path
fill="#7AA7D1"
d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.680 111.990 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"
/>
</svg>
</a>
<a
href="/https://{{.Hostname}}"
class="flex ml-1 h-8 font-extrabold tracking-tight hover:no-underline focus:outline-none focus:ring focus:border-[#7AA7D1] ring-offset-2"
>
<span class="text-3xl mr-1 text-[#7AA7D1] leading-8 align-middle">{{.Sitename}}</span>
</a>
</div>
</div>
<div class="flex justify-center z-10">
<div class="relative" id="dropdown">
<button
aria-expanded="closed"
onclick="toggleDropdown()"
type="button"
class="inline-flex items-center justify-center whitespace-nowrap rounded-full h-12 px-4 py-2 text-sm font-medium text-slate-600 dark:text-slate-400 ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-white dark:bg-slate-900 hover:bg-slate-200 dark:hover:bg-slate-700 hover:text-slate-500 dark:hover:text-slate-200"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<path
d="M12.22 2h-.44a2 2 0 0 0-2 2v.18a2 2 0 0 1-1 1.73l-.43.25a2 2 0 0 1-2 0l-.15-.08a2 2 0 0 0-2.73.73l-.22.38a2 2 0 0 0 .73 2.73l.15.1a2 2 0 0 1 1 1.72v.51a2 2 0 0 1-1 1.74l-.15.09a2 2 0 0 0-.73 2.73l.22.38a2 2 0 0 0 2.73.73l.15-.08a2 2 0 0 1 2 0l.43.25a2 2 0 0 1 1 1.73V20a2 2 0 0 0 2 2h.44a2 2 0 0 0 2-2v-.18a2 2 0 0 1 1-1.73l.43-.25a2 2 0 0 1 2 0l.15.08a2 2 0 0 0 2.73-.73l.22-.39a2 2 0 0 0-.73-2.73l-.15-.08a2 2 0 0 1-1-1.74v-.5a2 2 0 0 1 1-1.74l.15-.09a2 2 0 0 0 .73-2.73l-.22-.38a2 2 0 0 0-2.73-.73l-.15.08a2 2 0 0 1-2 0l-.43-.25a2 2 0 0 1-1-1.73V4a2 2 0 0 0-2-2z"
/>
<circle cx="12" cy="12" r="3" />
</svg>
</button>
<div
id="dropdown_panel"
class="hidden absolute right-0 mt-2 w-52 rounded-md bg-white dark:bg-slate-900 shadow-md border border-slate-400 dark:border-slate-700"
>
<div
class="flex flex-col gap-2 w-full first-of-type:rounded-t-md last-of-type:rounded-b-md px-4 py-2.5 text-left text-sm"
>
Font family
<div class="grid grid-cols-2 gap-2">
<div>
<input
type="radio"
name="font"
id="sans-serif"
value="sans-serif"
class="peer hidden"
checked
/>
<label
for="sans-serif"
tabindex="0"
class="flex items-center justify-center h-10 cursor-pointer select-none rounded-md p-2 text-sm font-sans text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>Sans-serif</label
>
</div>
<div>
<input
type="radio"
name="font"
id="serif"
value="serif"
class="peer hidden"
/>
<label
for="serif"
tabindex="0"
class="flex items-center justify-center h-10 cursor-pointer select-none rounded-md p-2 text-sm font-serif text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>Serif</label
>
</div>
</div>
</div>
<div
class="shrink-0 bg-slate-400 dark:bg-slate-700 h-[1px] w-full"
></div>
<div
class="flex flex-col gap-2 w-full first-of-type:rounded-t-md last-of-type:rounded-b-md px-4 py-2.5 text-left text-sm"
>
Font size
<div class="grid grid-cols-4 gap-2">
<div>
<input
type="radio"
name="fontsize"
id="sm"
value="text-sm"
class="peer hidden"
/>
<label
for="sm"
tabindex="0"
title="Small"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-sm text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>sm</label
>
</div>
<div>
<input
type="radio"
name="fontsize"
id="base"
value="text-base"
class="peer hidden"
checked
/>
<label
for="base"
tabindex="0"
title="Medium"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-base text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>md</label
>
</div>
<div>
<input
type="radio"
name="fontsize"
id="lg"
value="text-lg"
class="peer hidden"
/>
<label
for="lg"
tabindex="0"
title="Large"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-lg text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>lg</label
>
</div>
</div>
</div>
<div
class="shrink-0 bg-slate-200 dark:bg-slate-700 h-[1px] w-full"
></div>
<div
class="flex flex-col gap-2 w-full first-of-type:rounded-t-md last-of-type:rounded-b-md px-4 py-2.5 text-left text-sm"
>
Appearance
<div class="grid grid-cols-4 gap-2">
<div>
<input
type="radio"
name="theme"
id="light"
value="light"
class="peer hidden"
/>
<label
for="light"
tabindex="0"
title="Light"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-sm text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<circle cx="12" cy="12" r="4" />
<path d="M12 2v2" />
<path d="M12 20v2" />
<path d="m4.93 4.93 1.41 1.41" />
<path d="m17.66 17.66 1.41 1.41" />
<path d="M2 12h2" />
<path d="M20 12h2" />
<path d="m6.34 17.66-1.41 1.41" />
<path d="m19.07 4.93-1.41 1.41" />
</svg>
</label>
</div>
<div>
<input
type="radio"
name="theme"
id="dark"
value="dark"
class="peer hidden"
/>
<label
for="dark"
tabindex="0"
title="Dark"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-base text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<path d="M12 3a6 6 0 0 0 9 9 9 9 0 1 1-9-9Z" />
</svg>
</label>
</div>
<div>
<input
type="radio"
name="theme"
id="system"
value="system"
class="peer hidden"
checked
/>
<label
for="system"
tabindex="0"
title="System preference"
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-lg text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
>
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="h-5 w-5"
>
<path d="M12 8a2.83 2.83 0 0 0 4 4 4 4 0 1 1-4-4" />
<path d="M12 2v2" />
<path d="M12 20v2" />
<path d="m4.9 4.9 1.4 1.4" />
<path d="m17.7 17.7 1.4 1.4" />
<path d="M2 12h2" />
<path d="M20 12h2" />
<path d="m6.3 17.7-1.4 1.4" />
<path d="m19.1 4.9-1.4 1.4" />
</svg>
</label>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<main class="flex flex-col space-y-3">
{{if not .Success}}
<h1>
Error
</h1>
<p>
There was a problem querying
<a href="{{.Params}}">{{.Params}}</a>
</p>
<code class="text-red-500 dark:text-red-400">
{{.Error}}
</code>
{{else}}
<div class="flex flex-col gap-1 mt-3">
<h1>
<a href="{{.Url}}" class="text-slate-900 dark:text-slate-200"> {{.Title}} </a>
</h1>
{{if ne .Date ""}}
<small
class="text-sm font-medium leading-none text-slate-600 dark:text-slate-400"
>{{.Date}}</small
>
{{end}}
{{if ne .Author ""}}
<small
class="text-sm font-medium leading-none text-slate-600 dark:text-slate-400"
>{{.Author}}</small
>
{{end}}
</div>
<div class="flex flex-col space-y-3">
<div>
<div class="grid grid-cols-1 justify-items-center">
<div><img src="{{.Image}}" alt="{{.Description}}" class="h-auto w-auto object-cover max-w-full mx-auto rounded-md shadow-md dark:shadow-slate-700"/></div>
<div class="mt-2 text-sm text-slate-600 dark:text-slate-400">{{.Description}}</div>
</div>
</div>
<div>{{.Body}}</div>
</div>
{{end}}
</main>
<div class="my-2"></div>
<footer class="mx-4 text-center text-slate-600 dark:text-slate-400">
<p>
Code Licensed Under GPL v3.0 |
<a
href="https://github.com/everywall/ladder"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>View Source</a
>
|
<a
href="https://github.com/everywall"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>Everywall</a
>
</p>
</footer>
<div class="my-2"></div>
</div>
</body>
</html>

View File

@@ -1,41 +0,0 @@
package responsemodifiers
import (
_ "embed"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
)
// injectScript modifies HTTP responses
// to execute javascript at a particular time.
func injectScript(js string, execTime rewriters.ScriptExecTime) proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// don't add rewriter if it's not even html
ct := chain.Response.Header.Get("content-type")
if !strings.HasPrefix(ct, "text/html") {
return nil
}
rr := rewriters.NewScriptInjectorRewriter(js, execTime)
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
chain.Response.Body = htmlRewriter
return nil
}
}
// InjectScriptBeforeDOMContentLoaded modifies HTTP responses to inject a JS before DOM Content is loaded (script tag in head)
func InjectScriptBeforeDOMContentLoaded(js string) proxychain.ResponseModification {
return injectScript(js, rewriters.BeforeDOMContentLoaded)
}
// InjectScriptAfterDOMContentLoaded modifies HTTP responses to inject a JS after DOM Content is loaded (script tag in head)
func InjectScriptAfterDOMContentLoaded(js string) proxychain.ResponseModification {
return injectScript(js, rewriters.AfterDOMContentLoaded)
}
// InjectScriptAfterDOMIdle modifies HTTP responses to inject a JS after the DOM is idle (ie: js framework loaded)
func InjectScriptAfterDOMIdle(js string) proxychain.ResponseModification {
return injectScript(js, rewriters.AfterDOMIdle)
}

View File

@@ -1,107 +0,0 @@
package responsemodifiers
import (
"fmt"
http "github.com/bogdanfinn/fhttp"
//"net/http"
//http "github.com/Danny-Dasilva/fhttp"
"ladder/proxychain"
)
// DeleteIncomingCookies prevents ALL cookies from being sent from the proxy server
// back down to the client.
func DeleteIncomingCookies(_ ...string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Response.Header.Del("Set-Cookie")
return nil
}
}
// DeleteIncomingCookiesExcept prevents non-whitelisted cookies from being sent from the proxy server
// to the client. Cookies whose names are in the whitelist are not removed.
func DeleteIncomingCookiesExcept(whitelist ...string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
// Convert whitelist slice to a map for efficient lookups
whitelistMap := make(map[string]struct{})
for _, cookieName := range whitelist {
whitelistMap[cookieName] = struct{}{}
}
// If the response has no cookies, return early
if px.Response.Header == nil {
return nil
}
// Filter the cookies in the response
filteredCookies := []string{}
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
cookie := parseCookie(cookieStr)
if _, found := whitelistMap[cookie.Name]; found {
filteredCookies = append(filteredCookies, cookieStr)
}
}
// Update the Set-Cookie header with the filtered cookies
if len(filteredCookies) > 0 {
px.Response.Header["Set-Cookie"] = filteredCookies
} else {
px.Response.Header.Del("Set-Cookie")
}
return nil
}
}
// parseCookie parses a single Set-Cookie string and returns an http.Cookie object.
func parseCookie(cookieStr string) *http.Cookie {
header := http.Header{}
header.Add("Set-Cookie", cookieStr)
response := http.Response{Header: header}
return response.Cookies()[0]
}
// SetIncomingCookies adds a raw cookie string being sent from the proxy server down to the client
func SetIncomingCookies(cookies string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Response.Header.Set("Set-Cookie", cookies)
return nil
}
}
// SetIncomingCookie modifies a specific cookie in the response from the proxy server to the client.
func SetIncomingCookie(name string, val string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
if px.Response.Header == nil {
return nil
}
updatedCookies := []string{}
found := false
// Iterate over existing cookies and modify the one that matches the cookieName
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
cookie := parseCookie(cookieStr)
if cookie.Name == name {
// Replace the cookie with the new value
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
found = true
} else {
// Keep the cookie as is
updatedCookies = append(updatedCookies, cookieStr)
}
}
// If the specified cookie wasn't found, add it
if !found {
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
}
// Update the Set-Cookie header
px.Response.Header["Set-Cookie"] = updatedCookies
return nil
}
}
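
A minimal sketch of parsing a single Set-Cookie value the way parseCookie does, written against the standard library on the assumption that bogdanfinn/fhttp mirrors this API; the cookie value is hypothetical:

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	header := http.Header{}
	header.Add("Set-Cookie", "session=abc123; Path=/; HttpOnly")
	// Response.Cookies parses Set-Cookie headers into http.Cookie values.
	resp := http.Response{Header: header}
	cookie := resp.Cookies()[0]
	fmt.Println(cookie.Name, cookie.Value) // session abc123
}
```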

View File

@@ -1,21 +0,0 @@
package responsemodifiers
import (
"ladder/proxychain"
)
// SetResponseHeader modifies response headers from the upstream server
func SetResponseHeader(key string, value string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Context.Response().Header.Set(key, value)
return nil
}
}
// DeleteResponseHeader removes response headers from the upstream server
func DeleteResponseHeader(key string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Context.Response().Header.Del(key)
return nil
}
}

View File

@@ -1,56 +0,0 @@
package responsemodifiers
import (
_ "embed"
"fmt"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
)
//go:embed patch_dynamic_resource_urls.js
var patchDynamicResourceURLsScript string
// PatchDynamicResourceURLs patches the javascript runtime to rewrite URLs client-side.
// - This function is designed to allow the proxified page
// to still be browsible by routing all resource URLs through the proxy.
// - Native APIs capable of network requests will be hooked
// and the URLs arguments modified to point to the proxy instead.
// - fetch('/relative_path') -> fetch('/https://proxiedsite.com/relative_path')
// - Element.setAttribute('src', "/assets/img.jpg") -> Element.setAttribute('src', "/https://proxiedsite.com/assets/img.jpg")
func PatchDynamicResourceURLs() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// don't add rewriter if it's not even html
ct := chain.Response.Header.Get("content-type")
if !strings.HasPrefix(ct, "text/html") {
return nil
}
// this is the original URL sent by client:
// http://localhost:8080/http://proxiedsite.com/foo/bar
originalURI := chain.Context.Request().URI()
// this is the extracted URL that the client requests to proxy
// http://proxiedsite.com/foo/bar
reqURL := chain.Request.URL
params := map[string]string{
// ie: http://localhost:8080
"{{PROXY_ORIGIN}}": fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host()),
// ie: http://proxiedsite.com
"{{ORIGIN}}": fmt.Sprintf("%s://%s", reqURL.Scheme, reqURL.Host),
}
rr := rewriters.NewScriptInjectorRewriterWithParams(
patchDynamicResourceURLsScript,
rewriters.BeforeDOMContentLoaded,
params,
)
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
chain.Response.Body = htmlRewriter
return nil
}
}
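
For illustration, a sketch of the placeholder substitution these params imply, assuming NewScriptInjectorRewriterWithParams performs simple string replacement (its implementation is not shown here); the URLs are hypothetical:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	script := `const proxyOrigin = "{{PROXY_ORIGIN}}"; const origin = "{{ORIGIN}}";`
	params := map[string]string{
		"{{PROXY_ORIGIN}}": "http://localhost:8080",   // hypothetical proxy origin
		"{{ORIGIN}}":       "https://proxiedsite.com", // hypothetical upstream origin
	}
	// Replace each placeholder with its value before the script is injected.
	for placeholder, value := range params {
		script = strings.ReplaceAll(script, placeholder, value)
	}
	fmt.Println(script)
}
```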

View File

@@ -1,366 +0,0 @@
// Overrides the global fetch and XMLHttpRequest open methods to modify the request URLs.
// Also overrides the attribute setter prototype to modify the request URLs
// fetch("/relative_script.js") -> fetch("http://localhost:8080/relative_script.js")
(() => {
// ============== PARAMS ===========================
// if the original request was: http://localhost:8080/http://proxiedsite.com/foo/bar
// proxyOrigin is http://localhost:8080
const proxyOrigin = "{{PROXY_ORIGIN}}";
//const proxyOrigin = globalThis.window.location.origin;
// if the original request was: http://localhost:8080/http://proxiedsite.com/foo/bar
// origin is http://proxiedsite.com
const origin = "{{ORIGIN}}";
//const origin = (new URL(decodeURIComponent(globalThis.window.location.pathname.substring(1)))).origin
// ============== END PARAMS ======================
const blacklistedSchemes = [
"ftp:",
"mailto:",
"tel:",
"file:",
"blob:",
"javascript:",
"about:",
"magnet:",
"ws:",
"wss:",
];
function rewriteURL(url) {
if (!url) return url;
// fetch url might be string, url, or request object
// handle all three by downcasting to string
const isStr = typeof url === "string";
if (!isStr) {
const str = String(url);
if (str == "[object Request]") {
url = url.url;
} else {
url = str;
}
}
const oldUrl = url;
// don't rewrite special URI schemes
if (blacklistedSchemes.some((scheme) => url.startsWith(scheme))) return url;
// don't rewrite invalid URIs
try {
new URL(url, origin);
} catch {
return url;
}
// don't double rewrite
if (url.startsWith(`${proxyOrigin}/http://`)) return url;
if (url.startsWith(`${proxyOrigin}/https://`)) return url;
if (url.startsWith(`/${proxyOrigin}`)) return url;
if (url.startsWith(`/${origin}`)) return url;
if (url.startsWith(`/http://`)) return url;
if (url.startsWith(`/https://`)) return url;
if (url.startsWith(`/http%3A%2F%2F`)) return url;
if (url.startsWith(`/https%3A%2F%2F`)) return url;
if (url.startsWith(`/%2Fhttp`)) return url;
//console.log(`proxychain: origin: ${origin} // proxyOrigin: ${proxyOrigin} // original: ${oldUrl}`)
//originDomain = origin.replace("https://", "");
let scheme = origin.split(":")[0];
if (url.startsWith("//")) {
url = `/${scheme}://${encodeURIComponent(url.substring(2))}`;
} else if (url.startsWith("/")) {
url = `/${origin}/${encodeURIComponent(url.substring(1))}`;
} else if (
url.startsWith(proxyOrigin) && !url.startsWith(`${proxyOrigin}/http`)
) {
// edge case where client js uses current url host to write an absolute path
url = "".replace(proxyOrigin, `${proxyOrigin}/${origin}`);
} else if (url.startsWith(origin)) {
url = `/${encodeURIComponent(url)}`;
} else if (url.startsWith("http://") || url.startsWith("https://")) {
url = `/${proxyOrigin}/${encodeURIComponent(url)}`;
}
console.log(`proxychain: rewrite JS URL: ${oldUrl} -> ${url}`);
return url;
}
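// Examples, traced from the branches above, assuming
// origin = "http://proxiedsite.com" and proxyOrigin = "http://localhost:8080":
//   rewriteURL("/app.js")                     -> "/http://proxiedsite.com/app.js"
//   rewriteURL("//cdn.example.com/x.js")      -> "/http://cdn.example.com%2Fx.js"
//   rewriteURL("http://proxiedsite.com/a.js") -> "/http%3A%2F%2Fproxiedsite.com%2Fa.js"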
/*
// sometimes anti-bot protections like cloudflare or akamai bot manager check if JS is hooked
function hideMonkeyPatch(objectOrName, method, originalToString) {
let obj;
let isGlobalFunction = false;
if (typeof objectOrName === "string") {
obj = globalThis[objectOrName];
isGlobalFunction = (typeof obj === "function") &&
(method === objectOrName);
} else {
obj = objectOrName;
}
if (isGlobalFunction) {
const originalFunction = obj;
globalThis[objectOrName] = function(...args) {
return originalFunction.apply(this, args);
};
globalThis[objectOrName].toString = () => originalToString;
} else if (obj && typeof obj[method] === "function") {
const originalMethod = obj[method];
obj[method] = function(...args) {
return originalMethod.apply(this, args);
};
obj[method].toString = () => originalToString;
} else {
console.warn(
`proxychain: cannot hide monkey patch: ${method} is not a function on the provided object.`,
);
}
}
*/
function hideMonkeyPatch(objectOrName, method, originalToString) {
return;
}
// monkey patch fetch
const oldFetch = fetch;
fetch = async (url, init) => {
return oldFetch(rewriteURL(url), init);
};
hideMonkeyPatch("fetch", "fetch", "function fetch() { [native code] }");
// monkey patch xmlhttprequest
const oldOpen = XMLHttpRequest.prototype.open;
XMLHttpRequest.prototype.open = function (
method,
url,
async = true,
user = null,
password = null,
) {
return oldOpen.call(this, method, rewriteURL(url), async, user, password);
};
hideMonkeyPatch(
XMLHttpRequest.prototype,
"open",
'function(){if("function"==typeof eo)return eo.apply(this,arguments)}',
);
const oldSend = XMLHttpRequest.prototype.send;
XMLHttpRequest.prototype.send = function (body) {
// the URL was already rewritten in open(); send() only carries the request body
return oldSend.call(this, body);
};
hideMonkeyPatch(
XMLHttpRequest.prototype,
"send",
'function(){if("function"==typeof eo)return eo.apply(this,arguments)}',
);
// monkey patch service worker registration
const oldRegister = ServiceWorkerContainer.prototype.register;
ServiceWorkerContainer.prototype.register = function (scriptURL, options) {
return oldRegister.call(this, rewriteURL(scriptURL), options);
};
hideMonkeyPatch(
ServiceWorkerContainer.prototype,
"register",
"function register() { [native code] }",
);
// monkey patch URL.toString() method
const oldToString = URL.prototype.toString;
URL.prototype.toString = function () {
let originalURL = oldToString.call(this);
return rewriteURL(originalURL);
};
hideMonkeyPatch(
URL.prototype,
"toString",
"function toString() { [native code] }",
);
// monkey patch URL.toJSON() method
const oldToJson = URL.prototype.toJSON;
URL.prototype.toJSON = function () {
let originalURL = oldToJson.call(this);
return rewriteURL(originalURL);
};
hideMonkeyPatch(
URL.prototype,
"toJSON",
"function toJSON() { [native code] }",
);
// Monkey patch URL.href getter and setter
const originalHrefDescriptor = Object.getOwnPropertyDescriptor(
URL.prototype,
"href",
);
Object.defineProperty(URL.prototype, "href", {
get: function () {
let originalHref = originalHrefDescriptor.get.call(this);
return rewriteURL(originalHref);
},
set: function (newValue) {
originalHrefDescriptor.set.call(this, rewriteURL(newValue));
},
});
// TODO: do one more pass of this by manually traversing the DOM
// AFTER all the JS and page has loaded just in case
// Monkey patch setter
const elements = [
{ tag: "a", attribute: "href" },
{ tag: "img", attribute: "src" },
// { tag: 'img', attribute: 'srcset' }, // TODO: handle srcset
{ tag: "script", attribute: "src" },
{ tag: "link", attribute: "href" },
{ tag: "link", attribute: "icon" },
{ tag: "iframe", attribute: "src" },
{ tag: "audio", attribute: "src" },
{ tag: "video", attribute: "src" },
{ tag: "source", attribute: "src" },
// { tag: 'source', attribute: 'srcset' }, // TODO: handle srcset
{ tag: "embed", attribute: "src" },
{ tag: "embed", attribute: "pluginspage" },
{ tag: "html", attribute: "manifest" },
{ tag: "object", attribute: "src" },
{ tag: "input", attribute: "src" },
{ tag: "track", attribute: "src" },
{ tag: "form", attribute: "action" },
{ tag: "area", attribute: "href" },
{ tag: "base", attribute: "href" },
{ tag: "blockquote", attribute: "cite" },
{ tag: "del", attribute: "cite" },
{ tag: "ins", attribute: "cite" },
{ tag: "q", attribute: "cite" },
{ tag: "button", attribute: "formaction" },
{ tag: "input", attribute: "formaction" },
{ tag: "meta", attribute: "content" },
{ tag: "object", attribute: "data" },
];
elements.forEach(({ tag, attribute }) => {
const proto = document.createElement(tag).constructor.prototype;
const descriptor = Object.getOwnPropertyDescriptor(proto, attribute);
if (descriptor && descriptor.set) {
Object.defineProperty(proto, attribute, {
...descriptor,
set(value) {
// calling rewriteURL will end up calling a setter for href,
// leading to a recursive loop and a Maximum call stack size exceeded
// error, so we guard against this with a local semaphore flag
const isRewritingSetKey = Symbol.for("isRewritingSet");
if (!this[isRewritingSetKey]) {
this[isRewritingSetKey] = true;
descriptor.set.call(this, rewriteURL(value));
//descriptor.set.call(this, value);
this[isRewritingSetKey] = false;
} else {
// Directly set the value without rewriting
descriptor.set.call(this, value);
}
},
get() {
const isRewritingGetKey = Symbol.for("isRewritingGet");
if (!this[isRewritingGetKey]) {
this[isRewritingGetKey] = true;
let oldURL = descriptor.get.call(this);
let newURL = rewriteURL(oldURL);
this[isRewritingGetKey] = false;
return newURL;
} else {
return descriptor.get.call(this);
}
},
});
}
});
// monkey-patching Element.setAttribute
const originalSetAttribute = Element.prototype.setAttribute;
Element.prototype.setAttribute = function (name, value) {
const isMatchingElement = elements.some((element) => {
return this.tagName.toLowerCase() === element.tag &&
name.toLowerCase() === element.attribute;
});
if (isMatchingElement) {
value = rewriteURL(value);
}
originalSetAttribute.call(this, name, value);
};
// sometimes, libraries will set the Element.innerHTML or Element.outerHTML directly with a string instead of setters.
// in this case, we intercept it, create a fake DOM, parse it and then rewrite all attributes that could
// contain a URL. Then we return the replacement innerHTML/outerHTML with redirected links.
function rewriteInnerHTML(html, elements) {
const isRewritingHTMLKey = Symbol.for("isRewritingHTML");
// Check if already processing
if (document[isRewritingHTMLKey]) {
return html;
}
const tempContainer = document.createElement("div");
document[isRewritingHTMLKey] = true;
try {
tempContainer.innerHTML = html;
// Create a map for quick lookup
const elementsMap = new Map(elements.map((e) => [e.tag, e.attribute]));
// Loop-based DOM traversal
const nodes = [...tempContainer.querySelectorAll("*")];
for (const node of nodes) {
const attribute = elementsMap.get(node.tagName.toLowerCase());
if (attribute && node.hasAttribute(attribute)) {
const originalUrl = node.getAttribute(attribute);
const rewrittenUrl = rewriteURL(originalUrl);
node.setAttribute(attribute, rewrittenUrl);
}
}
return tempContainer.innerHTML;
} finally {
// Clear the flag
document[isRewritingHTMLKey] = false;
}
}
// Store original setters
const originalSetters = {};
["innerHTML", "outerHTML"].forEach((property) => {
const descriptor = Object.getOwnPropertyDescriptor(
Element.prototype,
property,
);
if (descriptor && descriptor.set) {
originalSetters[property] = descriptor.set;
Object.defineProperty(Element.prototype, property, {
...descriptor,
set(value) {
const isRewritingHTMLKey = Symbol.for("isRewritingHTML");
if (!this[isRewritingHTMLKey]) {
this[isRewritingHTMLKey] = true;
try {
// Use custom logic
descriptor.set.call(this, rewriteInnerHTML(value, elements));
} finally {
this[isRewritingHTMLKey] = false;
}
} else {
// Use original setter in recursive call
originalSetters[property].call(this, value);
}
},
});
}
});
})();

View File

@@ -1,34 +0,0 @@
package responsemodifiers
import (
_ "embed"
"io"
"strings"
"ladder/proxychain"
)
//go:embed patch_google_analytics.js
var gaPatch string
// PatchGoogleAnalytics replaces any request to google analytics with a no-op stub function.
// Some sites will not display content until GA is loaded, so we fake one instead.
// Credit to Raymond Hill @ github.com/gorhill/uBlock
func PatchGoogleAnalytics() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// preflight check
isGADomain := chain.Request.URL.Host == "www.google-analytics.com" || chain.Request.URL.Host == "google-analytics.com"
isGAPath := strings.HasSuffix(chain.Request.URL.Path, "analytics.js")
if !(isGADomain || isGAPath) {
return nil
}
// send modified js payload to client containing
// stub functions from patch_google_analytics.js
gaPatchReader := io.NopCloser(strings.NewReader(gaPatch))
chain.Response.Body = gaPatchReader
chain.Context.Set("content-type", "text/javascript")
return nil
}
}
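// Illustrative ruleset entry enabling this modifier (YAML schema as exercised in
// the ruleset_v2 tests; the domain value is an example):
//
//	domains:
//	  - example.com
//	responsemodifications:
//	  - name: PatchGoogleAnalytics
//	    params: []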

View File

@@ -1,109 +0,0 @@
// uBlock Origin - a browser extension to block requests.
// Copyright (C) 2019-present Raymond Hill
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see {http://www.gnu.org/licenses/}.
//
// Home: https://github.com/gorhill/uBlock
(function() {
"use strict";
// https://developers.google.com/analytics/devguides/collection/analyticsjs/
const noopfn = function() {
};
//
const Tracker = function() {
};
const p = Tracker.prototype;
p.get = noopfn;
p.set = noopfn;
p.send = noopfn;
//
const w = window;
const gaName = w.GoogleAnalyticsObject || "ga";
const gaQueue = w[gaName];
// https://github.com/uBlockOrigin/uAssets/pull/4115
const ga = function() {
const len = arguments.length;
if (len === 0) return;
const args = Array.from(arguments);
let fn;
let a = args[len - 1];
if (a instanceof Object && a.hitCallback instanceof Function) {
fn = a.hitCallback;
} else if (a instanceof Function) {
fn = () => {
a(ga.create());
};
} else {
const pos = args.indexOf("hitCallback");
if (pos !== -1 && args[pos + 1] instanceof Function) {
fn = args[pos + 1];
}
}
if (fn instanceof Function === false) return;
try {
fn();
} catch (ex) {
}
};
ga.create = function() {
return new Tracker();
};
ga.getByName = function() {
return new Tracker();
};
ga.getAll = function() {
return [new Tracker()];
};
ga.remove = noopfn;
// https://github.com/uBlockOrigin/uAssets/issues/2107
ga.loaded = true;
w[gaName] = ga;
// https://github.com/gorhill/uBlock/issues/3075
const dl = w.dataLayer;
if (dl instanceof Object) {
if (dl.hide instanceof Object && typeof dl.hide.end === "function") {
dl.hide.end();
dl.hide.end = () => { };
}
if (typeof dl.push === "function") {
const doCallback = function(item) {
if (item instanceof Object === false) return;
if (typeof item.eventCallback !== "function") return;
setTimeout(item.eventCallback, 1);
item.eventCallback = () => { };
};
dl.push = new Proxy(dl.push, {
apply: function(target, thisArg, args) {
doCallback(args[0]);
return Reflect.apply(target, thisArg, args);
},
});
if (Array.isArray(dl)) {
const q = dl.slice();
for (const item of q) {
doCallback(item);
}
}
}
}
// empty ga queue
if (gaQueue instanceof Function && Array.isArray(gaQueue.q)) {
const q = gaQueue.q.slice();
gaQueue.q.length = 0;
for (const entry of q) {
ga(...entry);
}
}
})();

View File

@@ -1,101 +0,0 @@
package responsemodifiers
import (
"embed"
"encoding/json"
"io"
"log"
"regexp"
"ladder/proxychain"
)
//go:embed vendor/ddg-tracker-surrogates/mapping.json
var mappingJSON []byte
//go:embed vendor/ddg-tracker-surrogates/surrogates/*
var surrogateFS embed.FS
var rules domainRules
func init() {
err := json.Unmarshal([]byte(mappingJSON), &rules)
if err != nil {
log.Printf("[ERROR]: PatchTrackerScripts: failed to deserialize ladder/proxychain/responsemodifiers/vendor/ddg-tracker-surrogates/mapping.json")
}
}
// mapping.json schema
type rule struct {
RegexRule *regexp.Regexp `json:"regexRule"`
Surrogate string `json:"surrogate"`
Action string `json:"action,omitempty"`
}
type domainRules map[string][]rule
func (r *rule) UnmarshalJSON(data []byte) error {
type Tmp struct {
RegexRule string `json:"regexRule"`
Surrogate string `json:"surrogate"`
Action string `json:"action,omitempty"`
}
var tmp Tmp
if err := json.Unmarshal(data, &tmp); err != nil {
return err
}
regex := regexp.MustCompile(tmp.RegexRule)
r.RegexRule = regex
r.Surrogate = tmp.Surrogate
r.Action = tmp.Action
return nil
}
// PatchTrackerScripts replaces any request to tracker scripts such as google analytics
// with a no-op stub that mocks the API structure of the original scripts they replace.
// Some pages depend on the existence of these structures for proper loading, so this may fix
// some broken elements.
// Surrogate script code borrowed from: DuckDuckGo Privacy Essentials browser extension for Firefox, Chrome. (Apache 2.0 license)
func PatchTrackerScripts() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// preflight checks
reqURL := chain.Request.URL.String()
isTracker := false
//
var surrogateScript io.ReadCloser
for domain, domainRules := range rules {
for _, rule := range domainRules {
if !rule.RegexRule.MatchString(reqURL) {
continue
}
// found tracker script, replacing response body with nop stub from
// ./vendor/ddg-tracker-surrogates/surrogates/{{rule.Surrogate}}
isTracker = true
script, err := surrogateFS.Open("vendor/ddg-tracker-surrogates/surrogates/" + rule.Surrogate)
if err != nil {
panic(err)
}
surrogateScript = io.NopCloser(script)
log.Printf("INFO: PatchTrackerScripts :: injecting surrogate for '%s' => 'surrogates/%s'\n", domain, rule.Surrogate)
break
}
}
if !isTracker {
return nil
}
chain.Response.Body = surrogateScript
chain.Context.Set("content-type", "text/javascript")
return nil
}
}
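// Illustrative shape of a mapping.json entry matching the rule/domainRules schema above
// (hypothetical values; the real file is vendored from duckduckgo/tracker-surrogates):
//
//	{
//	  "google-analytics.com": [
//	    { "regexRule": "analytics\\.js", "surrogate": "analytics.js" }
//	  ]
//	}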

View File

@@ -1,36 +0,0 @@
package responsemodifiers
import (
_ "embed"
"fmt"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
)
// RewriteHTMLResourceURLs modifies HTTP responses
// to rewrite URLs attributes in HTML content (such as src, href)
// - `<img src='/relative_path'>` -> `<img src='/https://proxiedsite.com/relative_path'>`
// - This function is designed to allow the proxified page
// to still be browsable by routing all resource URLs through the proxy.
func RewriteHTMLResourceURLs() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// don't add rewriter if it's not even html
ct := chain.Response.Header.Get("content-type")
if !strings.HasPrefix(ct, "text/html") {
return nil
}
// proxyURL is the URL of the ladder: http://localhost:8080 (ladder)
originalURI := chain.Context.Request().URI()
proxyURL := fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host())
// replace http.Response.Body with a readcloser that wraps the original, modifying the html attributes
rr := rewriters.NewHTMLTokenURLRewriter(chain.Request.URL, proxyURL)
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
chain.Response.Body = htmlRewriter
return nil
}
}

View File

@@ -1,28 +0,0 @@
(() => {
document.addEventListener("DOMContentLoaded", (event) => {
initIdleMutationObserver();
});
function initIdleMutationObserver() {
let debounceTimer;
const debounceDelay = 500; // adjust the delay as needed
const observer = new MutationObserver((mutations) => {
// Clear the previous timer and set a new one
clearTimeout(debounceTimer);
debounceTimer = setTimeout(() => {
execute();
observer.disconnect(); // Disconnect after first execution
}, debounceDelay);
});
const config = { attributes: false, childList: true, subtree: true };
observer.observe(document.body, config);
}
function execute() {
"{{AFTER_DOM_IDLE_SCRIPT}}";
//console.log('DOM is now idle. Executing...');
}
})();

View File

@@ -1,3 +0,0 @@
package rewriters
// todo: implement

View File

@@ -1,133 +0,0 @@
package rewriters
import (
"bytes"
"io"
"golang.org/x/net/html"
)
// IHTMLTokenRewriter defines an interface for modifying HTML tokens.
type IHTMLTokenRewriter interface {
// ShouldModify determines whether a given HTML token requires modification.
ShouldModify(*html.Token) bool
// ModifyToken applies modifications to a given HTML token.
// It returns strings representing content to be prepended and
// appended to the token. If no modifications are required or if an error occurs,
// it returns empty strings for both 'prepend' and 'append'.
// Note: The original token is not modified if an error occurs.
ModifyToken(*html.Token) (prepend, append string)
}
// HTMLRewriter is a struct that can take multiple TokenHandlers and process all
// HTML tokens from http.Response.Body in a single pass, making changes and returning a new io.ReadCloser
//
// - HTMLRewriter reads the http.Response.Body stream,
// parsing each HTML token one at a time and making modifications (defined by implementations of IHTMLTokenRewriter)
//
// - When ProxyChain.Execute() is called, the response body will be read from the server
// and pulled through each ResponseModification which wraps the ProxyChain.Response.Body
// without ever buffering the entire HTTP response in memory.
type HTMLRewriter struct {
tokenizer *html.Tokenizer
currentToken *html.Token
tokenBuffer *bytes.Buffer
currentTokenProcessed bool
rewriters []IHTMLTokenRewriter
}
// NewHTMLRewriter creates a new HTMLRewriter instance.
// It processes HTML tokens from an io.ReadCloser source (typically http.Response.Body)
// using a series of HTMLTokenRewriters. Each HTMLTokenRewriter in the 'rewriters' slice
// applies its specific modifications to the HTML tokens.
// The HTMLRewriter reads from the provided 'src', applies the modifications,
// and returns the processed content as a new io.ReadCloser.
// This new io.ReadCloser can be used to stream the modified content back to the client.
//
// Parameters:
// - src: An io.ReadCloser representing the source of the HTML content, such as http.Response.Body.
// - rewriters: A slice of HTMLTokenRewriters that define the modifications to be applied to the HTML tokens.
//
// Returns:
// - A pointer to an HTMLRewriter, which implements io.ReadCloser, containing the modified HTML content.
func NewHTMLRewriter(src io.ReadCloser, rewriters ...IHTMLTokenRewriter) *HTMLRewriter {
return &HTMLRewriter{
tokenizer: html.NewTokenizer(src),
currentToken: nil,
tokenBuffer: new(bytes.Buffer),
currentTokenProcessed: false,
rewriters: rewriters,
}
}
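// Minimal usage sketch, mirroring how RewriteHTMLResourceURLs wires it up
// (baseURL, proxyURL and resp are placeholders):
//
//	rr := NewHTMLTokenURLRewriter(baseURL, proxyURL)
//	resp.Body = NewHTMLRewriter(resp.Body, rr)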
// Close resets the internal state of HTMLRewriter, clearing buffers and token data.
func (r *HTMLRewriter) Close() error {
r.tokenBuffer.Reset()
r.currentToken = nil
r.currentTokenProcessed = false
return nil
}
// Read processes the HTML content, rewriting URLs and managing the state of tokens.
func (r *HTMLRewriter) Read(p []byte) (int, error) {
if r.currentToken == nil || r.currentToken.Data == "" || r.currentTokenProcessed {
tokenType := r.tokenizer.Next()
// done reading html, close out reader
if tokenType == html.ErrorToken {
if r.tokenizer.Err() == io.EOF {
return 0, io.EOF
}
return 0, r.tokenizer.Err()
}
// get the next token; reset buffer
t := r.tokenizer.Token()
r.currentToken = &t
r.tokenBuffer.Reset()
// buffer += "<prepends> <token> <appends>"
// process token through all registered rewriters
// rewriters will modify the token, and optionally
// return a <prepend> or <append> string token
appends := make([]string, 0, len(r.rewriters))
for _, rewriter := range r.rewriters {
if !rewriter.ShouldModify(r.currentToken) {
continue
}
prepend, a := rewriter.ModifyToken(r.currentToken)
appends = append(appends, a)
// add <prepends> to buffer
r.tokenBuffer.WriteString(prepend)
}
// add <token> to buffer
if tokenType == html.TextToken {
// don't unescape textTokens (such as inline scripts).
// Token.String() by default will escape the inputs, but
// we don't want to modify the original source
r.tokenBuffer.WriteString(r.currentToken.Data)
} else {
r.tokenBuffer.WriteString(r.currentToken.String())
}
// add <appends> to buffer
for _, a := range appends {
r.tokenBuffer.WriteString(a)
}
r.currentTokenProcessed = false
}
n, err := r.tokenBuffer.Read(p)
if err == io.EOF || r.tokenBuffer.Len() == 0 {
r.currentTokenProcessed = true
err = nil // EOF in this context is expected and not an actual error
}
return n, err
}

View File

@@ -1,288 +0,0 @@
package rewriters
import (
_ "embed"
"fmt"
"log"
"net/url"
"path"
"regexp"
"strings"
"golang.org/x/net/html/atom"
"golang.org/x/net/html"
)
var (
rewriteAttrs map[string]map[string]bool
specialRewriteAttrs map[string]map[string]bool
schemeBlacklist map[string]bool
)
func init() {
// define all tag/attributes which might contain URLs
// to attempt to rewrite to point to proxy instead
rewriteAttrs = map[string]map[string]bool{
"img": {"src": true, "srcset": true, "longdesc": true, "usemap": true},
"a": {"href": true},
"form": {"action": true},
"link": {"href": true, "manifest": true, "icon": true},
"script": {"src": true},
"video": {"src": true, "poster": true},
"audio": {"src": true},
"iframe": {"src": true, "longdesc": true},
"embed": {"src": true},
"object": {"data": true, "codebase": true},
"source": {"src": true, "srcset": true},
"track": {"src": true},
"area": {"href": true},
"base": {"href": true},
"blockquote": {"cite": true},
"del": {"cite": true},
"ins": {"cite": true},
"q": {"cite": true},
"body": {"background": true},
"button": {"formaction": true},
"input": {"src": true, "formaction": true},
"meta": {"content": true},
}
// might contain URL but requires special handling
specialRewriteAttrs = map[string]map[string]bool{
"img": {"srcset": true},
"source": {"srcset": true},
"meta": {"content": true},
}
// define URIs to NOT rewrite
// for example: don't rewrite <img src="data:image/png;base64,iVBORw...">
schemeBlacklist = map[string]bool{
"data": true,
"tel": true,
"mailto": true,
"file": true,
"blob": true,
"javascript": true,
"about": true,
"magnet": true,
"ws": true,
"wss": true,
"ftp": true,
}
}
// HTMLTokenURLRewriter implements IHTMLTokenRewriter
// it rewrites URLs within HTML resources to use a specified proxy URL.
// <img src='/relative_path'> -> <img src='/https://proxiedsite.com/relative_path'>
type HTMLTokenURLRewriter struct {
baseURL *url.URL
proxyURL string // ladder URL, not proxied site URL
}
// NewHTMLTokenURLRewriter creates a new instance of HTMLTokenURLRewriter.
// It initializes the tokenizer with the provided source and sets the proxy URL.
// baseURL might be https://medium.com/foobar
// proxyURL is http://localhost:8080
func NewHTMLTokenURLRewriter(baseURL *url.URL, proxyURL string) *HTMLTokenURLRewriter {
return &HTMLTokenURLRewriter{
baseURL: baseURL,
proxyURL: proxyURL,
}
}
func (r *HTMLTokenURLRewriter) ShouldModify(token *html.Token) bool {
// fmt.Printf("touch token: %s\n", token.String())
attrLen := len(token.Attr)
if attrLen == 0 {
return false
}
if token.Type == html.StartTagToken {
return true
}
if token.Type == html.SelfClosingTagToken {
return true
}
return false
}
func (r *HTMLTokenURLRewriter) ModifyToken(token *html.Token) (string, string) {
for i := range token.Attr {
attr := &token.Attr[i]
switch {
// don't touch tag/attributes that don't contain URIs
case !rewriteAttrs[token.Data][attr.Key]:
continue
// don't touch attributes with special URIs (like data:)
case schemeBlacklist[strings.Split(attr.Val, ":")[0]]:
continue
// don't double-overwrite the url
case strings.HasPrefix(attr.Val, r.proxyURL):
continue
case strings.HasPrefix(attr.Val, "/http://"):
continue
case strings.HasPrefix(attr.Val, "/https://"):
continue
// handle special rewrites
case specialRewriteAttrs[token.Data][attr.Key]:
r.handleSpecialAttr(token, attr, r.baseURL)
continue
default:
// rewrite url
handleURLPart(attr, r.baseURL)
}
}
return "", ""
}
// dispatcher for URL rewriting based on URI type
func handleURLPart(attr *html.Attribute, baseURL *url.URL) {
switch {
case strings.HasPrefix(attr.Val, "//"):
handleProtocolRelativePath(attr, baseURL)
case strings.HasPrefix(attr.Val, "/"):
handleRootRelativePath(attr, baseURL)
case strings.HasPrefix(attr.Val, "https://"):
handleAbsolutePath(attr, baseURL)
case strings.HasPrefix(attr.Val, "http://"):
handleAbsolutePath(attr, baseURL)
default:
handleDocumentRelativePath(attr, baseURL)
}
}
// Protocol-relative URLs: These start with "//" and will use the same protocol (http or https) as the current page.
func handleProtocolRelativePath(attr *html.Attribute, baseURL *url.URL) {
// protocol-relative URLs keep their own host, so only the scheme is borrowed from the base URL
attr.Val = fmt.Sprintf("%s:%s", baseURL.Scheme, attr.Val)
handleAbsolutePath(attr, baseURL)
log.Printf("proto rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
// Root-relative URLs: These are relative to the root path and start with a "/".
func handleRootRelativePath(attr *html.Attribute, baseURL *url.URL) {
// Skip processing if it's already in the correct format
if strings.HasPrefix(attr.Val, "/http://") || strings.HasPrefix(attr.Val, "/https://") {
return
}
// doublecheck this is a valid relative URL
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
_, err := url.Parse(fmt.Sprintf("http://localhost.com%s", attr.Val))
if err != nil {
log.Println(err)
return
}
// log.Printf("BASEURL patch: %s\n", baseURL)
attr.Val = fmt.Sprintf(
"%s://%s/%s",
baseURL.Scheme,
baseURL.Host,
strings.TrimPrefix(attr.Val, "/"),
)
attr.Val = escape(attr.Val)
attr.Val = fmt.Sprintf("/%s", attr.Val)
log.Printf("root rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
// Document-relative URLs: These are relative to the current document's path and don't start with a "/".
func handleDocumentRelativePath(attr *html.Attribute, baseURL *url.URL) {
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
if strings.HasPrefix(attr.Val, "#") {
return
}
// resolve against the directory of the base document's path
relativePath := path.Join(strings.Trim(path.Dir(baseURL.Path), "/"), strings.Trim(attr.Val, "/"))
attr.Val = fmt.Sprintf(
"%s://%s/%s",
baseURL.Scheme,
strings.Trim(baseURL.Host, "/"),
relativePath,
)
attr.Val = escape(attr.Val)
attr.Val = fmt.Sprintf("/%s", attr.Val)
log.Printf("doc rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
// full URIs beginning with https?://proxiedsite.com
func handleAbsolutePath(attr *html.Attribute, _ *url.URL) {
// check if valid URL
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
u, err := url.Parse(attr.Val)
if err != nil {
return
}
if !(u.Scheme == "http" || u.Scheme == "https") {
return
}
attr.Val = fmt.Sprintf("/%s", escape(strings.TrimPrefix(attr.Val, "/")))
// attr.Val = fmt.Sprintf("/%s", escape(attr.Val))
log.Printf("abs url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
// handle edge cases for special attributes
func (r *HTMLTokenURLRewriter) handleSpecialAttr(token *html.Token, attr *html.Attribute, baseURL *url.URL) {
switch {
// srcset attribute doesn't contain a single URL but a comma-separated list of URLs, each potentially followed by a space and a descriptor (like a width, pixel density, or other conditions).
case token.DataAtom == atom.Img && attr.Key == "srcset":
handleSrcSet(attr, baseURL)
case token.DataAtom == atom.Source && attr.Key == "srcset":
handleSrcSet(attr, baseURL)
// meta with http-equiv="refresh": The content attribute of a meta tag, when used for a refresh directive, contains a time interval followed by a URL, like content="5;url=http://example.com/".
case token.DataAtom == atom.Meta && attr.Key == "content" && regexp.MustCompile(`^\d+;url=`).MatchString(attr.Val):
handleMetaRefresh(attr, baseURL)
default:
break
}
}
func handleMetaRefresh(attr *html.Attribute, baseURL *url.URL) {
sec := strings.Split(attr.Val, ";url=")[0]
url := strings.Split(attr.Val, ";url=")[1]
f := &html.Attribute{Val: url, Key: "src"}
handleURLPart(f, baseURL)
attr.Val = fmt.Sprintf("%s;url=%s", sec, f.Val)
}
func handleSrcSet(attr *html.Attribute, baseURL *url.URL) {
var srcSetBuilder strings.Builder
srcSetItems := strings.Split(attr.Val, ",")
for i, srcItem := range srcSetItems {
srcParts := strings.Fields(srcItem)
if len(srcParts) == 0 {
continue
}
f := &html.Attribute{Val: srcParts[0], Key: "src"}
handleURLPart(f, baseURL)
if i > 0 {
srcSetBuilder.WriteString(", ")
}
srcSetBuilder.WriteString(f.Val)
if len(srcParts) > 1 {
srcSetBuilder.WriteString(" ")
srcSetBuilder.WriteString(strings.Join(srcParts[1:], " "))
}
}
attr.Val = srcSetBuilder.String()
}
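// e.g. with baseURL https://proxiedsite.com:
//	srcset="/a.png 1x, /b.png 2x"
//	  -> "/https://proxiedsite.com/a.png 1x, /https://proxiedsite.com/b.png 2x"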
func escape(str string) string {
// return str
return strings.ReplaceAll(url.PathEscape(str), "%2F", "/")
}

View File

@@ -1,92 +0,0 @@
package rewriters
import (
_ "embed"
"fmt"
"sort"
"strings"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
)
// ScriptInjectorRewriter implements IHTMLTokenRewriter
// ScriptInjectorRewriter is a struct that injects JS into the page
// It uses an HTML tokenizer to process HTML content and injects JS at a specified location
type ScriptInjectorRewriter struct {
execTime ScriptExecTime
script string
}
type ScriptExecTime int
const (
BeforeDOMContentLoaded ScriptExecTime = iota
AfterDOMContentLoaded
AfterDOMIdle
)
func (r *ScriptInjectorRewriter) ShouldModify(token *html.Token) bool {
// modify if token == <head>
return token.DataAtom == atom.Head && token.Type == html.StartTagToken
}
//go:embed after_dom_idle_script_injector.js
var afterDomIdleScriptInjector string
func (r *ScriptInjectorRewriter) ModifyToken(_ *html.Token) (string, string) {
switch {
case r.execTime == BeforeDOMContentLoaded:
return "", fmt.Sprintf("\n<script>\n%s\n</script>\n", r.script)
case r.execTime == AfterDOMContentLoaded:
return "", fmt.Sprintf("\n<script>\ndocument.addEventListener('DOMContentLoaded', () => { %s });\n</script>", r.script)
case r.execTime == AfterDOMIdle:
s := strings.Replace(afterDomIdleScriptInjector, `'{{AFTER_DOM_IDLE_SCRIPT}}'`, r.script, 1)
return "", fmt.Sprintf("\n<script>\n%s\n</script>\n", s)
default:
return "", ""
}
}
// applies parameters by string replacement of the template script
func (r *ScriptInjectorRewriter) applyParams(params map[string]string) {
// Sort the keys by length in descending order
keys := make([]string, 0, len(params))
for key := range params {
keys = append(keys, key)
}
sort.Slice(keys, func(i, j int) bool {
return len(keys[i]) > len(keys[j])
})
for _, key := range keys {
r.script = strings.ReplaceAll(r.script, key, params[key])
}
}
// NewScriptInjectorRewriter creates a ScriptInjectorRewriter (an IHTMLTokenRewriter)
// that injects JS into the page for execution at a particular time
func NewScriptInjectorRewriter(script string, execTime ScriptExecTime) *ScriptInjectorRewriter {
return &ScriptInjectorRewriter{
execTime: execTime,
script: script,
}
}
// NewScriptInjectorRewriterWithParams creates a ScriptInjectorRewriter (an IHTMLTokenRewriter)
// that injects JS into the page for execution at a particular time,
// accepting arguments into the script via string replacement:
// each key in the params map is replaced with its corresponding value.
func NewScriptInjectorRewriterWithParams(script string, execTime ScriptExecTime, params map[string]string) *ScriptInjectorRewriter {
rr := &ScriptInjectorRewriter{
execTime: execTime,
script: script,
}
rr.applyParams(params)
return rr
}
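// Usage sketch, mirroring PatchDynamicResourceURLs (values are examples):
//
//	params := map[string]string{"{{PROXY_ORIGIN}}": "http://localhost:8080"}
//	rr := NewScriptInjectorRewriterWithParams(script, BeforeDOMContentLoaded, params)
//	body = NewHTMLRewriter(body, rr)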

View File

@@ -1,138 +0,0 @@
package ruleset_v2
import (
"encoding/json"
"fmt"
"gopkg.in/yaml.v3"
"ladder/proxychain"
)
type Rule struct {
Domains []string
RequestModifications []proxychain.RequestModification
_rqms []_rqm // internal representation of RequestModifications
ResponseModifications []proxychain.ResponseModification
_rsms []_rsm // internal representation of ResponseModifications
}
// internal representation of ResponseModifications
type _rsm struct {
Name string `json:"name" yaml:"name"`
Params []string `json:"params" yaml:"params"`
}
// internal representation of RequestModifications
type _rqm struct {
Name string `json:"name" yaml:"name"`
Params []string `json:"params" yaml:"params"`
}
// implement encoding/json Unmarshaler and Marshaler
func (rule *Rule) UnmarshalJSON(data []byte) error {
type Aux struct {
Domains []string `json:"domains"`
RequestModifications []_rqm `json:"requestmodifications"`
ResponseModifications []_rsm `json:"responsemodifications"`
}
aux := &Aux{}
if err := json.Unmarshal(data, aux); err != nil {
return err
}
rule.Domains = aux.Domains
rule._rqms = aux.RequestModifications
rule._rsms = aux.ResponseModifications
// convert requestModification function call string into actual functional option
for _, rqm := range aux.RequestModifications {
f, exists := rqmModMap[rqm.Name]
if !exists {
return fmt.Errorf("Rule::UnmarshalJSON => requestModifier '%s' does not exist, please check spelling", rqm.Name)
}
rule.RequestModifications = append(rule.RequestModifications, f(rqm.Params...))
}
// convert responseModification function call string into actual functional option
for _, rsm := range aux.ResponseModifications {
f, exists := rsmModMap[rsm.Name]
if !exists {
return fmt.Errorf("Rule::UnmarshalJSON => responseModifier '%s' does not exist, please check spelling", rsm.Name)
}
rule.ResponseModifications = append(rule.ResponseModifications, f(rsm.Params...))
}
return nil
}
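// Example input accepted by UnmarshalJSON (taken from rule_test.go):
//
//	{
//	  "domains": ["example.com", "www.example.com"],
//	  "requestmodifications":  [{"name": "ForwardRequestHeaders", "params": []}],
//	  "responsemodifications": [{"name": "SetIncomingCookie", "params": ["authorization-bearer", "hunter2"]}]
//	}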
func (rule *Rule) MarshalJSON() ([]byte, error) {
aux := struct {
Domains []string `json:"domains"`
RequestModifications []_rqm `json:"requestmodifications"`
ResponseModifications []_rsm `json:"responsemodifications"`
}{
Domains: rule.Domains,
RequestModifications: rule._rqms,
ResponseModifications: rule._rsms,
}
return json.MarshalIndent(aux, "", " ")
}
// ============================================================
// YAML
// implement yaml Unmarshaler and Marshaler
func (rule *Rule) UnmarshalYAML(unmarshal func(interface{}) error) error {
type Aux struct {
Domains []string `yaml:"domains"`
RequestModifications []_rqm `yaml:"requestmodifications"`
ResponseModifications []_rsm `yaml:"responsemodifications"`
}
var aux Aux
if err := unmarshal(&aux); err != nil {
return err
}
rule.Domains = aux.Domains
rule._rqms = aux.RequestModifications
rule._rsms = aux.ResponseModifications
// convert requestModification function call string into actual functional option
for _, rqm := range aux.RequestModifications {
f, exists := rqmModMap[rqm.Name]
if !exists {
return fmt.Errorf("Rule::UnmarshalYAML => requestModifier '%s' does not exist, please check spelling", rqm.Name)
}
rule.RequestModifications = append(rule.RequestModifications, f(rqm.Params...))
}
// convert responseModification function call string into actual functional option
for _, rsm := range aux.ResponseModifications {
f, exists := rsmModMap[rsm.Name]
if !exists {
return fmt.Errorf("Rule::UnmarshalYAML => responseModifier '%s' does not exist, please check spelling", rsm.Name)
}
rule.ResponseModifications = append(rule.ResponseModifications, f(rsm.Params...))
}
return nil
}
func (rule *Rule) MarshalYAML() (interface{}, error) {
type Aux struct {
Domains []string `yaml:"domains"`
RequestModifications []_rqm `yaml:"requestmodifications"`
ResponseModifications []_rsm `yaml:"responsemodifications"`
}
aux := &Aux{
Domains: rule.Domains,
RequestModifications: rule._rqms,
ResponseModifications: rule._rsms,
}
// yaml.v3 expects MarshalYAML to return the value to encode, not pre-marshaled bytes
return aux, nil
}

View File

@@ -1,182 +0,0 @@
package ruleset_v2
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/proxychain/codegen/codegen.go
// The purpose of this is serialization of rulesets from JSON or YAML into functional options suitable
// for use in proxychains.
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
)
type RequestModifierFactory func(params ...string) proxychain.RequestModification
var rqmModMap map[string]RequestModifierFactory
func init() {
rqmModMap = make(map[string]RequestModifierFactory)
rqmModMap["ForwardRequestHeaders"] = func(_ ...string) proxychain.RequestModification {
return rx.ForwardRequestHeaders()
}
rqmModMap["MasqueradeAsGoogleBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsGoogleBot()
}
rqmModMap["MasqueradeAsBingBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsBingBot()
}
rqmModMap["MasqueradeAsWaybackMachineBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsWaybackMachineBot()
}
rqmModMap["MasqueradeAsFacebookBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsFacebookBot()
}
rqmModMap["MasqueradeAsYandexBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsYandexBot()
}
rqmModMap["MasqueradeAsBaiduBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsBaiduBot()
}
rqmModMap["MasqueradeAsDuckDuckBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsDuckDuckBot()
}
rqmModMap["MasqueradeAsYahooBot"] = func(_ ...string) proxychain.RequestModification {
return rx.MasqueradeAsYahooBot()
}
rqmModMap["ModifyDomainWithRegex"] = func(params ...string) proxychain.RequestModification {
return rx.ModifyDomainWithRegex(params[0], params[1])
}
rqmModMap["SetOutgoingCookie"] = func(params ...string) proxychain.RequestModification {
return rx.SetOutgoingCookie(params[0], params[1])
}
rqmModMap["SetOutgoingCookies"] = func(params ...string) proxychain.RequestModification {
return rx.SetOutgoingCookies(params[0])
}
rqmModMap["DeleteOutgoingCookie"] = func(params ...string) proxychain.RequestModification {
return rx.DeleteOutgoingCookie(params[0])
}
rqmModMap["DeleteOutgoingCookies"] = func(_ ...string) proxychain.RequestModification {
return rx.DeleteOutgoingCookies()
}
rqmModMap["DeleteOutgoingCookiesExcept"] = func(params ...string) proxychain.RequestModification {
return rx.DeleteOutgoingCookiesExcept(params[0])
}
rqmModMap["ModifyPathWithRegex"] = func(params ...string) proxychain.RequestModification {
return rx.ModifyPathWithRegex(params[0], params[1])
}
rqmModMap["ModifyQueryParams"] = func(params ...string) proxychain.RequestModification {
return rx.ModifyQueryParams(params[0], params[1])
}
rqmModMap["SetRequestHeader"] = func(params ...string) proxychain.RequestModification {
return rx.SetRequestHeader(params[0], params[1])
}
rqmModMap["DeleteRequestHeader"] = func(params ...string) proxychain.RequestModification {
return rx.DeleteRequestHeader(params[0])
}
rqmModMap["RequestArchiveIs"] = func(_ ...string) proxychain.RequestModification {
return rx.RequestArchiveIs()
}
rqmModMap["RequestGoogleCache"] = func(_ ...string) proxychain.RequestModification {
return rx.RequestGoogleCache()
}
rqmModMap["RequestWaybackMachine"] = func(_ ...string) proxychain.RequestModification {
return rx.RequestWaybackMachine()
}
rqmModMap["ResolveWithGoogleDoH"] = func(_ ...string) proxychain.RequestModification {
return rx.ResolveWithGoogleDoH()
}
rqmModMap["SpoofOrigin"] = func(params ...string) proxychain.RequestModification {
return rx.SpoofOrigin(params[0])
}
rqmModMap["HideOrigin"] = func(_ ...string) proxychain.RequestModification {
return rx.HideOrigin()
}
rqmModMap["SpoofReferrer"] = func(params ...string) proxychain.RequestModification {
return rx.SpoofReferrer(params[0])
}
rqmModMap["HideReferrer"] = func(_ ...string) proxychain.RequestModification {
return rx.HideReferrer()
}
rqmModMap["SpoofReferrerFromBaiduSearch"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromBaiduSearch()
}
rqmModMap["SpoofReferrerFromBingSearch"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromBingSearch()
}
rqmModMap["SpoofReferrerFromGoogleSearch"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromGoogleSearch()
}
rqmModMap["SpoofReferrerFromLinkedInPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromLinkedInPost()
}
rqmModMap["SpoofReferrerFromNaverSearch"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromNaverSearch()
}
rqmModMap["SpoofReferrerFromPinterestPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromPinterestPost()
}
rqmModMap["SpoofReferrerFromQQPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromQQPost()
}
rqmModMap["SpoofReferrerFromRedditPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromRedditPost()
}
rqmModMap["SpoofReferrerFromTumblrPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromTumblrPost()
}
rqmModMap["SpoofReferrerFromTwitterPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromTwitterPost()
}
rqmModMap["SpoofReferrerFromVkontaktePost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromVkontaktePost()
}
rqmModMap["SpoofReferrerFromWeiboPost"] = func(_ ...string) proxychain.RequestModification {
return rx.SpoofReferrerFromWeiboPost()
}
rqmModMap["SpoofUserAgent"] = func(params ...string) proxychain.RequestModification {
return rx.SpoofUserAgent(params[0])
}
rqmModMap["SpoofXForwardedFor"] = func(params ...string) proxychain.RequestModification {
return rx.SpoofXForwardedFor(params[0])
}
}

View File

@@ -1,98 +0,0 @@
package ruleset_v2
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/proxychain/codegen/codegen.go
// The purpose of this is serialization of rulesets from JSON or YAML into functional options suitable
// for use in proxychains.
import (
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
)
type ResponseModifierFactory func(params ...string) proxychain.ResponseModification
var rsmModMap map[string]ResponseModifierFactory
func init() {
rsmModMap = make(map[string]ResponseModifierFactory)
rsmModMap["APIContent"] = func(_ ...string) proxychain.ResponseModification {
return tx.APIContent()
}
rsmModMap["BlockElementRemoval"] = func(params ...string) proxychain.ResponseModification {
return tx.BlockElementRemoval(params[0])
}
rsmModMap["BypassCORS"] = func(_ ...string) proxychain.ResponseModification {
return tx.BypassCORS()
}
rsmModMap["BypassContentSecurityPolicy"] = func(_ ...string) proxychain.ResponseModification {
return tx.BypassContentSecurityPolicy()
}
rsmModMap["SetContentSecurityPolicy"] = func(params ...string) proxychain.ResponseModification {
return tx.SetContentSecurityPolicy(params[0])
}
rsmModMap["ForwardResponseHeaders"] = func(_ ...string) proxychain.ResponseModification {
return tx.ForwardResponseHeaders()
}
rsmModMap["GenerateReadableOutline"] = func(_ ...string) proxychain.ResponseModification {
return tx.GenerateReadableOutline()
}
rsmModMap["InjectScriptBeforeDOMContentLoaded"] = func(params ...string) proxychain.ResponseModification {
return tx.InjectScriptBeforeDOMContentLoaded(params[0])
}
rsmModMap["InjectScriptAfterDOMContentLoaded"] = func(params ...string) proxychain.ResponseModification {
return tx.InjectScriptAfterDOMContentLoaded(params[0])
}
rsmModMap["InjectScriptAfterDOMIdle"] = func(params ...string) proxychain.ResponseModification {
return tx.InjectScriptAfterDOMIdle(params[0])
}
rsmModMap["DeleteIncomingCookies"] = func(params ...string) proxychain.ResponseModification {
return tx.DeleteIncomingCookies(params[0])
}
rsmModMap["DeleteIncomingCookiesExcept"] = func(params ...string) proxychain.ResponseModification {
return tx.DeleteIncomingCookiesExcept(params[0])
}
rsmModMap["SetIncomingCookies"] = func(params ...string) proxychain.ResponseModification {
return tx.SetIncomingCookies(params[0])
}
rsmModMap["SetIncomingCookie"] = func(params ...string) proxychain.ResponseModification {
return tx.SetIncomingCookie(params[0], params[1])
}
rsmModMap["SetResponseHeader"] = func(params ...string) proxychain.ResponseModification {
return tx.SetResponseHeader(params[0], params[1])
}
rsmModMap["DeleteResponseHeader"] = func(params ...string) proxychain.ResponseModification {
return tx.DeleteResponseHeader(params[0])
}
rsmModMap["PatchDynamicResourceURLs"] = func(_ ...string) proxychain.ResponseModification {
return tx.PatchDynamicResourceURLs()
}
rsmModMap["PatchGoogleAnalytics"] = func(_ ...string) proxychain.ResponseModification {
return tx.PatchGoogleAnalytics()
}
rsmModMap["PatchTrackerScripts"] = func(_ ...string) proxychain.ResponseModification {
return tx.PatchTrackerScripts()
}
rsmModMap["RewriteHTMLResourceURLs"] = func(_ ...string) proxychain.ResponseModification {
return tx.RewriteHTMLResourceURLs()
}
}

View File

@@ -1,138 +0,0 @@
package ruleset_v2
import (
"encoding/json"
"fmt"
"testing"
"gopkg.in/yaml.v3"
)
// unmarshalRule is a helper function to unmarshal a Rule from a JSON string.
func unmarshalRule(t *testing.T, ruleJSON string) *Rule {
rule := &Rule{}
err := json.Unmarshal([]byte(ruleJSON), rule)
if err != nil {
t.Fatalf("expected no error in Unmarshal, got '%s'", err)
}
return rule
}
func TestRuleUnmarshalJSON(t *testing.T) {
ruleJSON := `{
"domains": ["example.com", "www.example.com"],
"responsemodifications": [{"name": "APIContent", "params": []}, {"name": "SetContentSecurityPolicy", "params": ["foobar"]}, {"name": "SetIncomingCookie", "params": ["authorization-bearer", "hunter2"]}],
"requestmodifications": [{"name": "ForwardRequestHeaders", "params": []}]
}`
rule := unmarshalRule(t, ruleJSON)
if len(rule.Domains) != 2 {
t.Errorf("expected number of domains to be 2")
}
if !(rule.Domains[0] == "example.com" || rule.Domains[1] == "example.com") {
t.Errorf("expected domain to be example.com")
}
if len(rule.ResponseModifications) != 3 {
t.Errorf("expected number of ResponseModifications to be 3, got %d", len(rule.ResponseModifications))
}
if len(rule.RequestModifications) != 1 {
t.Errorf("expected number of RequestModifications to be 1, got %d", len(rule.RequestModifications))
}
}
func TestRuleMarshalJSON(t *testing.T) {
ruleJSON := `{
"domains": ["example.com", "www.example.com"],
"responsemodifications": [{"name": "APIContent", "params": []}, {"name": "SetContentSecurityPolicy", "params": ["foobar"]}, {"name": "SetIncomingCookie", "params": ["authorization-bearer", "hunter2"]}],
"requestmodifications": [{"name": "ForwardRequestHeaders", "params": []}]
}`
rule := unmarshalRule(t, ruleJSON)
jsonRule, err := json.Marshal(rule)
if err != nil {
t.Errorf("expected no error marshalling rule to json, got '%s'", err.Error())
}
fmt.Println(string(jsonRule))
}
// ===============================================
// unmarshalYAMLRule is a helper function to unmarshal a Rule from a YAML string.
func unmarshalYAMLRule(t *testing.T, ruleYAML string) *Rule {
rule := &Rule{}
err := yaml.Unmarshal([]byte(ruleYAML), rule)
if err != nil {
t.Fatalf("expected no error in Unmarshal, got '%s'", err)
}
return rule
}
func TestRuleUnmarshalYAML(t *testing.T) {
ruleYAML := `
domains:
- example.com
- www.example.com
responsemodifications:
- name: APIContent
params: []
- name: SetContentSecurityPolicy
params:
- foobar
- name: SetIncomingCookie
params:
- authorization-bearer
- hunter2
requestmodifications:
- name: ForwardRequestHeaders
params: []
`
rule := unmarshalYAMLRule(t, ruleYAML)
if len(rule.Domains) != 2 {
t.Errorf("expected number of domains to be 2")
}
if !(rule.Domains[0] == "example.com" || rule.Domains[1] == "example.com") {
t.Errorf("expected domain to be example.com")
}
if len(rule.ResponseModifications) != 3 {
t.Errorf("expected number of ResponseModifications to be 3, got %d", len(rule.ResponseModifications))
}
if len(rule.RequestModifications) != 1 {
t.Errorf("expected number of RequestModifications to be 1, got %d", len(rule.RequestModifications))
}
fmt.Println(rule._rqms[0].Name)
}
func TestRuleMarshalYAML(t *testing.T) {
ruleYAML := `
domains:
- example.com
- www.example.com
responsemodifications:
- name: APIContent
params: []
- name: SetContentSecurityPolicy
params:
- foobar
- name: SetIncomingCookie
params:
- authorization-bearer
- hunter2
requestmodifications:
- name: ForwardRequestHeaders
params: []
`
rule := unmarshalYAMLRule(t, ruleYAML)
yamlRule, err := yaml.Marshal(rule)
if err != nil {
t.Errorf("expected no error marshalling rule to yaml, got '%s'", err.Error())
}
if yamlRule == nil {
t.Errorf("expected marshalling rule to yaml to not be nil")
}
}

View File

@@ -1,32 +0,0 @@
package ruleset_v2
import (
"net/url"
)
type IRuleset interface {
HasRule(url url.URL) bool
GetRule(url url.URL) (rule Rule, exists bool)
}
type Ruleset struct {
rulesetPath string
rules map[string]Rule
}
func (rs Ruleset) GetRule(url url.URL) (rule Rule, exists bool) {
rule, exists = rs.rules[url.Hostname()]
return rule, exists
}
func (rs Ruleset) HasRule(url url.URL) bool {
_, exists := rs.GetRule(url)
return exists
}
func NewRuleset(path string) (Ruleset, error) {
rs := Ruleset{
rulesetPath: path,
}
return rs, nil
}
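// Minimal usage sketch (note that NewRuleset does not yet populate rs.rules from rulesetPath):
//
//	rs, _ := NewRuleset("./ruleset.yaml")
//	if rule, ok := rs.GetRule(*req.URL); ok {
//		_ = rule // apply rule.RequestModifications / rule.ResponseModifications
//	}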

View File

@@ -1,79 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
@layer base {
a {
@apply text-slate-600 dark:text-slate-400 hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300;
}
h1 {
@apply scroll-m-20 text-4xl font-extrabold tracking-tight lg:text-5xl text-slate-900 dark:text-slate-200;
}
h2 {
@apply scroll-m-20 border-b pb-2 text-3xl font-semibold tracking-tight first:mt-0 text-slate-900 dark:text-slate-200;
}
h3 {
@apply scroll-m-20 text-2xl font-semibold tracking-tight text-slate-900 dark:text-slate-200;
}
h4,
h5,
h6 {
@apply scroll-m-20 text-xl font-semibold tracking-tight text-slate-900 dark:text-slate-200;
}
p {
@apply leading-7 [&:not(:first-child)]:mt-6 text-slate-900 dark:text-slate-200;
}
kbd,
pre,
code {
@apply relative whitespace-break-spaces rounded bg-slate-200 dark:bg-slate-800 py-[0.2rem] font-mono text-sm font-semibold;
}
blockquote {
@apply mt-6 border-l-2 pl-6 italic;
}
ul {
@apply my-6 ml-6 list-disc [&>li]:mt-2 text-slate-900 dark:text-slate-200;
}
ol {
@apply my-6 ml-6 list-decimal [&>li]:mt-2 text-slate-900 dark:text-slate-200;
}
dl {
@apply my-6 text-slate-900 dark:text-slate-200 font-bold [&>dd]:font-normal [&>dd]:ml-6 [&>dt]:mt-3;
}
li {
@apply text-slate-900 dark:text-slate-200;
}
table {
@apply w-full caption-bottom text-sm;
}
thead {
@apply [&_tr]:border-b;
}
tbody {
@apply [&_tr:last-child]:border-0;
}
tfoot {
@apply border-t border-slate-400 dark:border-slate-700 bg-slate-700/50 dark:bg-slate-200/50 font-medium [&>tr]:last:border-b-0;
}
tr {
@apply border-b border-slate-400 dark:border-slate-700 transition-colors hover:bg-slate-200/50 dark:hover:bg-slate-700/50 data-[state=selected]:bg-slate-700 dark:data-[state=selected]:bg-slate-200;
}
th {
@apply h-12 px-4 text-left align-middle font-medium text-slate-600 dark:text-slate-200 [&:has([role=checkbox])]:pr-0;
}
td {
@apply p-4 align-middle [&:has([role=checkbox])]:pr-0;
}
caption {
@apply mt-4 text-sm text-slate-600 dark:text-slate-200;
}
img {
@apply h-auto w-auto object-cover max-w-full mx-auto rounded-md shadow-md dark:shadow-slate-700;
}
figcaption {
@apply mt-2 text-sm text-slate-600 dark:text-slate-400;
}
hr {
@apply shrink-0 bg-slate-200 dark:bg-slate-700 h-[1px] w-full;
}
}
@tailwind utilities;

Some files were not shown because too many files have changed in this diff