Compare commits
1 Commits
fix/proxy_
...
add-versio
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5392992350 |
46
.air.toml
46
.air.toml
@@ -1,46 +0,0 @@
|
|||||||
root = "./"
|
|
||||||
testdata_dir = "testdata"
|
|
||||||
tmp_dir = "tmp"
|
|
||||||
|
|
||||||
[build]
|
|
||||||
args_bin = []
|
|
||||||
bin = "./tmp/main"
|
|
||||||
cmd = "go build -o ./tmp/main ./cmd"
|
|
||||||
delay = 1000
|
|
||||||
exclude_dir = ["assets", "tmp", "vendor", "testdata",]
|
|
||||||
exclude_file = ["proxychain/ruleset/rule_resmod_types.gen.go", "proxychain/ruleset/rule_reqmod_types.gen.go", "handlers/api_modifiers_structdef.gen.go"]
|
|
||||||
exclude_regex = ["_test.go"]
|
|
||||||
exclude_unchanged = false
|
|
||||||
follow_symlink = false
|
|
||||||
full_bin = "./tmp/main --ruleset ./ruleset_v2.yaml"
|
|
||||||
include_dir = []
|
|
||||||
include_ext = ["go", "tpl", "tmpl", "yaml", "html", "js"]
|
|
||||||
include_file = []
|
|
||||||
kill_delay = "0s"
|
|
||||||
log = "build-errors.log"
|
|
||||||
poll = false
|
|
||||||
poll_interval = 0
|
|
||||||
post_cmd = []
|
|
||||||
pre_cmd = ["git submodule update --init --recursive; git rev-parse --short HEAD > handlers/VERSION; git rev-parse --short HEAD > cmd/VERSION; cd proxychain/codegen && go run codegen.go && cd ../../handlers/api_modifiers_codegen && go run api_modifiers_codegen.go"]
|
|
||||||
rerun = false
|
|
||||||
rerun_delay = 500
|
|
||||||
send_interrupt = false
|
|
||||||
stop_on_error = false
|
|
||||||
|
|
||||||
[color]
|
|
||||||
app = ""
|
|
||||||
build = "yellow"
|
|
||||||
main = "magenta"
|
|
||||||
runner = "green"
|
|
||||||
watcher = "cyan"
|
|
||||||
|
|
||||||
[log]
|
|
||||||
main_only = false
|
|
||||||
time = false
|
|
||||||
|
|
||||||
[misc]
|
|
||||||
clean_on_exit = true
|
|
||||||
|
|
||||||
[screen]
|
|
||||||
clear_on_rebuild = true
|
|
||||||
keep_scroll = true
|
|
||||||
5
.github/workflows/build-css.yaml
vendored
5
.github/workflows/build-css.yaml
vendored
@@ -4,7 +4,6 @@ on:
|
|||||||
push:
|
push:
|
||||||
paths:
|
paths:
|
||||||
- "handlers/form.html"
|
- "handlers/form.html"
|
||||||
- "proxychain/responsemodifiers/vendor/generate_readable_outline.html"
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
@@ -30,14 +29,14 @@ jobs:
|
|||||||
-
|
-
|
||||||
name: Commit generated stylesheet
|
name: Commit generated stylesheet
|
||||||
run: |
|
run: |
|
||||||
if git diff --quiet handlers/styles.css; then
|
if git diff --quiet cmd/styles.css; then
|
||||||
echo "No changes to commit."
|
echo "No changes to commit."
|
||||||
exit 0
|
exit 0
|
||||||
else
|
else
|
||||||
echo "Changes detected, committing..."
|
echo "Changes detected, committing..."
|
||||||
git config --global user.name "Github action"
|
git config --global user.name "Github action"
|
||||||
git config --global user.email "username@users.noreply.github.com"
|
git config --global user.email "username@users.noreply.github.com"
|
||||||
git add handlers
|
git add cmd
|
||||||
git commit -m "Generated stylesheet"
|
git commit -m "Generated stylesheet"
|
||||||
git push
|
git push
|
||||||
fi
|
fi
|
||||||
6
.github/workflows/release-binaries.yaml
vendored
6
.github/workflows/release-binaries.yaml
vendored
@@ -22,7 +22,11 @@ jobs:
|
|||||||
-
|
-
|
||||||
name: Set version
|
name: Set version
|
||||||
run: |
|
run: |
|
||||||
echo -n $(git describe --tags --abbrev=0) > handlers/VERSION
|
VERSION=$(git describe --tags --abbrev=0)
|
||||||
|
echo -n $VERSION > handlers/VERSION
|
||||||
|
sed -i 's\VERSION\${VERSION}\g' handlers/form.html
|
||||||
|
echo handlers/form.html >> .gitignore
|
||||||
|
echo .gitignore >> .gitignore
|
||||||
-
|
-
|
||||||
name: Set up Go
|
name: Set up Go
|
||||||
uses: actions/setup-go@v3
|
uses: actions/setup-go@v3
|
||||||
|
|||||||
6
.github/workflows/release-docker.yaml
vendored
6
.github/workflows/release-docker.yaml
vendored
@@ -42,7 +42,11 @@ jobs:
|
|||||||
- name: Set version
|
- name: Set version
|
||||||
id: version
|
id: version
|
||||||
run: |
|
run: |
|
||||||
echo ${GITHUB_REF#refs/tags/v} > handlers/VERSION
|
VERSION=$(git describe --tags --abbrev=0)
|
||||||
|
echo -n $VERSION > handlers/VERSION
|
||||||
|
sed -i 's\VERSION\${VERSION}\g' handlers/form.html
|
||||||
|
echo handlers/form.html >> .gitignore
|
||||||
|
echo .gitignore >> .gitignore
|
||||||
|
|
||||||
# Install the cosign tool except on PR
|
# Install the cosign tool except on PR
|
||||||
# https://github.com/sigstore/cosign-installer
|
# https://github.com/sigstore/cosign-installer
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,8 +1,5 @@
|
|||||||
# dev binary
|
# dev binary
|
||||||
ladder
|
ladder
|
||||||
tmp/main
|
|
||||||
tmp
|
|
||||||
|
|
||||||
VERSION
|
VERSION
|
||||||
output.css
|
output.css
|
||||||
.aider*
|
|
||||||
|
|||||||
6
.gitmodules
vendored
6
.gitmodules
vendored
@@ -1,6 +0,0 @@
|
|||||||
[submodule "proxychain/responsemodifiers/vendor/ddg-tracker-surrogates"]
|
|
||||||
path = proxychain/responsemodifiers/vendor/ddg-tracker-surrogates
|
|
||||||
url = https://github.com/duckduckgo/tracker-surrogates
|
|
||||||
[submodule "proxychain/requestmodifiers/vendor/ua-parser-js"]
|
|
||||||
path = proxychain/requestmodifiers/vendor/ua-parser-js
|
|
||||||
url = https://github.com/faisalman/ua-parser-js.git
|
|
||||||
@@ -7,7 +7,7 @@ COPY . .
|
|||||||
|
|
||||||
RUN go mod download
|
RUN go mod download
|
||||||
|
|
||||||
RUN make build
|
RUN CGO_ENABLED=0 GOOS=linux go build -o ladder cmd/main.go
|
||||||
|
|
||||||
FROM debian:12-slim as release
|
FROM debian:12-slim as release
|
||||||
|
|
||||||
@@ -18,4 +18,8 @@ RUN chmod +x /app/ladder
|
|||||||
|
|
||||||
RUN apt update && apt install -y ca-certificates && rm -rf /var/lib/apt/lists/*
|
RUN apt update && apt install -y ca-certificates && rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
#EXPOSE 8080
|
||||||
|
|
||||||
|
#ENTRYPOINT ["/usr/bin/dumb-init", "--"]
|
||||||
|
|
||||||
CMD ["sh", "-c", "/app/ladder"]
|
CMD ["sh", "-c", "/app/ladder"]
|
||||||
13
Makefile
13
Makefile
@@ -1,14 +1,6 @@
|
|||||||
build:
|
|
||||||
cd proxychain/codegen && go run codegen.go
|
|
||||||
cd handlers/api_modifiers_codegen && go run api_modifiers_codegen.go
|
|
||||||
git submodule update --init --recursive
|
|
||||||
git rev-parse --short HEAD > handlers/VERSION
|
|
||||||
git rev-parse --short HEAD > cmd/VERSION
|
|
||||||
go build -o ladder -ldflags="-s -w" cmd/main.go
|
|
||||||
|
|
||||||
lint:
|
lint:
|
||||||
gofumpt -l -w .
|
gofumpt -l -w .
|
||||||
golangci-lint run -c .golangci-lint.yaml --fix
|
golangci-lint run -c .golangci-lint.yaml
|
||||||
|
|
||||||
go mod tidy
|
go mod tidy
|
||||||
go clean
|
go clean
|
||||||
@@ -16,6 +8,3 @@ lint:
|
|||||||
install-linters:
|
install-linters:
|
||||||
go install mvdan.cc/gofumpt@latest
|
go install mvdan.cc/gofumpt@latest
|
||||||
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2
|
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2
|
||||||
|
|
||||||
run:
|
|
||||||
go run ./cmd/.
|
|
||||||
|
|||||||
54
README.md
54
README.md
@@ -14,18 +14,6 @@ Freedom of information is an essential pillar of democracy and informed decision
|
|||||||
|
|
||||||
> **Disclaimer:** This project is intended for educational purposes only. The author does not endorse or encourage any unethical or illegal activity. Use this tool at your own risk.
|
> **Disclaimer:** This project is intended for educational purposes only. The author does not endorse or encourage any unethical or illegal activity. Use this tool at your own risk.
|
||||||
|
|
||||||
### How it works
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
sequenceDiagram
|
|
||||||
client->>+ladder: GET
|
|
||||||
ladder-->>ladder: apply RequestModifications
|
|
||||||
ladder->>+website: GET
|
|
||||||
website->>-ladder: 200 OK
|
|
||||||
ladder-->>ladder: apply ResultModifications
|
|
||||||
ladder->>-client: 200 OK
|
|
||||||
```
|
|
||||||
|
|
||||||
### Features
|
### Features
|
||||||
- [x] Bypass Paywalls
|
- [x] Bypass Paywalls
|
||||||
- [x] Remove CORS headers from responses, assets, and images ...
|
- [x] Remove CORS headers from responses, assets, and images ...
|
||||||
@@ -60,12 +48,12 @@ Certain sites may display missing images or encounter formatting issues. This ca
|
|||||||
|
|
||||||
### Binary
|
### Binary
|
||||||
1) Download binary [here](https://github.com/everywall/ladder/releases/latest)
|
1) Download binary [here](https://github.com/everywall/ladder/releases/latest)
|
||||||
2) Unpack and run the binary `./ladder -r https://t.ly/14PSf`
|
2) Unpack and run the binary `./ladder`
|
||||||
3) Open Browser (Default: http://localhost:8080)
|
3) Open Browser (Default: http://localhost:8080)
|
||||||
|
|
||||||
### Docker
|
### Docker
|
||||||
```bash
|
```bash
|
||||||
docker run -p 8080:8080 -d --env RULESET=https://t.ly/14PSf --name ladder ghcr.io/everywall/ladder:latest
|
docker run -p 8080:8080 -d --name ladder ghcr.io/everywall/ladder:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
### Docker Compose
|
### Docker Compose
|
||||||
@@ -91,28 +79,18 @@ Or create a bookmark with the following URL:
|
|||||||
```javascript
|
```javascript
|
||||||
javascript:window.location.href="http://localhost:8080/"+location.href
|
javascript:window.location.href="http://localhost:8080/"+location.href
|
||||||
```
|
```
|
||||||
### Outline
|
|
||||||
```bash
|
|
||||||
curl -X GET "http://localhost:8080/outline/https://www.example.com"
|
|
||||||
```
|
|
||||||
|
|
||||||
### API
|
### API
|
||||||
```bash
|
```bash
|
||||||
curl -X GET "http://localhost:8080/api/content/https://www.example.com"
|
curl -X GET "http://localhost:8080/api/https://www.example.com"
|
||||||
```
|
```
|
||||||
|
|
||||||
### RAW
|
### RAW
|
||||||
http://localhost:8080/api/raw/https://www.example.com
|
http://localhost:8080/raw/https://www.example.com
|
||||||
|
|
||||||
|
|
||||||
### Running Ruleset
|
### Running Ruleset
|
||||||
http://localhost:8080/api/ruleset
|
http://localhost:8080/ruleset
|
||||||
|
|
||||||
### Running Rule
|
|
||||||
http://localhost:8080/api/ruleset/https://example.com
|
|
||||||
|
|
||||||
### List available modifiers
|
|
||||||
http://localhost:8080/api/modifiers
|
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
@@ -128,7 +106,7 @@ http://localhost:8080/api/modifiers
|
|||||||
| `LOG_URLS` | Log fetched URL's | `true` |
|
| `LOG_URLS` | Log fetched URL's | `true` |
|
||||||
| `DISABLE_FORM` | Disables URL Form Frontpage | `false` |
|
| `DISABLE_FORM` | Disables URL Form Frontpage | `false` |
|
||||||
| `FORM_PATH` | Path to custom Form HTML | `` |
|
| `FORM_PATH` | Path to custom Form HTML | `` |
|
||||||
| `RULESET` | Path or URL to a ruleset file, accepts local directories | `https://raw.githubusercontent.com/everywall/ladder-rules/main/ruleset.yaml` or `/path/to/my/rules.yaml` or `/path/to/my/rules/` |
|
| `RULESET` | URL to a ruleset file | `https://raw.githubusercontent.com/everywall/ladder/main/ruleset.yaml` or `/path/to/my/rules.yaml` |
|
||||||
| `EXPOSE_RULESET` | Make your Ruleset available to other ladders | `true` |
|
| `EXPOSE_RULESET` | Make your Ruleset available to other ladders | `true` |
|
||||||
| `ALLOWED_DOMAINS` | Comma separated list of allowed domains. Empty = no limitations | `` |
|
| `ALLOWED_DOMAINS` | Comma separated list of allowed domains. Empty = no limitations | `` |
|
||||||
| `ALLOWED_DOMAINS_RULESET` | Allow Domains from Ruleset. false = no limitations | `false` |
|
| `ALLOWED_DOMAINS_RULESET` | Allow Domains from Ruleset. false = no limitations | `false` |
|
||||||
@@ -137,10 +115,9 @@ http://localhost:8080/api/modifiers
|
|||||||
|
|
||||||
### Ruleset
|
### Ruleset
|
||||||
|
|
||||||
It is possible to apply custom rules to modify the response or the requested URL. This can be used to remove unwanted or modify elements from the page. The ruleset is a YAML file, a directory with YAML Files, or an URL to a YAML file that contains a list of rules for each domain. These rules are loaded on startup.
|
It is possible to apply custom rules to modify the response or the requested URL. This can be used to remove unwanted or modify elements from the page. The ruleset is a YAML file that contains a list of rules for each domain and is loaded on startup
|
||||||
|
|
||||||
There is a basic ruleset available in a separate repository [ruleset.yaml](https://raw.githubusercontent.com/everywall/ladder-rules/main/ruleset.yaml). Feel free to add your own rules and create a pull request.
|
|
||||||
|
|
||||||
|
See in [ruleset.yaml](ruleset.yaml) for an example.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- domain: example.com # Includes all subdomains
|
- domain: example.com # Includes all subdomains
|
||||||
@@ -199,21 +176,8 @@ There is a basic ruleset available in a separate repository [ruleset.yaml](https
|
|||||||
To run a development server at http://localhost:8080:
|
To run a development server at http://localhost:8080:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone git@github.com-ladddder:everywall/ladder.git
|
echo "DEV" > handler/VERSION
|
||||||
git submodule update --init --recursive
|
|
||||||
echo "dev " > handlers/VERSION
|
|
||||||
echo "dev " > cmd/VERSION
|
|
||||||
RULESET="./ruleset.yaml" go run cmd/main.go
|
RULESET="./ruleset.yaml" go run cmd/main.go
|
||||||
```
|
```
|
||||||
|
|
||||||
### Optional: Live reloading development server with [cosmtrek/air](https://github.com/cosmtrek/air)
|
|
||||||
|
|
||||||
Install air according to the [installation instructions](https://github.com/cosmtrek/air#installation).
|
|
||||||
|
|
||||||
Run a development server at http://localhost:8080:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
air # or the path to air if you haven't added a path alias to your .bashrc or .zshrc
|
|
||||||
```
|
|
||||||
|
|
||||||
This project uses [pnpm](https://pnpm.io/) to build a stylesheet with the [Tailwind CSS](https://tailwindcss.com/) classes. For local development, if you modify styles in `form.html`, run `pnpm build` to generate a new stylesheet.
|
This project uses [pnpm](https://pnpm.io/) to build a stylesheet with the [Tailwind CSS](https://tailwindcss.com/) classes. For local development, if you modify styles in `form.html`, run `pnpm build` to generate a new stylesheet.
|
||||||
|
|||||||
|
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 15 KiB |
163
cmd/main.go
163
cmd/main.go
@@ -1,25 +1,24 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
_ "embed"
|
"embed"
|
||||||
"fmt"
|
"fmt"
|
||||||
"html/template"
|
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"github.com/everywall/ladder/handlers"
|
"ladder/handlers"
|
||||||
"github.com/everywall/ladder/internal/cli"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/requestmodifiers/bot"
|
|
||||||
ruleset_v2 "github.com/everywall/ladder/proxychain/ruleset"
|
|
||||||
|
|
||||||
"github.com/akamensky/argparse"
|
"github.com/akamensky/argparse"
|
||||||
"github.com/gofiber/fiber/v2"
|
"github.com/gofiber/fiber/v2"
|
||||||
"github.com/gofiber/template/html/v2"
|
"github.com/gofiber/fiber/v2/middleware/basicauth"
|
||||||
|
"github.com/gofiber/fiber/v2/middleware/favicon"
|
||||||
)
|
)
|
||||||
|
|
||||||
//go:embed VERSION
|
//go:embed favicon.ico
|
||||||
var version string
|
var faviconData string
|
||||||
|
//go:embed styles.css
|
||||||
|
var cssData embed.FS
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
parser := argparse.NewParser("ladder", "Every Wall needs a Ladder")
|
parser := argparse.NewParser("ladder", "Every Wall needs a Ladder")
|
||||||
@@ -28,7 +27,6 @@ func main() {
|
|||||||
if os.Getenv("PORT") == "" {
|
if os.Getenv("PORT") == "" {
|
||||||
portEnv = "8080"
|
portEnv = "8080"
|
||||||
}
|
}
|
||||||
|
|
||||||
port := parser.String("p", "port", &argparse.Options{
|
port := parser.String("p", "port", &argparse.Options{
|
||||||
Required: false,
|
Required: false,
|
||||||
Default: portEnv,
|
Default: portEnv,
|
||||||
@@ -40,145 +38,58 @@ func main() {
|
|||||||
Help: "This will spawn multiple processes listening",
|
Help: "This will spawn multiple processes listening",
|
||||||
})
|
})
|
||||||
|
|
||||||
verbose := parser.Flag("v", "verbose", &argparse.Options{
|
|
||||||
Required: false,
|
|
||||||
Help: "Adds verbose logging",
|
|
||||||
})
|
|
||||||
|
|
||||||
randomGoogleBot := parser.Flag("", "random-googlebot", &argparse.Options{
|
|
||||||
Required: false,
|
|
||||||
Help: "Update the list of trusted Googlebot IPs, and use a random one for each masqueraded request",
|
|
||||||
})
|
|
||||||
|
|
||||||
randomBingBot := parser.Flag("", "random-bingbot", &argparse.Options{
|
|
||||||
Required: false,
|
|
||||||
Help: "Update the list of trusted Bingbot IPs, and use a random one for each masqueraded request",
|
|
||||||
})
|
|
||||||
|
|
||||||
// TODO: add version flag that reads from handers/VERSION
|
|
||||||
|
|
||||||
ruleset := parser.String("r", "ruleset", &argparse.Options{
|
|
||||||
Required: false,
|
|
||||||
Help: "File, Directory or URL to a ruleset.yaml. Overrides RULESET environment variable.",
|
|
||||||
})
|
|
||||||
|
|
||||||
mergeRulesets := parser.Flag("", "merge-rulesets", &argparse.Options{
|
|
||||||
Required: false,
|
|
||||||
Help: "Compiles a directory of yaml files into a single ruleset.yaml. Requires --ruleset arg.",
|
|
||||||
})
|
|
||||||
|
|
||||||
mergeRulesetsOutput := parser.String("", "merge-rulesets-output", &argparse.Options{
|
|
||||||
Required: false,
|
|
||||||
Help: "Specify output file for --merge-rulesets. Requires --ruleset and --merge-rulesets args.",
|
|
||||||
})
|
|
||||||
|
|
||||||
err := parser.Parse(os.Args)
|
err := parser.Parse(os.Args)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Print(parser.Usage(err))
|
fmt.Print(parser.Usage(err))
|
||||||
}
|
}
|
||||||
|
|
||||||
if *randomGoogleBot {
|
|
||||||
err := bot.GoogleBot.UpdatePool("https://developers.google.com/static/search/apis/ipranges/googlebot.json")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("error while retrieving list of Googlebot IPs: " + err.Error())
|
|
||||||
fmt.Println("defaulting to known trusted Googlebot identity")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if *randomBingBot {
|
|
||||||
err := bot.BingBot.UpdatePool("https://www.bing.com/toolbox/bingbot.json")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("error while retrieving list of Bingbot IPs: " + err.Error())
|
|
||||||
fmt.Println("defaulting to known trusted Bingbot identity")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// utility cli flag to compile ruleset directory into single ruleset.yaml
|
|
||||||
if *mergeRulesets {
|
|
||||||
output := os.Stdout
|
|
||||||
|
|
||||||
if *mergeRulesetsOutput != "" {
|
|
||||||
output, err = os.Create(*mergeRulesetsOutput)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
err = cli.HandleRulesetMerge(*ruleset, *mergeRulesets, output)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
if os.Getenv("PREFORK") == "true" {
|
if os.Getenv("PREFORK") == "true" {
|
||||||
*prefork = true
|
*prefork = true
|
||||||
}
|
}
|
||||||
|
|
||||||
var rs ruleset_v2.IRuleset
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case *ruleset != "":
|
|
||||||
rs, err = ruleset_v2.NewRuleset(*ruleset)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Printf("ERROR: failed to load ruleset from %s\n", *ruleset)
|
|
||||||
}
|
|
||||||
case os.Getenv("RULESET") != "":
|
|
||||||
rs = ruleset_v2.NewRulesetFromEnv()
|
|
||||||
}
|
|
||||||
|
|
||||||
engine := html.New("./handlers", ".html")
|
|
||||||
engine.AddFunc(
|
|
||||||
// add unescape function
|
|
||||||
"unescape", func(s string) template.HTML {
|
|
||||||
return template.HTML(s)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
app := fiber.New(
|
app := fiber.New(
|
||||||
fiber.Config{
|
fiber.Config{
|
||||||
Prefork: *prefork,
|
Prefork: *prefork,
|
||||||
GETOnly: false,
|
GETOnly: true,
|
||||||
ReadBufferSize: 4096 * 4, // increase max header size
|
|
||||||
DisableStartupMessage: true,
|
|
||||||
Views: engine,
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
app.Use(handlers.Auth())
|
userpass := os.Getenv("USERPASS")
|
||||||
app.Use(handlers.Favicon())
|
if userpass != "" {
|
||||||
|
userpass := strings.Split(userpass, ":")
|
||||||
|
app.Use(basicauth.New(basicauth.Config{
|
||||||
|
Users: map[string]string{
|
||||||
|
userpass[0]: userpass[1],
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
app.Use(favicon.New(favicon.Config{
|
||||||
|
Data: []byte(faviconData),
|
||||||
|
URL: "/favicon.ico",
|
||||||
|
}))
|
||||||
|
|
||||||
if os.Getenv("NOLOGS") != "true" {
|
if os.Getenv("NOLOGS") != "true" {
|
||||||
app.Use(func(c *fiber.Ctx) error {
|
app.Use(func(c *fiber.Ctx) error {
|
||||||
log.Println(c.Method(), c.Path())
|
log.Println(c.Method(), c.Path())
|
||||||
|
|
||||||
return c.Next()
|
return c.Next()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
proxyOpts := &handlers.ProxyOptions{
|
|
||||||
Verbose: *verbose,
|
|
||||||
Ruleset: rs,
|
|
||||||
}
|
|
||||||
|
|
||||||
app.Get("/", handlers.Form)
|
app.Get("/", handlers.Form)
|
||||||
|
app.Get("/styles.css", func(c *fiber.Ctx) error {
|
||||||
|
cssData, err := cssData.ReadFile("styles.css")
|
||||||
|
if err != nil {
|
||||||
|
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
|
||||||
|
}
|
||||||
|
c.Set("Content-Type", "text/css")
|
||||||
|
return c.Send(cssData)
|
||||||
|
})
|
||||||
|
app.Get("ruleset", handlers.Ruleset)
|
||||||
|
|
||||||
app.Get("styles.css", handlers.Styles)
|
app.Get("raw/*", handlers.Raw)
|
||||||
app.Get("script.js", handlers.Script)
|
app.Get("api/*", handlers.Api)
|
||||||
|
app.Get("/*", handlers.ProxySite)
|
||||||
|
|
||||||
app.All("api/raw/*", handlers.NewRawProxySiteHandler(proxyOpts))
|
|
||||||
|
|
||||||
app.Get("api/modifiers", handlers.NewAPIModifersListHandler(proxyOpts))
|
|
||||||
app.Get("api/ruleset/*", handlers.NewRulesetSiteHandler(proxyOpts))
|
|
||||||
app.Get("api/content/*", handlers.NewAPIContentHandler("api/outline/*", proxyOpts))
|
|
||||||
|
|
||||||
app.Get("outline/*", handlers.NewOutlineHandler("outline/*", proxyOpts))
|
|
||||||
|
|
||||||
app.All("/*", handlers.NewProxySiteHandler(proxyOpts))
|
|
||||||
|
|
||||||
fmt.Println(cli.StartupMessage(version, *port, *ruleset))
|
|
||||||
log.Fatal(app.Listen(":" + *port))
|
log.Fatal(app.Listen(":" + *port))
|
||||||
}
|
}
|
||||||
|
|||||||
1
cmd/styles.css
Normal file
1
cmd/styles.css
Normal file
File diff suppressed because one or more lines are too long
@@ -3,17 +3,16 @@ services:
|
|||||||
ladder:
|
ladder:
|
||||||
image: ghcr.io/everywall/ladder:latest
|
image: ghcr.io/everywall/ladder:latest
|
||||||
container_name: ladder
|
container_name: ladder
|
||||||
build: .
|
#build: .
|
||||||
#restart: always
|
#restart: always
|
||||||
#command: sh -c ./ladder
|
#command: sh -c ./ladder
|
||||||
environment:
|
environment:
|
||||||
- PORT=8080
|
- PORT=8080
|
||||||
- RULESET=/app/ruleset.yaml
|
- RULESET=/app/ruleset.yaml
|
||||||
#- ALLOWED_DOMAINS=example.com,example.org
|
|
||||||
#- ALLOWED_DOMAINS_RULESET=false
|
#- ALLOWED_DOMAINS_RULESET=false
|
||||||
#- EXPOSE_RULESET=true
|
#- EXPOSE_RULESET=true
|
||||||
#- PREFORK=false
|
#- PREFORK=false
|
||||||
#- DISABLE_FORM=false
|
#- DISABLE_FORM=fase
|
||||||
#- FORM_PATH=/app/form.html
|
#- FORM_PATH=/app/form.html
|
||||||
#- X_FORWARDED_FOR=66.249.66.1
|
#- X_FORWARDED_FOR=66.249.66.1
|
||||||
#- USER_AGENT=Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
|
#- USER_AGENT=Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
|
||||||
|
|||||||
44
go.mod
44
go.mod
@@ -1,59 +1,29 @@
|
|||||||
module github.com/everywall/ladder
|
module ladder
|
||||||
|
|
||||||
go 1.21.1
|
go 1.21.1
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/PuerkitoBio/goquery v1.8.1
|
||||||
github.com/akamensky/argparse v1.4.0
|
github.com/akamensky/argparse v1.4.0
|
||||||
github.com/bogdanfinn/fhttp v0.5.24
|
github.com/gofiber/fiber/v2 v2.50.0
|
||||||
github.com/bogdanfinn/tls-client v1.6.1
|
|
||||||
github.com/go-shiori/dom v0.0.0-20230515143342-73569d674e1c
|
|
||||||
github.com/gofiber/fiber/v2 v2.51.0
|
|
||||||
github.com/markusmobius/go-trafilatura v1.5.1
|
|
||||||
github.com/stretchr/testify v1.8.4
|
github.com/stretchr/testify v1.8.4
|
||||||
golang.org/x/net v0.19.0
|
|
||||||
golang.org/x/term v0.15.0
|
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/abadojack/whatlanggo v1.0.1 // indirect
|
|
||||||
github.com/andybalholm/brotli v1.0.6 // indirect
|
github.com/andybalholm/brotli v1.0.6 // indirect
|
||||||
github.com/andybalholm/cascadia v1.3.2 // indirect
|
github.com/andybalholm/cascadia v1.3.2 // indirect
|
||||||
github.com/bogdanfinn/utls v1.5.16 // indirect
|
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
github.com/elliotchance/pie/v2 v2.8.0 // indirect
|
|
||||||
github.com/forPelevin/gomoji v1.1.8 // indirect
|
|
||||||
github.com/go-shiori/go-readability v0.0.0-20231029095239-6b97d5aba789 // indirect
|
|
||||||
github.com/gofiber/template v1.8.2 // indirect
|
|
||||||
github.com/gofiber/template/html/v2 v2.0.5
|
|
||||||
github.com/gofiber/utils v1.1.0 // indirect
|
|
||||||
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect
|
|
||||||
github.com/google/uuid v1.4.0 // indirect
|
github.com/google/uuid v1.4.0 // indirect
|
||||||
github.com/hablullah/go-hijri v1.0.2 // indirect
|
github.com/klauspost/compress v1.17.2 // indirect
|
||||||
github.com/hablullah/go-juliandays v1.0.0 // indirect
|
|
||||||
github.com/jalaali/go-jalaali v0.0.0-20210801064154-80525e88d958 // indirect
|
|
||||||
github.com/klauspost/compress v1.17.4 // indirect
|
|
||||||
github.com/magefile/mage v1.15.0 // indirect
|
|
||||||
github.com/markusmobius/go-dateparser v1.2.1 // indirect
|
|
||||||
github.com/markusmobius/go-domdistiller v0.0.0-20230515154422-71af71939ff3 // indirect
|
|
||||||
github.com/markusmobius/go-htmldate v1.2.2 // indirect
|
|
||||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/mattn/go-runewidth v0.0.15 // indirect
|
github.com/mattn/go-runewidth v0.0.15 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
github.com/rivo/uniseg v0.4.4 // indirect
|
github.com/rivo/uniseg v0.4.4 // indirect
|
||||||
github.com/rs/zerolog v1.31.0 // indirect
|
|
||||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
|
||||||
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 // indirect
|
|
||||||
github.com/tetratelabs/wazero v1.5.0 // indirect
|
|
||||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||||
github.com/valyala/fasthttp v1.51.0 // indirect
|
github.com/valyala/fasthttp v1.50.0 // indirect
|
||||||
github.com/valyala/tcplisten v1.0.0 // indirect
|
github.com/valyala/tcplisten v1.0.0 // indirect
|
||||||
github.com/wasilibs/go-re2 v1.4.1 // indirect
|
golang.org/x/net v0.18.0 // indirect
|
||||||
github.com/yosssi/gohtml v0.0.0-20201013000340-ee4748c638f4 // indirect
|
golang.org/x/sys v0.14.0 // indirect
|
||||||
golang.org/x/crypto v0.16.0 // indirect
|
|
||||||
golang.org/x/exp v0.0.0-20231127185646-65229373498e // indirect
|
|
||||||
golang.org/x/sys v0.15.0 // indirect
|
|
||||||
golang.org/x/text v0.14.0 // indirect
|
|
||||||
|
|
||||||
)
|
)
|
||||||
|
|||||||
109
go.sum
109
go.sum
@@ -1,158 +1,85 @@
|
|||||||
github.com/abadojack/whatlanggo v1.0.1 h1:19N6YogDnf71CTHm3Mp2qhYfkRdyvbgwWdd2EPxJRG4=
|
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
|
||||||
github.com/abadojack/whatlanggo v1.0.1/go.mod h1:66WiQbSbJBIlOZMsvbKe5m6pzQovxCH9B/K8tQB2uoc=
|
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
|
||||||
github.com/akamensky/argparse v1.4.0 h1:YGzvsTqCvbEZhL8zZu2AiA5nq805NZh75JNj4ajn1xc=
|
github.com/akamensky/argparse v1.4.0 h1:YGzvsTqCvbEZhL8zZu2AiA5nq805NZh75JNj4ajn1xc=
|
||||||
github.com/akamensky/argparse v1.4.0/go.mod h1:S5kwC7IuDcEr5VeXtGPRVZ5o/FdhcMlQz4IZQuw64xA=
|
github.com/akamensky/argparse v1.4.0/go.mod h1:S5kwC7IuDcEr5VeXtGPRVZ5o/FdhcMlQz4IZQuw64xA=
|
||||||
github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
|
github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
|
||||||
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||||
|
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
|
||||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||||
github.com/bogdanfinn/fhttp v0.5.24 h1:OlyBKjvJp6a3TotN3wuj4mQHHRbfK7QUMrzCPOZGhRc=
|
|
||||||
github.com/bogdanfinn/fhttp v0.5.24/go.mod h1:brqi5woc5eSCVHdKYBV8aZLbO7HGqpwyDLeXW+fT18I=
|
|
||||||
github.com/bogdanfinn/tls-client v1.6.1 h1:GTIqQssFoIvLaDf4btoYRzDhUzudLqYD4axvfUCXl3I=
|
|
||||||
github.com/bogdanfinn/tls-client v1.6.1/go.mod h1:FtwQ3DndVZ0xAOO704v4iNAgbHOcEc5kPk9tjICTNQ0=
|
|
||||||
github.com/bogdanfinn/utls v1.5.16 h1:NhhWkegEcYETBMj9nvgO4lwvc6NcLH+znrXzO3gnw4M=
|
|
||||||
github.com/bogdanfinn/utls v1.5.16/go.mod h1:mHeRCi69cUiEyVBkKONB1cAbLjRcZnlJbGzttmiuK4o=
|
|
||||||
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/elliotchance/pie/v2 v2.8.0 h1://QS43W8sEha8XV/fjngO5iMudN3XARJV5cpBayAcVY=
|
github.com/gofiber/fiber/v2 v2.50.0 h1:ia0JaB+uw3GpNSCR5nvC5dsaxXjRU5OEu36aytx+zGw=
|
||||||
github.com/elliotchance/pie/v2 v2.8.0/go.mod h1:18t0dgGFH006g4eVdDtWfgFZPQEgl10IoEO8YWEq3Og=
|
github.com/gofiber/fiber/v2 v2.50.0/go.mod h1:21eytvay9Is7S6z+OgPi7c7n4++tnClWmhpimVHMimw=
|
||||||
github.com/forPelevin/gomoji v1.1.8 h1:JElzDdt0TyiUlecy6PfITDL6eGvIaxqYH1V52zrd0qQ=
|
|
||||||
github.com/forPelevin/gomoji v1.1.8/go.mod h1:8+Z3KNGkdslmeGZBC3tCrwMrcPy5GRzAD+gL9NAwMXg=
|
|
||||||
github.com/go-shiori/dom v0.0.0-20230515143342-73569d674e1c h1:wpkoddUomPfHiOziHZixGO5ZBS73cKqVzZipfrLmO1w=
|
|
||||||
github.com/go-shiori/dom v0.0.0-20230515143342-73569d674e1c/go.mod h1:oVDCh3qjJMLVUSILBRwrm+Bc6RNXGZYtoh9xdvf1ffM=
|
|
||||||
github.com/go-shiori/go-readability v0.0.0-20231029095239-6b97d5aba789 h1:G6wSuUyCoLB9jrUokipsmFuRi8aJozt3phw/g9Sl4Xs=
|
|
||||||
github.com/go-shiori/go-readability v0.0.0-20231029095239-6b97d5aba789/go.mod h1:2DpZlTJO/ycxp/vsc/C11oUyveStOgIXB88SYV1lncI=
|
|
||||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
|
||||||
github.com/gofiber/fiber/v2 v2.51.0 h1:JNACcZy5e2tGApWB2QrRpenTWn0fq0hkFm6k0C86gKQ=
|
|
||||||
github.com/gofiber/fiber/v2 v2.51.0/go.mod h1:xaQRZQJGqnKOQnbQw+ltvku3/h8QxvNi8o6JiJ7Ll0U=
|
|
||||||
github.com/gofiber/template v1.8.2 h1:PIv9s/7Uq6m+Fm2MDNd20pAFFKt5wWs7ZBd8iV9pWwk=
|
|
||||||
github.com/gofiber/template v1.8.2/go.mod h1:bs/2n0pSNPOkRa5VJ8zTIvedcI/lEYxzV3+YPXdBvq8=
|
|
||||||
github.com/gofiber/template/html/v2 v2.0.5 h1:BKLJ6Qr940NjntbGmpO3zVa4nFNGDCi/IfUiDB9OC20=
|
|
||||||
github.com/gofiber/template/html/v2 v2.0.5/go.mod h1:RCF14eLeQDCSUPp0IGc2wbSSDv6yt+V54XB/+Unz+LM=
|
|
||||||
github.com/gofiber/utils v1.1.0 h1:vdEBpn7AzIUJRhe+CiTOJdUcTg4Q9RK+pEa0KPbLdrM=
|
|
||||||
github.com/gofiber/utils v1.1.0/go.mod h1:poZpsnhBykfnY1Mc0KeEa6mSHrS3dV0+oBWyeQmb2e0=
|
|
||||||
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f h1:3BSP1Tbs2djlpprl7wCLuiqMaUh5SJkkzI2gDs+FgLs=
|
|
||||||
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14=
|
|
||||||
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
|
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
|
||||||
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/hablullah/go-hijri v1.0.2 h1:drT/MZpSZJQXo7jftf5fthArShcaMtsal0Zf/dnmp6k=
|
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
|
||||||
github.com/hablullah/go-hijri v1.0.2/go.mod h1:OS5qyYLDjORXzK4O1adFw9Q5WfhOcMdAKglDkcTxgWQ=
|
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||||
github.com/hablullah/go-juliandays v1.0.0 h1:A8YM7wIj16SzlKT0SRJc9CD29iiaUzpBLzh5hr0/5p0=
|
|
||||||
github.com/hablullah/go-juliandays v1.0.0/go.mod h1:0JOYq4oFOuDja+oospuc61YoX+uNEn7Z6uHYTbBzdGc=
|
|
||||||
github.com/jalaali/go-jalaali v0.0.0-20210801064154-80525e88d958 h1:qxLoi6CAcXVzjfvu+KXIXJOAsQB62LXjsfbOaErsVzE=
|
|
||||||
github.com/jalaali/go-jalaali v0.0.0-20210801064154-80525e88d958/go.mod h1:Wqfu7mjUHj9WDzSSPI5KfBclTTEnLveRUFr/ujWnTgE=
|
|
||||||
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
|
|
||||||
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
|
||||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
|
||||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
|
||||||
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
|
|
||||||
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
|
|
||||||
github.com/markusmobius/go-dateparser v1.2.1 h1:mYRRdu3TzpAeE6fSl2Gn3arfxEtoTRvFOKlumlVsUtg=
|
|
||||||
github.com/markusmobius/go-dateparser v1.2.1/go.mod h1:5xYsZ1h7iB3sE1BSu8bkjYpbFST7EU1/AFxcyO3mgYg=
|
|
||||||
github.com/markusmobius/go-domdistiller v0.0.0-20230515154422-71af71939ff3 h1:D83RvMz1lQ0ilKlJt6DWc65+Q77CXGRFmfihR0bfQvc=
|
|
||||||
github.com/markusmobius/go-domdistiller v0.0.0-20230515154422-71af71939ff3/go.mod h1:n1AYw0wiJDT3YXnIsElJPiDR63YGXT2yv3uq0CboGmU=
|
|
||||||
github.com/markusmobius/go-htmldate v1.2.2 h1:tp1IxhefCYpEoL9CM1LiU6l+2YayTpuTjkkdnik6hXE=
|
|
||||||
github.com/markusmobius/go-htmldate v1.2.2/go.mod h1:26VRz16sCosuiv42MNRW9iPBGnGLo+q/Z6TWitt8uzs=
|
|
||||||
github.com/markusmobius/go-trafilatura v1.5.1 h1:EXhZY2AVRyepUlLZHeuZUme3v7Ms9G8lDOLl4u+Jp5M=
|
|
||||||
github.com/markusmobius/go-trafilatura v1.5.1/go.mod h1:FhuBBPZ9ph4ufpGBKAkuq5oQwEhg0KKnIOUlv5h7EHg=
|
|
||||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
|
||||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
|
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
|
||||||
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
|
||||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
|
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
|
||||||
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
|
||||||
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
|
|
||||||
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
|
||||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
|
||||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
|
||||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
|
||||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
|
||||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 h1:YqAladjX7xpA6BM04leXMWAEjS0mTZ5kUU9KRBriQJc=
|
|
||||||
github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5/go.mod h1:2JjD2zLQYH5HO74y5+aE3remJQvl6q4Sn6aWA2wD1Ng=
|
|
||||||
github.com/tetratelabs/wazero v1.5.0 h1:Yz3fZHivfDiZFUXnWMPUoiW7s8tC1sjdBtlJn08qYa0=
|
|
||||||
github.com/tetratelabs/wazero v1.5.0/go.mod h1:0U0G41+ochRKoPKCJlh0jMg1CHkyfK8kDqiirMmKY8A=
|
|
||||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
github.com/valyala/fasthttp v1.51.0 h1:8b30A5JlZ6C7AS81RsWjYMQmrZG6feChmgAolCl1SqA=
|
github.com/valyala/fasthttp v1.50.0 h1:H7fweIlBm0rXLs2q0XbalvJ6r0CUPFWK3/bB4N13e9M=
|
||||||
github.com/valyala/fasthttp v1.51.0/go.mod h1:oI2XroL+lI7vdXyYoQk03bXBThfFl2cVdIA3Xl7cH8g=
|
github.com/valyala/fasthttp v1.50.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
|
||||||
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
|
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
|
||||||
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
||||||
github.com/wasilibs/go-re2 v1.4.1 h1:E5+9O1M8UoGeqLB2A9omeoaWImqpuYDs9cKwvTJq/Oo=
|
|
||||||
github.com/wasilibs/go-re2 v1.4.1/go.mod h1:ynB8eCwd9JsqUnsk8WlPDk6cEeme8BguZmnqOSURE4Y=
|
|
||||||
github.com/wasilibs/nottinygc v0.4.0 h1:h1TJMihMC4neN6Zq+WKpLxgd9xCFMw7O9ETLwY2exJQ=
|
|
||||||
github.com/wasilibs/nottinygc v0.4.0/go.mod h1:oDcIotskuYNMpqMF23l7Z8uzD4TC0WXHK8jetlB3HIo=
|
|
||||||
github.com/yosssi/gohtml v0.0.0-20201013000340-ee4748c638f4 h1:0sw0nJM544SpsihWx1bkXdYLQDlzRflMgFJQ4Yih9ts=
|
|
||||||
github.com/yosssi/gohtml v0.0.0-20201013000340-ee4748c638f4/go.mod h1:+ccdNT0xMY1dtc5XBxumbYfOUhmduiGudqaDgD2rVRE=
|
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY=
|
|
||||||
golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
|
||||||
golang.org/x/exp v0.0.0-20231127185646-65229373498e h1:Gvh4YaCaXNs6dKTlfgismwWZKyjVZXwOPfIyUaqU3No=
|
|
||||||
golang.org/x/exp v0.0.0-20231127185646-65229373498e/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI=
|
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
|
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
|
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||||
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
|
golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
|
||||||
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
|
golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q=
|
||||||
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
|
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||||
golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
|
|
||||||
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
|
||||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U=
|
|
||||||
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package handlers
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
_ "embed"
|
_ "embed"
|
||||||
|
"log"
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
"github.com/gofiber/fiber/v2"
|
||||||
)
|
)
|
||||||
@@ -11,5 +12,48 @@ import (
|
|||||||
var version string
|
var version string
|
||||||
|
|
||||||
func Api(c *fiber.Ctx) error {
|
func Api(c *fiber.Ctx) error {
|
||||||
return nil
|
// Get the url from the URL
|
||||||
|
urlQuery := c.Params("*")
|
||||||
|
|
||||||
|
queries := c.Queries()
|
||||||
|
body, req, resp, err := fetchSite(urlQuery, queries)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
c.SendStatus(500)
|
||||||
|
return c.SendString(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
response := Response{
|
||||||
|
Version: version,
|
||||||
|
Body: body,
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Request.Headers = make([]any, 0, len(req.Header))
|
||||||
|
for k, v := range req.Header {
|
||||||
|
response.Request.Headers = append(response.Request.Headers, map[string]string{
|
||||||
|
"key": k,
|
||||||
|
"value": v[0],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Response.Headers = make([]any, 0, len(resp.Header))
|
||||||
|
for k, v := range resp.Header {
|
||||||
|
response.Response.Headers = append(response.Response.Headers, map[string]string{
|
||||||
|
"key": k,
|
||||||
|
"value": v[0],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.JSON(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
type Response struct {
|
||||||
|
Version string `json:"version"`
|
||||||
|
Body string `json:"body"`
|
||||||
|
Request struct {
|
||||||
|
Headers []interface{} `json:"headers"`
|
||||||
|
} `json:"request"`
|
||||||
|
Response struct {
|
||||||
|
Headers []interface{} `json:"headers"`
|
||||||
|
} `json:"response"`
|
||||||
}
|
}
|
||||||
|
|||||||
44
handlers/api.test.go
Normal file
44
handlers/api.test.go
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
// BEGIN: 7d5e1f7c7d5e
|
||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestApi(t *testing.T) {
|
||||||
|
app := fiber.New()
|
||||||
|
app.Get("/api/*", Api)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
url string
|
||||||
|
expectedStatus int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid url",
|
||||||
|
url: "https://www.google.com",
|
||||||
|
expectedStatus: http.StatusOK,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid url",
|
||||||
|
url: "invalid-url",
|
||||||
|
expectedStatus: http.StatusBadRequest,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/"+tt.url, nil)
|
||||||
|
resp, err := app.Test(req)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, tt.expectedStatus, resp.StatusCode)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// END: 7d5e1f7c7d5e
|
||||||
@@ -1,45 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
rx "github.com/everywall/ladder/proxychain/requestmodifiers"
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewAPIContentHandler(path string, opts *ProxyOptions) fiber.Handler {
|
|
||||||
// TODO: implement ruleset logic
|
|
||||||
/*
|
|
||||||
var rs ruleset.RuleSet
|
|
||||||
if opts.RulesetPath != "" {
|
|
||||||
r, err := ruleset.NewRuleset(opts.RulesetPath)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
rs = r
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
proxychain := proxychain.
|
|
||||||
NewProxyChain().
|
|
||||||
WithAPIPath(path).
|
|
||||||
SetDebugLogging(opts.Verbose).
|
|
||||||
SetRequestModifications(
|
|
||||||
rx.MasqueradeAsGoogleBot(),
|
|
||||||
rx.ForwardRequestHeaders(),
|
|
||||||
rx.SpoofReferrerFromGoogleSearch(),
|
|
||||||
).
|
|
||||||
AddResponseModifications(
|
|
||||||
tx.DeleteIncomingCookies(),
|
|
||||||
tx.RewriteHTMLResourceURLs(),
|
|
||||||
tx.APIContent(),
|
|
||||||
).
|
|
||||||
SetFiberCtx(c).
|
|
||||||
Execute()
|
|
||||||
|
|
||||||
return proxychain
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/api"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewAPIModifersListHandler(opts *ProxyOptions) fiber.Handler {
|
|
||||||
payload := ModifiersAPIResponse{
|
|
||||||
Success: true,
|
|
||||||
Result: AllMods,
|
|
||||||
}
|
|
||||||
body, err := json.MarshalIndent(payload, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
c.Set("content-type", "application/json")
|
|
||||||
if err != nil {
|
|
||||||
c.SendStatus(500)
|
|
||||||
return c.SendStream(api.CreateAPIErrReader(err))
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.Send(body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,196 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"io"
|
|
||||||
"io/fs"
|
|
||||||
"os/exec"
|
|
||||||
|
|
||||||
//"io/fs"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
//"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
func genModStruct(fn *ast.FuncDecl, githubEditLink string, filename string) string {
|
|
||||||
params := []string{}
|
|
||||||
for _, fd := range fn.Type.Params.List {
|
|
||||||
p := fmt.Sprintf(` {Name: "%s", Type: "%+v"},`, fd.Names[0], fd.Type)
|
|
||||||
params = append(params, p)
|
|
||||||
}
|
|
||||||
|
|
||||||
block := fmt.Sprintf(`{
|
|
||||||
Name: "%s",
|
|
||||||
Description: "%s",
|
|
||||||
CodeEditLink: "%s%s",
|
|
||||||
Params: []Param{
|
|
||||||
%s
|
|
||||||
},
|
|
||||||
},`,
|
|
||||||
fn.Name.String(),
|
|
||||||
strings.ReplaceAll(strings.ReplaceAll(strings.TrimSpace(fn.Doc.Text()), "\n", " "), `"`, `\"`),
|
|
||||||
githubEditLink, filename,
|
|
||||||
strings.Join(params, "\n"),
|
|
||||||
)
|
|
||||||
|
|
||||||
return block
|
|
||||||
}
|
|
||||||
|
|
||||||
func modCodeGen(dir string, githubEditLink string) (code string, err error) {
|
|
||||||
fset := token.NewFileSet()
|
|
||||||
|
|
||||||
files, err := os.ReadDir(dir)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
modStructs := []string{}
|
|
||||||
for _, file := range files {
|
|
||||||
if !shouldGenCodeFor(file) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse each Go file
|
|
||||||
node, err := parser.ParseFile(fset, filepath.Join(dir, file.Name()), nil, parser.ParseComments)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
ast.Inspect(node, func(n ast.Node) bool {
|
|
||||||
fn, ok := n.(*ast.FuncDecl)
|
|
||||||
if ok && fn.Recv == nil && fn.Name.IsExported() {
|
|
||||||
modStructs = append(modStructs, genModStruct(fn, githubEditLink, file.Name()))
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
code = strings.Join(modStructs, "\n")
|
|
||||||
return code, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func shouldGenCodeFor(file fs.DirEntry) bool {
|
|
||||||
if file.IsDir() {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if filepath.Ext(file.Name()) != ".go" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if strings.HasSuffix(file.Name(), "_test.go") {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// getGitRemoteURL returns the URL of the named git remote (e.g. "origin") as
// an HTTPS browsing URL suitable for building web links.
//
// SSH-style remotes ("git@host:owner/repo") are rewritten to
// "https://host/owner/repo". A trailing ".git" is stripped from either form;
// previously it was only stripped in the SSH branch, so HTTPS remotes ending
// in ".git" produced broken ".../repo.git/edit/..." links.
func getGitRemoteURL(remoteName string) (string, error) {
	cmd := exec.Command("git", "remote", "get-url", remoteName)
	output, err := cmd.Output()
	if err != nil {
		return "", err
	}
	url := strings.TrimSpace(string(output))

	// Convert SSH format (git@host:owner/repo) to HTTPS format.
	if strings.HasPrefix(url, "git@") {
		url = strings.Replace(url, ":", "/", 1)
		url = strings.Replace(url, "git@", "https://", 1)
	}

	// Strip ".git" regardless of remote style.
	url = strings.TrimSuffix(url, ".git")

	return url, nil
}
|
|
||||||
|
|
||||||
// getCurrentGitBranch returns the name of the currently checked-out git
// branch, as reported by `git rev-parse --abbrev-ref HEAD`.
func getCurrentGitBranch() (string, error) {
	out, err := exec.Command("git", "rev-parse", "--abbrev-ref", "HEAD").Output()
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}
|
|
||||||
|
|
||||||
// main generates ../api_modifiers_structdef.gen.go: a Go source file
// declaring the Modifier/Param struct types plus an AllMods value listing
// every available request and response modifier, for use by the modifiers
// API endpoint. Edit links embedded in the output point at the current git
// remote and branch.
func main() {
	gitURL, err := getGitRemoteURL("origin")
	if err != nil {
		fmt.Println("Error getting Git remote URL:", err)
		return
	}

	branchName, err := getCurrentGitBranch()
	if err != nil {
		fmt.Println("Error getting current Git branch:", err)
		return
	}

	githubEditLink := fmt.Sprintf("%s/edit/%s/proxychain/requestmodifiers/", gitURL, branchName)
	rqmCode, err := modCodeGen("../../proxychain/requestmodifiers/", githubEditLink)
	if err != nil {
		panic(err)
	}

	githubEditLink = fmt.Sprintf("%s/edit/%s/proxychain/responsemodifiers/", gitURL, branchName)
	rsmCode, err := modCodeGen("../../proxychain/responsemodifiers/", githubEditLink)
	if err != nil {
		panic(err)
	}

	// The template uses "||" as a stand-in for backticks (struct tags),
	// because a raw string literal cannot contain a backtick itself; the
	// placeholders are swapped for real backticks below.
	code := fmt.Sprintf(`
package handlers
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/handlers/api_modifiers_codegen/api_modifiers_codegen.go
// The purpose of this is to produce an API reponse listing all the available modifier, their parameters and usage instructions.
// for use in proxychains.

import (
	"github.com/everywall/ladder/proxychain/responsemodifiers/api"
)

type ModifiersAPIResponse struct {
	Success bool             ||json:"success"||
	Error   api.ErrorDetails ||json:"error"||
	Result  Modifiers        ||json:"result"||
}

type Modifiers struct {
	RequestModifiers  []Modifier ||json:"requestmodifiers"||
	ResponseModifiers []Modifier ||json:"responsemodifiers"||
}

type Modifier struct {
	Name         string  ||json:"name"||
	Description  string  ||json:"description"||
	CodeEditLink string  ||json:"code_edit_link"||
	Params       []Param ||json:"params"||
}

type Param struct {
	Name string ||json:"name"||
	Type string ||json:"type"||
}

var AllMods Modifiers = Modifiers{
	RequestModifiers: []Modifier{
		%s
	},
	ResponseModifiers: []Modifier{
		%s
	},
}
`, rqmCode, rsmCode)
	code = strings.ReplaceAll(code, "||", "`")

	fq, err := os.Create("../api_modifiers_structdef.gen.go")
	if err != nil {
		panic(err)
	}
	// Close the output file and surface any error from Close; the handle
	// was previously leaked and close failures went unreported.
	defer func() {
		if cerr := fq.Close(); cerr != nil {
			panic(cerr)
		}
	}()

	if _, err := io.WriteString(fq, code); err != nil {
		panic(err)
	}
}
|
|
||||||
@@ -1,558 +0,0 @@
|
|||||||
|
|
||||||
package handlers
|
|
||||||
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/handlers/api_modifiers_codegen/api_modifiers_codegen.go
|
|
||||||
// The purpose of this is to produce an API reponse listing all the available modifier, their parameters and usage instructions.
|
|
||||||
// for use in proxychains.
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/api"
|
|
||||||
)
|
|
||||||
|
|
||||||
type ModifiersAPIResponse struct {
|
|
||||||
Success bool `json:"success"`
|
|
||||||
Error api.ErrorDetails `json:"error"`
|
|
||||||
Result Modifiers `json:"result"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Modifiers struct {
|
|
||||||
RequestModifiers []Modifier `json:"requestmodifiers"`
|
|
||||||
ResponseModifiers []Modifier `json:"responsemodifiers"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Modifier struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Description string `json:"description"`
|
|
||||||
CodeEditLink string `json:"code_edit_link"`
|
|
||||||
Params []Param `json:"params"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Param struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Type string `json:"type"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var AllMods Modifiers = Modifiers{
|
|
||||||
RequestModifiers: []Modifier{
|
|
||||||
{
|
|
||||||
Name: "AddCacheBusterQuery",
|
|
||||||
Description: "AddCacheBusterQuery modifies query params to add a random parameter key In order to get the upstream network stack to serve a fresh copy of the page.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/add_cache_buster_query.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "ForwardRequestHeaders",
|
|
||||||
Description: "ForwardRequestHeaders forwards the requests headers sent from the client to the upstream server",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/forward_request_headers.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsGoogleBot",
|
|
||||||
Description: "MasqueradeAsGoogleBot modifies user agent and x-forwarded for to appear to be a Google Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsBingBot",
|
|
||||||
Description: "MasqueradeAsBingBot modifies user agent and x-forwarded for to appear to be a Bing Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsWaybackMachineBot",
|
|
||||||
Description: "MasqueradeAsWaybackMachineBot modifies user agent and x-forwarded for to appear to be a archive.org (wayback machine) Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsFacebookBot",
|
|
||||||
Description: "MasqueradeAsFacebookBot modifies user agent and x-forwarded for to appear to be a Facebook Bot (link previews?)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsYandexBot",
|
|
||||||
Description: "MasqueradeAsYandexBot modifies user agent and x-forwarded for to appear to be a Yandex Spider Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsBaiduBot",
|
|
||||||
Description: "MasqueradeAsBaiduBot modifies user agent and x-forwarded for to appear to be a Baidu Spider Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsDuckDuckBot",
|
|
||||||
Description: "MasqueradeAsDuckDuckBot modifies user agent and x-forwarded for to appear to be a DuckDuckGo Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "MasqueradeAsYahooBot",
|
|
||||||
Description: "MasqueradeAsYahooBot modifies user agent and x-forwarded for to appear to be a Yahoo Bot",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/masquerade_as_trusted_bot.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "ModifyDomainWithRegex",
|
|
||||||
Description: "",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_domain_with_regex.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "matchRegex", Type: "string"},
|
|
||||||
{Name: "replacement", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetOutgoingCookie",
|
|
||||||
Description: "SetOutgoingCookie modifes a specific cookie name by modifying the request cookie headers going to the upstream server. If the cookie name does not already exist, it is created.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_outgoing_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "name", Type: "string"},
|
|
||||||
{Name: "val", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetOutgoingCookies",
|
|
||||||
Description: "SetOutgoingCookies modifies a client request's cookie header to a raw Cookie string, overwriting existing cookies",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_outgoing_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "cookies", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteOutgoingCookie",
|
|
||||||
Description: "DeleteOutgoingCookie modifies the http request's cookies header to delete a specific request cookie going to the upstream server. If the cookie does not exist, it does not do anything.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_outgoing_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "name", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteOutgoingCookies",
|
|
||||||
Description: "DeleteOutgoingCookies removes the cookie header entirely, preventing any cookies from reaching the upstream server.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_outgoing_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteOutgoingCookiesExcept",
|
|
||||||
Description: "DeleteOutGoingCookiesExcept prevents non-whitelisted cookies from being sent from the client to the upstream proxy server. Cookies whose names are in the whitelist are not removed.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_outgoing_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "whitelist", Type: "&{Ellipsis:12348 Elt:string}"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "ModifyPathWithRegex",
|
|
||||||
Description: "",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_path_with_regex.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "matchRegex", Type: "string"},
|
|
||||||
{Name: "replacement", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "ModifyQueryParams",
|
|
||||||
Description: "ModifyQueryParams replaces query parameter values in URL's query params in a ProxyChain's URL. If the query param key doesn't exist, it is created.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_query_params.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "key", Type: "string"},
|
|
||||||
{Name: "value", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetRequestHeader",
|
|
||||||
Description: "SetRequestHeader modifies a specific outgoing header This is the header that the upstream server will see.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_request_headers.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "name", Type: "string"},
|
|
||||||
{Name: "val", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteRequestHeader",
|
|
||||||
Description: "DeleteRequestHeader modifies a specific outgoing header This is the header that the upstream server will see.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/modify_request_headers.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "name", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "RequestArchiveIs",
|
|
||||||
Description: "RequestArchiveIs modifies a ProxyChain's URL to request an archived version from archive.is",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/request_archive_is.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "RequestGoogleCache",
|
|
||||||
Description: "RequestGoogleCache modifies a ProxyChain's URL to request its Google Cache version.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/request_google_cache.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "RequestWaybackMachine",
|
|
||||||
Description: "RequestWaybackMachine modifies a ProxyChain's URL to request the wayback machine (archive.org) version.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/request_wayback_machine.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "ResolveWithGoogleDoH",
|
|
||||||
Description: "ResolveWithGoogleDoH modifies a ProxyChain's client to make the request by resolving the URL using Google's DNS over HTTPs service",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/resolve_with_google_doh.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofOrigin",
|
|
||||||
Description: "SpoofOrigin modifies the origin header if the upstream server returns a Vary header it means you might get a different response if you change this",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_origin.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "url", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "HideOrigin",
|
|
||||||
Description: "HideOrigin modifies the origin header so that it is the original origin, not the proxy",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_origin.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrer",
|
|
||||||
Description: "SpoofReferrer modifies the referrer header. It is useful if the page can be accessed from a search engine or social media site, but not by browsing the website itself. if url is \"\", then the referrer header is removed.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "url", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "HideReferrer",
|
|
||||||
Description: "HideReferrer modifies the referrer header so that it is the original referrer, not the proxy",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromBaiduSearch",
|
|
||||||
Description: "SpoofReferrerFromBaiduSearch modifies the referrer header pretending to be from a BaiduSearch",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_baidu_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromBingSearch",
|
|
||||||
Description: "SpoofReferrerFromBingSearch modifies the referrer header pretending to be from a bing search site",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_bing_search.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromGoogleSearch",
|
|
||||||
Description: "SpoofReferrerFromGoogleSearch modifies the referrer header pretending to be from a google search site",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_google_search.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromLinkedInPost",
|
|
||||||
Description: "SpoofReferrerFromLinkedInPost modifies the referrer header pretending to be from a linkedin post",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_linkedin_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromNaverSearch",
|
|
||||||
Description: "SpoofReferrerFromNaverSearch modifies the referrer header pretending to be from a Naver search (popular in South Korea)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_naver_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromPinterestPost",
|
|
||||||
Description: "SpoofReferrerFromPinterestPost modifies the referrer header pretending to be from a pinterest post",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_pinterest_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromQQPost",
|
|
||||||
Description: "SpoofReferrerFromQQPost modifies the referrer header pretending to be from a QQ post (popular social media in China)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_qq_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromRedditPost",
|
|
||||||
Description: "SpoofReferrerFromRedditPost modifies the referrer header pretending to be from a reddit post",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_reddit_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromTumblrPost",
|
|
||||||
Description: "SpoofReferrerFromTumblrPost modifies the referrer header pretending to be from a tumblr post",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_tumblr_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromTwitterPost",
|
|
||||||
Description: "SpoofReferrerFromTwitterPost modifies the referrer header pretending to be from a twitter post",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_twitter_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromVkontaktePost",
|
|
||||||
Description: "SpoofReferrerFromVkontaktePost modifies the referrer header pretending to be from a vkontakte post (popular in Russia)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_vkontake_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofReferrerFromWeiboPost",
|
|
||||||
Description: "SpoofReferrerFromWeiboPost modifies the referrer header pretending to be from a Weibo post (popular in China)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_referrer_from_weibo_post.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofUserAgent",
|
|
||||||
Description: "SpoofUserAgent modifies the user agent",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_user_agent.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "ua", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SpoofXForwardedFor",
|
|
||||||
Description: "SpoofXForwardedFor modifies the X-Forwarded-For header in some cases, a forward proxy may interpret this as the source IP",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/requestmodifiers/spoof_x_forwarded_for.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "ip", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
ResponseModifiers: []Modifier{
|
|
||||||
{
|
|
||||||
Name: "APIContent",
|
|
||||||
Description: "APIContent creates an JSON representation of the article and returns it as an API response.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/api_content.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "BlockElementRemoval",
|
|
||||||
Description: "BlockElementRemoval prevents paywall javascript from removing a particular element by detecting the removal, then immediately reinserting it. This is useful when a page will return a \"fake\" 404, after flashing the content briefly. If the /outline/ API works, but the regular API doesn't, try this modifier.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/block_element_removal.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "cssSelector", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "BlockThirdPartyScripts",
|
|
||||||
Description: "BlockThirdPartyScripts rewrites HTML and injects JS to block all third party JS from loading.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/block_third_party_scripts.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "BypassCORS",
|
|
||||||
Description: "BypassCORS modifies response headers to prevent the browser from enforcing any CORS restrictions. This should run at the end of the chain.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/bypass_cors.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "BypassContentSecurityPolicy",
|
|
||||||
Description: "BypassContentSecurityPolicy modifies response headers to prevent the browser from enforcing any CSP restrictions. This should run at the end of the chain.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/bypass_csp.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetContentSecurityPolicy",
|
|
||||||
Description: "SetContentSecurityPolicy modifies response headers to a specific CSP",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/bypass_csp.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "csp", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteLocalStorageData",
|
|
||||||
Description: "DeleteLocalStorageData deletes localstorage cookies. If the page works once in a fresh incognito window, but fails for subsequent loads, try this response modifier alongside DeleteSessionStorageData and DeleteIncomingCookies",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/delete_localstorage_data.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteSessionStorageData",
|
|
||||||
Description: "DeleteSessionStorageData deletes localstorage cookies. If the page works once in a fresh incognito window, but fails for subsequent loads, try this response modifier alongside DeleteLocalStorageData and DeleteIncomingCookies",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/delete_sessionstorage_data.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "ForwardResponseHeaders",
|
|
||||||
Description: "ForwardResponseHeaders forwards the response headers from the upstream server to the client",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/forward_response_headers.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "GenerateReadableOutline",
|
|
||||||
Description: "GenerateReadableOutline creates an reader-friendly distilled representation of the article. This is a reliable way of bypassing soft-paywalled articles, where the content is hidden, but still present in the DOM.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/generate_readable_outline.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "InjectScriptBeforeDOMContentLoaded",
|
|
||||||
Description: "InjectScriptBeforeDOMContentLoaded modifies HTTP responses to inject a JS before DOM Content is loaded (script tag in head)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/inject_script.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "js", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "InjectScriptAfterDOMContentLoaded",
|
|
||||||
Description: "InjectScriptAfterDOMContentLoaded modifies HTTP responses to inject a JS after DOM Content is loaded (script tag in head)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/inject_script.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "js", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "InjectScriptAfterDOMIdle",
|
|
||||||
Description: "InjectScriptAfterDOMIdle modifies HTTP responses to inject a JS after the DOM is idle (ie: js framework loaded)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/inject_script.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "js", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteIncomingCookies",
|
|
||||||
Description: "DeleteIncomingCookies prevents ALL cookies from being sent from the proxy server back down to the client.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/modify_incoming_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "_", Type: "&{Ellipsis:16319 Elt:string}"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteIncomingCookiesExcept",
|
|
||||||
Description: "DeleteIncomingCookiesExcept prevents non-whitelisted cookies from being sent from the proxy server to the client. Cookies whose names are in the whitelist are not removed.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/modify_incoming_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "whitelist", Type: "&{Ellipsis:16864 Elt:string}"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetIncomingCookies",
|
|
||||||
Description: "SetIncomingCookies adds a raw cookie string being sent from the proxy server down to the client",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/modify_incoming_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "cookies", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetIncomingCookie",
|
|
||||||
Description: "SetIncomingCookie modifies a specific cookie in the response from the proxy server to the client.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/modify_incoming_cookies.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "name", Type: "string"},
|
|
||||||
{Name: "val", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "SetResponseHeader",
|
|
||||||
Description: "SetResponseHeader modifies response headers from the upstream server",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/modify_response_header.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "key", Type: "string"},
|
|
||||||
{Name: "value", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "DeleteResponseHeader",
|
|
||||||
Description: "DeleteResponseHeader removes response headers from the upstream server",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/modify_response_header.go",
|
|
||||||
Params: []Param{
|
|
||||||
{Name: "key", Type: "string"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "PatchDynamicResourceURLs",
|
|
||||||
Description: "PatchDynamicResourceURLs patches the javascript runtime to rewrite URLs client-side. - This function is designed to allow the proxified page to still be browsible by routing all resource URLs through the proxy. - Native APIs capable of network requests will be hooked and the URLs arguments modified to point to the proxy instead. - fetch('/relative_path') -> fetch('/https://proxiedsite.com/relative_path') - Element.setAttribute('src', \"/assets/img.jpg\") -> Element.setAttribute('src', \"/https://proxiedsite.com/assets/img.jpg\") -> fetch('/https://proxiedsite.com/relative_path')",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/patch_dynamic_resource_urls.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "PatchTrackerScripts",
|
|
||||||
Description: "PatchTrackerScripts replaces any request to tracker scripts such as google analytics with a no-op stub that mocks the API structure of the original scripts they replace. Some pages depend on the existence of these structures for proper loading, so this may fix some broken elements. Surrogate script code borrowed from: DuckDuckGo Privacy Essentials browser extension for Firefox, Chrome. (Apache 2.0 license)",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/patch_tracker_scripts.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "RewriteHTMLResourceURLs",
|
|
||||||
Description: "RewriteHTMLResourceURLs modifies HTTP responses to rewrite URLs attributes in HTML content (such as src, href) - `<img src='/relative_path'>` -> `<img src='/https://proxiedsite.com/relative_path'>` - This function is designed to allow the proxified page to still be browsible by routing all resource URLs through the proxy.",
|
|
||||||
CodeEditLink: "https://github.com/everywall/ladder/edit/origin/proxy_v2/proxychain/responsemodifiers/rewrite_http_resource_urls.go",
|
|
||||||
Params: []Param{
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
rx "github.com/everywall/ladder/proxychain/requestmodifiers"
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewRawProxySiteHandler(opts *ProxyOptions) fiber.Handler {
|
|
||||||
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
proxychain := proxychain.
|
|
||||||
NewProxyChain().
|
|
||||||
SetFiberCtx(c).
|
|
||||||
SetRequestModifications(
|
|
||||||
rx.AddCacheBusterQuery(),
|
|
||||||
rx.MasqueradeAsGoogleBot(),
|
|
||||||
rx.ForwardRequestHeaders(),
|
|
||||||
rx.HideOrigin(),
|
|
||||||
rx.DeleteOutgoingCookies(),
|
|
||||||
rx.SpoofReferrerFromRedditPost(),
|
|
||||||
)
|
|
||||||
|
|
||||||
// no options passed in, return early
|
|
||||||
if opts == nil {
|
|
||||||
// return as plaintext, overriding any rules
|
|
||||||
proxychain.AddOnceResponseModifications(
|
|
||||||
tx.SetResponseHeader("content-type", "text/plain; charset=UTF-8"),
|
|
||||||
)
|
|
||||||
|
|
||||||
return proxychain.Execute()
|
|
||||||
}
|
|
||||||
|
|
||||||
// load ruleset
|
|
||||||
rule, exists := opts.Ruleset.GetRule(proxychain.Request.URL)
|
|
||||||
if exists {
|
|
||||||
proxychain.AddOnceRequestModifications(rule.RequestModifications...)
|
|
||||||
proxychain.AddOnceResponseModifications(rule.ResponseModifications...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// return as plaintext, overriding any rules
|
|
||||||
proxychain.AddOnceResponseModifications(
|
|
||||||
tx.SetResponseHeader("content-type", "text/plain; charset=UTF-8"),
|
|
||||||
)
|
|
||||||
|
|
||||||
return proxychain.Execute()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,96 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"gopkg.in/yaml.v3"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewRulesetSiteHandler(opts *ProxyOptions) fiber.Handler {
|
|
||||||
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
if opts == nil {
|
|
||||||
c.SendStatus(404)
|
|
||||||
c.SendString("No ruleset specified. Set the RULESET environment variable or use the --ruleset flag.")
|
|
||||||
}
|
|
||||||
|
|
||||||
// no specific rule requested, return the entire ruleset
|
|
||||||
if c.Params("*") == "" {
|
|
||||||
switch c.Get("accept") {
|
|
||||||
case "application/json":
|
|
||||||
jsn, err := opts.Ruleset.JSON()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
c.Set("content-type", "application/json")
|
|
||||||
return c.Send([]byte(jsn))
|
|
||||||
|
|
||||||
default:
|
|
||||||
yml, err := opts.Ruleset.YAML()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
c.Set("content-type", "text/yaml")
|
|
||||||
return c.Send([]byte(yml))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// a specific rule was requested by path /ruleset/https://example.com
|
|
||||||
// return only that particular rule
|
|
||||||
reqURL, err := extractURLFromContext(c, "api/ruleset/")
|
|
||||||
if err != nil {
|
|
||||||
c.SendStatus(404)
|
|
||||||
return c.SendString(fmt.Sprintf("A rule that matches '%s' was not found in the ruleset. Possible URL formatting issue.", c.Params("*")))
|
|
||||||
}
|
|
||||||
rule, exists := opts.Ruleset.GetRule(reqURL)
|
|
||||||
if !exists {
|
|
||||||
c.SendStatus(404)
|
|
||||||
return c.SendString(fmt.Sprintf("A rule that matches '%s' was not found in the ruleset.", reqURL))
|
|
||||||
}
|
|
||||||
|
|
||||||
switch c.Get("accept") {
|
|
||||||
case "application/json":
|
|
||||||
jsn, err := json.MarshalIndent(rule, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
c.Set("content-type", "application/json")
|
|
||||||
return c.Send(jsn)
|
|
||||||
default:
|
|
||||||
yml, err := yaml.Marshal(rule)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
c.Set("content-type", "text/yaml")
|
|
||||||
return c.Send(yml)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractURLFromContext extracts a URL from the request ctx.
|
|
||||||
func extractURLFromContext(ctx *fiber.Ctx, apiPrefix string) (*url.URL, error) {
|
|
||||||
reqURL := ctx.Params("*")
|
|
||||||
|
|
||||||
reqURL = strings.TrimPrefix(reqURL, apiPrefix)
|
|
||||||
if !strings.HasPrefix(reqURL, "http") {
|
|
||||||
reqURL = "https://" + reqURL
|
|
||||||
}
|
|
||||||
|
|
||||||
// sometimes client requests doubleroot '//'
|
|
||||||
// there is a bug somewhere else, but this is a workaround until we find it
|
|
||||||
if strings.HasPrefix(reqURL, "/") || strings.HasPrefix(reqURL, `%2F`) {
|
|
||||||
reqURL = strings.TrimPrefix(reqURL, "/")
|
|
||||||
reqURL = strings.TrimPrefix(reqURL, `%2F`)
|
|
||||||
}
|
|
||||||
|
|
||||||
// unescape url query
|
|
||||||
uReqURL, err := url.QueryUnescape(reqURL)
|
|
||||||
if err == nil {
|
|
||||||
reqURL = uReqURL
|
|
||||||
}
|
|
||||||
|
|
||||||
return url.Parse(reqURL)
|
|
||||||
}
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"github.com/gofiber/fiber/v2/middleware/basicauth"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Auth() fiber.Handler {
|
|
||||||
userpass := os.Getenv("USERPASS")
|
|
||||||
if userpass != "" {
|
|
||||||
userpass := strings.Split(userpass, ":")
|
|
||||||
return basicauth.New(basicauth.Config{
|
|
||||||
Users: map[string]string{
|
|
||||||
userpass[0]: userpass[1],
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
return c.Next()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"github.com/gofiber/fiber/v2/middleware/favicon"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed favicon.ico
|
|
||||||
var faviconData string
|
|
||||||
|
|
||||||
func Favicon() fiber.Handler {
|
|
||||||
return favicon.New(favicon.Config{
|
|
||||||
Data: []byte(faviconData),
|
|
||||||
URL: "/favicon.ico",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,299 +1,79 @@
|
|||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
<head>
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta charset="UTF-8">
|
||||||
<link rel="stylesheet" href="/styles.css" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>ladder</title>
|
<title>ladder</title>
|
||||||
<script src="/script.js" defer></script>
|
<link rel="stylesheet" href="/styles.css">
|
||||||
<script>
|
</head>
|
||||||
const handleThemeChange = () => {
|
|
||||||
let theme = localStorage.getItem("theme");
|
|
||||||
if (theme === null) {
|
|
||||||
localStorage.setItem("theme", "system");
|
|
||||||
theme = "system";
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
theme === "dark" ||
|
|
||||||
(theme === "system" &&
|
|
||||||
window.matchMedia("(prefers-color-scheme: dark)").matches)
|
|
||||||
) {
|
|
||||||
document.documentElement.classList.add("dark");
|
|
||||||
} else {
|
|
||||||
document.documentElement.classList.remove("dark");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
handleThemeChange();
|
|
||||||
</script>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body class="antialiased bg-white dark:bg-slate-900">
|
<body class="antialiased text-slate-500 dark:text-slate-400 bg-white dark:bg-slate-900">
|
||||||
<div class="flex flex-col gap-4 max-w-3xl mx-auto pt-10">
|
<div class="grid grid-cols-1 gap-4 max-w-3xl mx-auto pt-10">
|
||||||
<div class="place-self-end z-10">
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="100%" height="250" viewBox="0 0 512 512">
|
||||||
<div class="relative" id="dropdown">
|
<path fill="#7AA7D1" d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"/>
|
||||||
<button
|
<path fill="#7AA7D1" d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.680 111.990 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"/>
|
||||||
aria-expanded="closed"
|
|
||||||
onclick="toggleDropdown()"
|
|
||||||
type="button"
|
|
||||||
class="inline-flex items-center justify-center whitespace-nowrap rounded-full h-12 px-4 py-2 text-sm font-medium text-slate-600 dark:text-slate-400 ring-offset-white dark:ring-offset-slate-900 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-white dark:bg-slate-900 hover:bg-slate-200 dark:hover:bg-slate-700 hover:text-slate-500 dark:hover:text-slate-200"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
class="h-5 w-5"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
d="M12.22 2h-.44a2 2 0 0 0-2 2v.18a2 2 0 0 1-1 1.73l-.43.25a2 2 0 0 1-2 0l-.15-.08a2 2 0 0 0-2.73.73l-.22.38a2 2 0 0 0 .73 2.73l.15.1a2 2 0 0 1 1 1.72v.51a2 2 0 0 1-1 1.74l-.15.09a2 2 0 0 0-.73 2.73l.22.38a2 2 0 0 0 2.73.73l.15-.08a2 2 0 0 1 2 0l.43.25a2 2 0 0 1 1 1.73V20a2 2 0 0 0 2 2h.44a2 2 0 0 0 2-2v-.18a2 2 0 0 1 1-1.73l.43-.25a2 2 0 0 1 2 0l.15.08a2 2 0 0 0 2.73-.73l.22-.39a2 2 0 0 0-.73-2.73l-.15-.08a2 2 0 0 1-1-1.74v-.5a2 2 0 0 1 1-1.74l.15-.09a2 2 0 0 0 .73-2.73l-.22-.38a2 2 0 0 0-2.73-.73l-.15.08a2 2 0 0 1-2 0l-.43-.25a2 2 0 0 1-1-1.73V4a2 2 0 0 0-2-2z"
|
|
||||||
/>
|
|
||||||
<circle cx="12" cy="12" r="3" />
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
|
|
||||||
<div
|
|
||||||
id="dropdown_panel"
|
|
||||||
class="hidden absolute right-0 mt-2 w-52 rounded-md bg-white dark:bg-slate-900 text-slate-900 dark:text-slate-200 shadow-md border border-slate-400 dark:border-slate-700"
|
|
||||||
>
|
|
||||||
<div
|
|
||||||
class="flex flex-col gap-2 w-full first-of-type:rounded-t-md last-of-type:rounded-b-md px-4 py-2.5 text-left text-sm"
|
|
||||||
>
|
|
||||||
Appearance
|
|
||||||
<div class="grid grid-cols-4 gap-2">
|
|
||||||
<div>
|
|
||||||
<input
|
|
||||||
type="radio"
|
|
||||||
name="theme"
|
|
||||||
id="light"
|
|
||||||
value="light"
|
|
||||||
class="peer hidden"
|
|
||||||
/>
|
|
||||||
<label
|
|
||||||
for="light"
|
|
||||||
tabindex="0"
|
|
||||||
title="Light"
|
|
||||||
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-sm text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
class="h-5 w-5"
|
|
||||||
>
|
|
||||||
<circle cx="12" cy="12" r="4" />
|
|
||||||
<path d="M12 2v2" />
|
|
||||||
<path d="M12 20v2" />
|
|
||||||
<path d="m4.93 4.93 1.41 1.41" />
|
|
||||||
<path d="m17.66 17.66 1.41 1.41" />
|
|
||||||
<path d="M2 12h2" />
|
|
||||||
<path d="M20 12h2" />
|
|
||||||
<path d="m6.34 17.66-1.41 1.41" />
|
|
||||||
<path d="m19.07 4.93-1.41 1.41" />
|
|
||||||
</svg>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<input
|
|
||||||
type="radio"
|
|
||||||
name="theme"
|
|
||||||
id="dark"
|
|
||||||
value="dark"
|
|
||||||
class="peer hidden"
|
|
||||||
/>
|
|
||||||
<label
|
|
||||||
for="dark"
|
|
||||||
tabindex="0"
|
|
||||||
title="Dark"
|
|
||||||
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-base text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="24"
|
|
||||||
height="24"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
class="h-5 w-5"
|
|
||||||
>
|
|
||||||
<path d="M12 3a6 6 0 0 0 9 9 9 9 0 1 1-9-9Z" />
|
|
||||||
</svg>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<input
|
|
||||||
type="radio"
|
|
||||||
name="theme"
|
|
||||||
id="system"
|
|
||||||
value="system"
|
|
||||||
class="peer hidden"
|
|
||||||
checked
|
|
||||||
/>
|
|
||||||
<label
|
|
||||||
for="system"
|
|
||||||
tabindex="0"
|
|
||||||
title="System preference"
|
|
||||||
class="flex items-end justify-center h-10 w-10 cursor-pointer select-none rounded-md p-2 text-lg text-slate-600 dark:text-slate-200 text-center hover:bg-slate-200 dark:hover:bg-slate-700 peer-checked:bg-slate-200 dark:peer-checked:bg-slate-700"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
width="24"
|
|
||||||
height="24"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
class="h-5 w-5"
|
|
||||||
>
|
|
||||||
<path d="M12 8a2.83 2.83 0 0 0 4 4 4 4 0 1 1-4-4" />
|
|
||||||
<path d="M12 2v2" />
|
|
||||||
<path d="M12 20v2" />
|
|
||||||
<path d="m4.9 4.9 1.4 1.4" />
|
|
||||||
<path d="m17.7 17.7 1.4 1.4" />
|
|
||||||
<path d="M2 12h2" />
|
|
||||||
<path d="M20 12h2" />
|
|
||||||
<path d="m6.3 17.7-1.4 1.4" />
|
|
||||||
<path d="m19.1 4.9-1.4 1.4" />
|
|
||||||
</svg>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="mx-auto -mt-12">
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
|
||||||
viewBox="0 0 512 512"
|
|
||||||
class="h-[250px] hover:drop-shadow-[0_0px_10px_rgba(122,167,209,.3)] transition-colors duration-300"
|
|
||||||
>
|
|
||||||
<path
|
|
||||||
fill="#7AA7D1"
|
|
||||||
d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"
|
|
||||||
/>
|
|
||||||
<path
|
|
||||||
fill="#7AA7D1"
|
|
||||||
d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.680 111.990 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"
|
|
||||||
/>
|
|
||||||
</svg>
|
</svg>
|
||||||
</div>
|
<header>
|
||||||
|
<h1 class="text-center text-3xl sm:text-4xl font-extrabold text-slate-900 tracking-tight dark:text-slate-200">ladddddddder</h1>
|
||||||
<header>
|
</header>
|
||||||
<h1
|
<form id="inputForm" method="get" class="mx-4 relative">
|
||||||
class="text-center text-3xl sm:text-4xl font-extrabold text-slate-900 tracking-tight dark:text-slate-200 cursor-default"
|
<div>
|
||||||
>
|
<input type="text" id="inputField" placeholder="Proxy Search" name="inputField" class="w-full text-sm leading-6 text-slate-400 rounded-md ring-1 ring-slate-900/10 shadow-sm py-1.5 pl-2 pr-3 hover:ring-slate-300 dark:bg-slate-800 dark:highlight-white/5 dark:hover:bg-slate-700" required autofocus>
|
||||||
ladddddddder
|
<button id="clearButton" type="button" aria-label="Clear Search" title="Clear Search" class="hidden absolute inset-y-0 right-0 items-center pr-2 hover:text-slate-400 hover:dark:text-slate-300" tabindex="-1">
|
||||||
</h1>
|
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round""><path d="M18 6 6 18"/><path d="m6 6 12 12"/></svg>
|
||||||
</header>
|
</button>
|
||||||
|
</div>
|
||||||
<form id="inputForm" method="get" class="flex flex-col gap-2 mx-4">
|
</form>
|
||||||
<div class="relative">
|
<footer class="mt-10 mx-4 text-center text-slate-600 dark:text-slate-400">
|
||||||
<input
|
<p>
|
||||||
type="url"
|
Code Licensed Under GPL v3.0 |
|
||||||
id="inputField"
|
<a href="https://github.com/everywall/ladder" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">Source</a> |
|
||||||
placeholder="Enter URL"
|
<a href="https://github.com/everywall/ladder/releases" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">VERSION</a>
|
||||||
name="inputField"
|
</p>
|
||||||
class="w-full text-sm leading-6 text-slate-400 rounded-md ring-1 ring-slate-900/10 shadow-sm py-1.5 pl-2 pr-3 hover:ring-slate-300 dark:bg-slate-800 dark:highlight-white/5 dark:hover:bg-slate-700"
|
</footer>
|
||||||
autocomplete="off"
|
|
||||||
autofocus
|
|
||||||
required
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
id="clearButton"
|
|
||||||
type="reset"
|
|
||||||
aria-label="Clear Search"
|
|
||||||
title="Clear Search"
|
|
||||||
class="hidden absolute inset-y-0 right-0 items-center pr-2 text-slate-600 dark:text-slate-400 hover:text-slate-400 hover:dark:text-slate-300"
|
|
||||||
tabindex="-1"
|
|
||||||
>
|
|
||||||
<svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
fill="none"
|
|
||||||
stroke="currentColor"
|
|
||||||
stroke-width="2"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
class="h-4 w-4"
|
|
||||||
>
|
|
||||||
<path d="M18 6 6 18" />
|
|
||||||
<path d="m6 6 12 12" />
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div
|
|
||||||
class="flex flex-wrap-reverse mt-5 gap-x-10 gap-y-4 justify-center"
|
|
||||||
>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
id="outlineButton"
|
|
||||||
class="inline-flex items-center justify-center h-11 px-8 whitespace-nowrap rounded-md text-sm font-medium text-slate-900 dark:text-slate-200 ring-offset-white dark:ring-offset-slate-900 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-slate-200 dark:bg-slate-800 hover:bg-slate-200/90 dark:hover:bg-slate-800/90"
|
|
||||||
>
|
|
||||||
Create Outline
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
type="submit"
|
|
||||||
class="inline-flex items-center justify-center h-11 px-8 whitespace-nowrap rounded-md text-sm font-medium text-slate-200 dark:text-slate-900 ring-offset-white dark:ring-offset-slate-900 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-slate-800 dark:bg-slate-200 hover:bg-slate-800/90 dark:hover:bg-slate-200/90"
|
|
||||||
>
|
|
||||||
Proxy Search
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
<footer class="mx-4 text-center text-slate-600 dark:text-slate-400">
|
|
||||||
<p>
|
|
||||||
Code Licensed Under GPL v3.0 |
|
|
||||||
<a
|
|
||||||
href="https://github.com/everywall/ladder"
|
|
||||||
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
|
|
||||||
>View Source</a
|
|
||||||
>
|
|
||||||
|
|
|
||||||
<a
|
|
||||||
href="https://github.com/everywall"
|
|
||||||
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
|
|
||||||
>Everywall</a
|
|
||||||
>
|
|
||||||
</p>
|
|
||||||
</footer>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
function validateAndRedirect(destination) {
|
document.getElementById('inputForm').addEventListener('submit', function (e) {
|
||||||
let url = inputField.value;
|
e.preventDefault();
|
||||||
const redirectUrl =
|
let url = document.getElementById('inputField').value;
|
||||||
destination === "outline" ? "/outline/" + url : "/" + url;
|
if (url.indexOf('http') === -1) {
|
||||||
window.location.href = redirectUrl;
|
url = 'https://' + url;
|
||||||
return true;
|
}
|
||||||
}
|
window.location.href = '/' + url;
|
||||||
|
return false;
|
||||||
document
|
|
||||||
.getElementById("inputForm")
|
|
||||||
.addEventListener("submit", function (e) {
|
|
||||||
e.preventDefault();
|
|
||||||
validateAndRedirect("default");
|
|
||||||
});
|
});
|
||||||
|
document.getElementById('inputField').addEventListener('input', function() {
|
||||||
document
|
const clearButton = document.getElementById('clearButton');
|
||||||
.getElementById("outlineButton")
|
if (this.value.trim().length > 0) {
|
||||||
.addEventListener("click", function () {
|
clearButton.style.display = 'block';
|
||||||
validateAndRedirect("outline");
|
} else {
|
||||||
|
clearButton.style.display = 'none';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
document.getElementById('clearButton').addEventListener('click', function() {
|
||||||
|
document.getElementById('inputField').value = '';
|
||||||
|
this.style.display = 'none';
|
||||||
|
document.getElementById('inputField').focus();
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
</body>
|
|
||||||
|
<style>
|
||||||
|
@media (prefers-color-scheme: light) {
|
||||||
|
body {
|
||||||
|
background-color: #ffffff;
|
||||||
|
color: #333333;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (prefers-color-scheme: dark) {
|
||||||
|
body {
|
||||||
|
background-color: #1a202c;
|
||||||
|
color: #ffffff;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
rx "github.com/everywall/ladder/proxychain/requestmodifiers"
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func NewOutlineHandler(path string, opts *ProxyOptions) fiber.Handler {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
return proxychain.
|
|
||||||
NewProxyChain().
|
|
||||||
WithAPIPath(path).
|
|
||||||
SetDebugLogging(opts.Verbose).
|
|
||||||
SetRequestModifications(
|
|
||||||
rx.MasqueradeAsGoogleBot(),
|
|
||||||
rx.ForwardRequestHeaders(),
|
|
||||||
rx.SpoofReferrerFromGoogleSearch(),
|
|
||||||
).
|
|
||||||
AddResponseModifications(
|
|
||||||
tx.SetResponseHeader("content-type", "text/html"),
|
|
||||||
tx.DeleteIncomingCookies(),
|
|
||||||
tx.RewriteHTMLResourceURLs(),
|
|
||||||
tx.GenerateReadableOutline(), // <-- this response modification does the outline rendering
|
|
||||||
).
|
|
||||||
SetFiberCtx(c).
|
|
||||||
Execute()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,76 +1,364 @@
|
|||||||
package handlers
|
package handlers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
rx "github.com/everywall/ladder/proxychain/requestmodifiers"
|
"fmt"
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
"io"
|
||||||
|
"log"
|
||||||
"github.com/everywall/ladder/proxychain"
|
"net/http"
|
||||||
ruleset_v2 "github.com/everywall/ladder/proxychain/ruleset"
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/PuerkitoBio/goquery"
|
||||||
"github.com/gofiber/fiber/v2"
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ProxyOptions struct {
|
var (
|
||||||
Ruleset ruleset_v2.IRuleset
|
UserAgent = getenv("USER_AGENT", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
|
||||||
Verbose bool
|
ForwardedFor = getenv("X_FORWARDED_FOR", "66.249.66.1")
|
||||||
}
|
rulesSet = loadRules()
|
||||||
|
allowedDomains = strings.Split(os.Getenv("ALLOWED_DOMAINS"), ",")
|
||||||
|
)
|
||||||
|
|
||||||
func NewProxySiteHandler(opts *ProxyOptions) fiber.Handler {
|
// extracts a URL from the request ctx. If the URL in the request
|
||||||
/*
|
// is a relative path, it reconstructs the full URL using the referer header.
|
||||||
var rs ruleset.RuleSet
|
func extractUrl(c *fiber.Ctx) (string, error) {
|
||||||
if opts.RulesetPath != "" {
|
// try to extract url-encoded
|
||||||
r, err := ruleset.NewRuleset(opts.RulesetPath)
|
reqUrl, err := url.QueryUnescape(c.Params("*"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
// fallback
|
||||||
}
|
reqUrl = c.Params("*")
|
||||||
rs = r
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
proxychain := proxychain.
|
|
||||||
NewProxyChain().
|
|
||||||
SetFiberCtx(c).
|
|
||||||
SetDebugLogging(opts.Verbose).
|
|
||||||
SetRequestModifications(
|
|
||||||
//rx.SpoofJA3fingerprint(ja3, "Googlebot"),
|
|
||||||
rx.AddCacheBusterQuery(),
|
|
||||||
rx.MasqueradeAsGoogleBot(),
|
|
||||||
rx.ForwardRequestHeaders(),
|
|
||||||
rx.DeleteOutgoingCookies(),
|
|
||||||
rx.SpoofReferrerFromRedditPost(),
|
|
||||||
//rx.SpoofReferrerFromLinkedInPost(),
|
|
||||||
//rx.RequestWaybackMachine(),
|
|
||||||
//rx.RequestArchiveIs(),
|
|
||||||
).
|
|
||||||
AddResponseModifications(
|
|
||||||
tx.ForwardResponseHeaders(),
|
|
||||||
//tx.BlockThirdPartyScripts(),
|
|
||||||
tx.DeleteIncomingCookies(),
|
|
||||||
tx.DeleteLocalStorageData(),
|
|
||||||
tx.DeleteSessionStorageData(),
|
|
||||||
tx.BypassCORS(),
|
|
||||||
tx.BypassContentSecurityPolicy(),
|
|
||||||
tx.RewriteHTMLResourceURLs(),
|
|
||||||
tx.PatchDynamicResourceURLs(),
|
|
||||||
tx.PatchTrackerScripts(),
|
|
||||||
tx.BlockElementRemoval(".article-content"), // techcrunch
|
|
||||||
//tx.BlockElementRemoval(".available-content"), // substack
|
|
||||||
// tx.SetContentSecurityPolicy("default-src * 'unsafe-inline' 'unsafe-eval' data: blob:;"),
|
|
||||||
)
|
|
||||||
|
|
||||||
// no options passed in, return early
|
|
||||||
if opts == nil {
|
|
||||||
return proxychain.Execute()
|
|
||||||
}
|
|
||||||
|
|
||||||
// load ruleset
|
|
||||||
rule, exists := opts.Ruleset.GetRule(proxychain.Request.URL)
|
|
||||||
if exists {
|
|
||||||
proxychain.AddOnceRequestModifications(rule.RequestModifications...)
|
|
||||||
proxychain.AddOnceResponseModifications(rule.ResponseModifications...)
|
|
||||||
}
|
|
||||||
|
|
||||||
return proxychain.Execute()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Extract the actual path from req ctx
|
||||||
|
urlQuery, err := url.Parse(reqUrl)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("error parsing request URL '%s': %v", reqUrl, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
isRelativePath := urlQuery.Scheme == ""
|
||||||
|
|
||||||
|
// eg: https://localhost:8080/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
|
||||||
|
if isRelativePath {
|
||||||
|
// Parse the referer URL from the request header.
|
||||||
|
refererUrl, err := url.Parse(c.Get("referer"))
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("error parsing referer URL from req: '%s': %v", reqUrl, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract the real url from referer path
|
||||||
|
realUrl, err := url.Parse(strings.TrimPrefix(refererUrl.Path, "/"))
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("error parsing real URL from referer '%s': %v", refererUrl.Path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// reconstruct the full URL using the referer's scheme, host, and the relative path / queries
|
||||||
|
fullUrl := &url.URL{
|
||||||
|
Scheme: realUrl.Scheme,
|
||||||
|
Host: realUrl.Host,
|
||||||
|
Path: urlQuery.Path,
|
||||||
|
RawQuery: urlQuery.RawQuery,
|
||||||
|
}
|
||||||
|
|
||||||
|
if os.Getenv("LOG_URLS") == "true" {
|
||||||
|
log.Printf("modified relative URL: '%s' -> '%s'", reqUrl, fullUrl.String())
|
||||||
|
}
|
||||||
|
return fullUrl.String(), nil
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// default behavior:
|
||||||
|
// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
|
||||||
|
return urlQuery.String(), nil
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func ProxySite(c *fiber.Ctx) error {
|
||||||
|
// Get the url from the URL
|
||||||
|
url, err := extractUrl(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR In URL extraction:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
queries := c.Queries()
|
||||||
|
body, _, resp, err := fetchSite(url, queries)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
c.SendStatus(fiber.StatusInternalServerError)
|
||||||
|
return c.SendString(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Set("Content-Type", resp.Header.Get("Content-Type"))
|
||||||
|
c.Set("Content-Security-Policy", resp.Header.Get("Content-Security-Policy"))
|
||||||
|
|
||||||
|
return c.SendString(body)
|
||||||
|
}
|
||||||
|
|
||||||
|
func modifyURL(uri string, rule Rule) (string, error) {
|
||||||
|
newUrl, err := url.Parse(uri)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, urlMod := range rule.UrlMods.Domain {
|
||||||
|
re := regexp.MustCompile(urlMod.Match)
|
||||||
|
newUrl.Host = re.ReplaceAllString(newUrl.Host, urlMod.Replace)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, urlMod := range rule.UrlMods.Path {
|
||||||
|
re := regexp.MustCompile(urlMod.Match)
|
||||||
|
newUrl.Path = re.ReplaceAllString(newUrl.Path, urlMod.Replace)
|
||||||
|
}
|
||||||
|
|
||||||
|
v := newUrl.Query()
|
||||||
|
for _, query := range rule.UrlMods.Query {
|
||||||
|
if query.Value == "" {
|
||||||
|
v.Del(query.Key)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
v.Set(query.Key, query.Value)
|
||||||
|
}
|
||||||
|
newUrl.RawQuery = v.Encode()
|
||||||
|
|
||||||
|
if rule.GoogleCache {
|
||||||
|
newUrl, err = url.Parse("https://webcache.googleusercontent.com/search?q=cache:" + newUrl.String())
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return newUrl.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func fetchSite(urlpath string, queries map[string]string) (string, *http.Request, *http.Response, error) {
|
||||||
|
urlQuery := "?"
|
||||||
|
if len(queries) > 0 {
|
||||||
|
for k, v := range queries {
|
||||||
|
urlQuery += k + "=" + v + "&"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
urlQuery = strings.TrimSuffix(urlQuery, "&")
|
||||||
|
urlQuery = strings.TrimSuffix(urlQuery, "?")
|
||||||
|
|
||||||
|
u, err := url.Parse(urlpath)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(allowedDomains) > 0 && !StringInSlice(u.Host, allowedDomains) {
|
||||||
|
return "", nil, nil, fmt.Errorf("domain not allowed. %s not in %s", u.Host, allowedDomains)
|
||||||
|
}
|
||||||
|
|
||||||
|
if os.Getenv("LOG_URLS") == "true" {
|
||||||
|
log.Println(u.String() + urlQuery)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Modify the URI according to ruleset
|
||||||
|
rule := fetchRule(u.Host, u.Path)
|
||||||
|
url, err := modifyURL(u.String()+urlQuery, rule)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch the site
|
||||||
|
client := &http.Client{}
|
||||||
|
req, _ := http.NewRequest("GET", url, nil)
|
||||||
|
|
||||||
|
if rule.Headers.UserAgent != "" {
|
||||||
|
req.Header.Set("User-Agent", rule.Headers.UserAgent)
|
||||||
|
} else {
|
||||||
|
req.Header.Set("User-Agent", UserAgent)
|
||||||
|
}
|
||||||
|
|
||||||
|
if rule.Headers.XForwardedFor != "" {
|
||||||
|
if rule.Headers.XForwardedFor != "none" {
|
||||||
|
req.Header.Set("X-Forwarded-For", rule.Headers.XForwardedFor)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
req.Header.Set("X-Forwarded-For", ForwardedFor)
|
||||||
|
}
|
||||||
|
|
||||||
|
if rule.Headers.Referer != "" {
|
||||||
|
if rule.Headers.Referer != "none" {
|
||||||
|
req.Header.Set("Referer", rule.Headers.Referer)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
req.Header.Set("Referer", u.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
if rule.Headers.Cookie != "" {
|
||||||
|
req.Header.Set("Cookie", rule.Headers.Cookie)
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
bodyB, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if rule.Headers.CSP != "" {
|
||||||
|
log.Println(rule.Headers.CSP)
|
||||||
|
resp.Header.Set("Content-Security-Policy", rule.Headers.CSP)
|
||||||
|
}
|
||||||
|
|
||||||
|
//log.Print("rule", rule) TODO: Add a debug mode to print the rule
|
||||||
|
body := rewriteHtml(bodyB, u, rule)
|
||||||
|
return body, req, resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func rewriteHtml(bodyB []byte, u *url.URL, rule Rule) string {
|
||||||
|
// Rewrite the HTML
|
||||||
|
body := string(bodyB)
|
||||||
|
|
||||||
|
// images
|
||||||
|
imagePattern := `<img\s+([^>]*\s+)?src="(/)([^"]*)"`
|
||||||
|
re := regexp.MustCompile(imagePattern)
|
||||||
|
body = re.ReplaceAllString(body, fmt.Sprintf(`<img $1 src="%s$3"`, "/https://"+u.Host+"/"))
|
||||||
|
|
||||||
|
// scripts
|
||||||
|
scriptPattern := `<script\s+([^>]*\s+)?src="(/)([^"]*)"`
|
||||||
|
reScript := regexp.MustCompile(scriptPattern)
|
||||||
|
body = reScript.ReplaceAllString(body, fmt.Sprintf(`<script $1 script="%s$3"`, "/https://"+u.Host+"/"))
|
||||||
|
|
||||||
|
// body = strings.ReplaceAll(body, "srcset=\"/", "srcset=\"/https://"+u.Host+"/") // TODO: Needs a regex to rewrite the URL's
|
||||||
|
body = strings.ReplaceAll(body, "href=\"/", "href=\"/https://"+u.Host+"/")
|
||||||
|
body = strings.ReplaceAll(body, "url('/", "url('/https://"+u.Host+"/")
|
||||||
|
body = strings.ReplaceAll(body, "url(/", "url(/https://"+u.Host+"/")
|
||||||
|
body = strings.ReplaceAll(body, "href=\"https://"+u.Host, "href=\"/https://"+u.Host+"/")
|
||||||
|
|
||||||
|
if os.Getenv("RULESET") != "" {
|
||||||
|
body = applyRules(body, rule)
|
||||||
|
}
|
||||||
|
return body
|
||||||
|
}
|
||||||
|
|
||||||
|
func getenv(key, fallback string) string {
|
||||||
|
value := os.Getenv(key)
|
||||||
|
if len(value) == 0 {
|
||||||
|
return fallback
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadRules() RuleSet {
|
||||||
|
rulesUrl := os.Getenv("RULESET")
|
||||||
|
if rulesUrl == "" {
|
||||||
|
RulesList := RuleSet{}
|
||||||
|
return RulesList
|
||||||
|
}
|
||||||
|
log.Println("Loading rules")
|
||||||
|
|
||||||
|
var ruleSet RuleSet
|
||||||
|
if strings.HasPrefix(rulesUrl, "http") {
|
||||||
|
|
||||||
|
resp, err := http.Get(rulesUrl)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode >= 400 {
|
||||||
|
log.Println("ERROR:", resp.StatusCode, rulesUrl)
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
}
|
||||||
|
yaml.Unmarshal(body, &ruleSet)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
yamlFile, err := os.ReadFile(rulesUrl)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
}
|
||||||
|
yaml.Unmarshal(yamlFile, &ruleSet)
|
||||||
|
}
|
||||||
|
|
||||||
|
domains := []string{}
|
||||||
|
for _, rule := range ruleSet {
|
||||||
|
|
||||||
|
domains = append(domains, rule.Domain)
|
||||||
|
domains = append(domains, rule.Domains...)
|
||||||
|
if os.Getenv("ALLOWED_DOMAINS_RULESET") == "true" {
|
||||||
|
allowedDomains = append(allowedDomains, domains...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Println("Loaded ", len(ruleSet), " rules for", len(domains), "Domains")
|
||||||
|
return ruleSet
|
||||||
|
}
|
||||||
|
|
||||||
|
func fetchRule(domain string, path string) Rule {
|
||||||
|
if len(rulesSet) == 0 {
|
||||||
|
return Rule{}
|
||||||
|
}
|
||||||
|
rule := Rule{}
|
||||||
|
for _, rule := range rulesSet {
|
||||||
|
domains := rule.Domains
|
||||||
|
if rule.Domain != "" {
|
||||||
|
domains = append(domains, rule.Domain)
|
||||||
|
}
|
||||||
|
for _, ruleDomain := range domains {
|
||||||
|
if ruleDomain == domain || strings.HasSuffix(domain, ruleDomain) {
|
||||||
|
if len(rule.Paths) > 0 && !StringInSlice(path, rule.Paths) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// return first match
|
||||||
|
return rule
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return rule
|
||||||
|
}
|
||||||
|
|
||||||
|
func applyRules(body string, rule Rule) string {
|
||||||
|
if len(rulesSet) == 0 {
|
||||||
|
return body
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, regexRule := range rule.RegexRules {
|
||||||
|
re := regexp.MustCompile(regexRule.Match)
|
||||||
|
body = re.ReplaceAllString(body, regexRule.Replace)
|
||||||
|
}
|
||||||
|
for _, injection := range rule.Injections {
|
||||||
|
doc, err := goquery.NewDocumentFromReader(strings.NewReader(body))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
if injection.Replace != "" {
|
||||||
|
doc.Find(injection.Position).ReplaceWithHtml(injection.Replace)
|
||||||
|
}
|
||||||
|
if injection.Append != "" {
|
||||||
|
doc.Find(injection.Position).AppendHtml(injection.Append)
|
||||||
|
}
|
||||||
|
if injection.Prepend != "" {
|
||||||
|
doc.Find(injection.Position).PrependHtml(injection.Prepend)
|
||||||
|
}
|
||||||
|
body, err = doc.Html()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return body
|
||||||
|
}
|
||||||
|
|
||||||
|
func StringInSlice(s string, list []string) bool {
|
||||||
|
for _, x := range list {
|
||||||
|
if strings.HasPrefix(s, x) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|||||||
58
handlers/proxy.test.go
Normal file
58
handlers/proxy.test.go
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
// BEGIN: 6f8b3f5d5d5d
|
||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"net/url"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestProxySite(t *testing.T) {
|
||||||
|
app := fiber.New()
|
||||||
|
app.Get("/:url", ProxySite)
|
||||||
|
|
||||||
|
req := httptest.NewRequest("GET", "/https://example.com", nil)
|
||||||
|
resp, err := app.Test(req)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, http.StatusOK, resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRewriteHtml(t *testing.T) {
|
||||||
|
bodyB := []byte(`
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<title>Test Page</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<img src="/image.jpg">
|
||||||
|
<script src="/script.js"></script>
|
||||||
|
<a href="/about">About Us</a>
|
||||||
|
<div style="background-image: url('/background.jpg')"></div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`)
|
||||||
|
u := &url.URL{Host: "example.com"}
|
||||||
|
|
||||||
|
expected := `
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<title>Test Page</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<img src="/https://example.com/image.jpg">
|
||||||
|
<script script="/https://example.com/script.js"></script>
|
||||||
|
<a href="/https://example.com/about">About Us</a>
|
||||||
|
<div style="background-image: url('/https://example.com/background.jpg')"></div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`
|
||||||
|
|
||||||
|
actual := rewriteHtml(bodyB, u, Rule{})
|
||||||
|
assert.Equal(t, expected, actual)
|
||||||
|
}
|
||||||
|
|
||||||
|
// END: 6f8b3f5d5d5d
|
||||||
21
handlers/raw.go
Normal file
21
handlers/raw.go
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Raw(c *fiber.Ctx) error {
|
||||||
|
// Get the url from the URL
|
||||||
|
urlQuery := c.Params("*")
|
||||||
|
|
||||||
|
queries := c.Queries()
|
||||||
|
body, _, _, err := fetchSite(urlQuery, queries)
|
||||||
|
if err != nil {
|
||||||
|
log.Println("ERROR:", err)
|
||||||
|
c.SendStatus(500)
|
||||||
|
return c.SendString(err.Error())
|
||||||
|
}
|
||||||
|
return c.SendString(body)
|
||||||
|
}
|
||||||
60
handlers/raw.test.go
Normal file
60
handlers/raw.test.go
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
// BEGIN: 7f8d9e6d4b5c
|
||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestRaw(t *testing.T) {
|
||||||
|
app := fiber.New()
|
||||||
|
app.Get("/raw/*", Raw)
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
url string
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid url",
|
||||||
|
url: "https://www.google.com",
|
||||||
|
expected: "<!doctype html>",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid url",
|
||||||
|
url: "invalid-url",
|
||||||
|
expected: "parse invalid-url: invalid URI for request",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/raw/"+tc.url, nil)
|
||||||
|
resp, err := app.Test(req)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
t.Errorf("expected status OK; got %v", resp.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(string(body), tc.expected) {
|
||||||
|
t.Errorf("expected body to contain %q; got %q", tc.expected, string(body))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// END: 7f8d9e6d4b5c
|
||||||
23
handlers/ruleset.go
Normal file
23
handlers/ruleset.go
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/gofiber/fiber/v2"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Ruleset(c *fiber.Ctx) error {
|
||||||
|
if os.Getenv("EXPOSE_RULESET") == "false" {
|
||||||
|
c.SendStatus(fiber.StatusForbidden)
|
||||||
|
return c.SendString("Rules Disabled")
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := yaml.Marshal(rulesSet)
|
||||||
|
if err != nil {
|
||||||
|
c.SendStatus(fiber.StatusInternalServerError)
|
||||||
|
return c.SendString(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.SendString(string(body))
|
||||||
|
}
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"embed"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed script.js
|
|
||||||
var scriptData embed.FS
|
|
||||||
|
|
||||||
func Script(c *fiber.Ctx) error {
|
|
||||||
|
|
||||||
scriptData, err := scriptData.ReadFile("script.js")
|
|
||||||
if err != nil {
|
|
||||||
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Set("Content-Type", "text/javascript")
|
|
||||||
|
|
||||||
return c.Send(scriptData)
|
|
||||||
|
|
||||||
}
|
|
||||||
@@ -1,327 +0,0 @@
|
|||||||
const labels = document.querySelectorAll("label");
|
|
||||||
const inputs = document.querySelectorAll('input[type="radio"]');
|
|
||||||
const mainElement = document.querySelector("main");
|
|
||||||
const inputField = document.getElementById("inputField");
|
|
||||||
const clearButton = document.getElementById("clearButton");
|
|
||||||
|
|
||||||
window.addEventListener("DOMContentLoaded", handleDOMContentLoaded);
|
|
||||||
|
|
||||||
function handleDOMContentLoaded() {
|
|
||||||
handleFontChange();
|
|
||||||
handleFontSizeChange();
|
|
||||||
inputs.forEach((input) => {
|
|
||||||
const storedValue = localStorage.getItem(input.name);
|
|
||||||
if (storedValue === input.value) {
|
|
||||||
input.checked = true;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
window.removeEventListener("DOMContentLoaded", handleDOMContentLoaded);
|
|
||||||
}
|
|
||||||
|
|
||||||
function clearInput() {
|
|
||||||
inputField.value = "";
|
|
||||||
clearButton.style.display = "none";
|
|
||||||
inputField.focus();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (inputField !== null && clearButton !== null) {
|
|
||||||
inputField.addEventListener("input", () => {
|
|
||||||
const clearButton = document.getElementById("clearButton");
|
|
||||||
if (clearButton !== null) {
|
|
||||||
if (inputField.value.trim().length > 0) {
|
|
||||||
clearButton.style.display = "block";
|
|
||||||
} else {
|
|
||||||
clearButton.style.display = "none";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
inputField.addEventListener("keydown", (event) => {
|
|
||||||
if (event.code === "Escape") {
|
|
||||||
clearInput();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
clearButton.addEventListener("click", () => {
|
|
||||||
clearInput();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function focusable_children(node) {
|
|
||||||
const nodes = Array.from(
|
|
||||||
node.querySelectorAll(
|
|
||||||
'a[href], button, input, textarea, select, details, [tabindex]:not([tabindex="-1"])'
|
|
||||||
)
|
|
||||||
).filter((s) => s.offsetParent !== null);
|
|
||||||
const index = nodes.indexOf(document.activeElement);
|
|
||||||
const update = (d) => {
|
|
||||||
let i = index + d;
|
|
||||||
i += nodes.length;
|
|
||||||
i %= nodes.length;
|
|
||||||
nodes[i].focus();
|
|
||||||
};
|
|
||||||
return {
|
|
||||||
next: (selector) => {
|
|
||||||
const reordered = [
|
|
||||||
...nodes.slice(index + 1),
|
|
||||||
...nodes.slice(0, index + 1),
|
|
||||||
];
|
|
||||||
for (let i = 0; i < reordered.length; i += 1) {
|
|
||||||
if (!selector || reordered[i].matches(selector)) {
|
|
||||||
reordered[i].focus();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
prev: (selector) => {
|
|
||||||
const reordered = [
|
|
||||||
...nodes.slice(index + 1),
|
|
||||||
...nodes.slice(0, index + 1),
|
|
||||||
];
|
|
||||||
for (let i = reordered.length - 2; i >= 0; i -= 1) {
|
|
||||||
if (!selector || reordered[i].matches(selector)) {
|
|
||||||
reordered[i].focus();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
update,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function trap(node) {
|
|
||||||
const handle_keydown = (e) => {
|
|
||||||
if (e.key === "Tab") {
|
|
||||||
e.preventDefault();
|
|
||||||
const group = focusable_children(node);
|
|
||||||
if (e.shiftKey) {
|
|
||||||
group.prev();
|
|
||||||
} else {
|
|
||||||
group.next();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
node.addEventListener("keydown", handle_keydown);
|
|
||||||
return {
|
|
||||||
destroy: () => {
|
|
||||||
node.removeEventListener("keydown", handle_keydown);
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const toggleDropdown = () => {
|
|
||||||
const dropdown = document.getElementById("dropdown");
|
|
||||||
const dropdown_panel = document.getElementById("dropdown_panel");
|
|
||||||
const focusTrap = trap(dropdown);
|
|
||||||
|
|
||||||
const closeDropdown = () => {
|
|
||||||
dropdown_panel.classList.add("hidden");
|
|
||||||
focusTrap.destroy();
|
|
||||||
dropdown.removeEventListener("keydown", handleEscapeKey);
|
|
||||||
document.removeEventListener("click", handleClickOutside);
|
|
||||||
inputs.forEach((input) => {
|
|
||||||
input.removeEventListener("change", handleInputChange);
|
|
||||||
});
|
|
||||||
labels.forEach((label) => {
|
|
||||||
label.removeEventListener("click", handleLabelSelection);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleClickOutside = (e) => {
|
|
||||||
if (dropdown !== null && !dropdown.contains(e.target)) {
|
|
||||||
closeDropdown();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleEscapeKey = (e) => {
|
|
||||||
if (e.key === "Escape") {
|
|
||||||
dropdown_panel.classList.add("hidden");
|
|
||||||
closeDropdown();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleInputChange = (e) => {
|
|
||||||
if (e.target.checked) {
|
|
||||||
localStorage.setItem(e.target.name, e.target.value);
|
|
||||||
switch (e.target.name) {
|
|
||||||
case "theme": {
|
|
||||||
handleThemeChange();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case "font": {
|
|
||||||
handleFontChange();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case "fontsize": {
|
|
||||||
handleFontSizeChange();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
default: {
|
|
||||||
console.error("Unknown event");
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleLabelSelection = (e) => {
|
|
||||||
if (e.key === "Enter" || e.key === " ") {
|
|
||||||
e.preventDefault();
|
|
||||||
const input = document.getElementById(e.target.getAttribute("for"));
|
|
||||||
input.checked = true;
|
|
||||||
input.dispatchEvent(new Event("change", { bubbles: true }));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if (dropdown_panel.classList.contains("hidden")) {
|
|
||||||
dropdown_panel.classList.remove("hidden");
|
|
||||||
dropdown.addEventListener("keydown", handleEscapeKey);
|
|
||||||
inputs.forEach((input) => {
|
|
||||||
input.addEventListener("change", handleInputChange);
|
|
||||||
});
|
|
||||||
labels.forEach((label) => {
|
|
||||||
label.addEventListener("keydown", handleLabelSelection);
|
|
||||||
});
|
|
||||||
document.addEventListener("click", handleClickOutside);
|
|
||||||
} else {
|
|
||||||
closeDropdown();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleFontChange = () => {
|
|
||||||
if (mainElement === null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let font = localStorage.getItem("font");
|
|
||||||
if (font === null) {
|
|
||||||
localStorage.setItem("font", "sans-serif");
|
|
||||||
font = "sans-serif";
|
|
||||||
}
|
|
||||||
if (font === "serif") {
|
|
||||||
mainElement.classList.add("font-serif");
|
|
||||||
mainElement.classList.remove("font-sans");
|
|
||||||
} else {
|
|
||||||
mainElement.classList.add("font-sans");
|
|
||||||
mainElement.classList.remove("font-serif");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const changeFontSize = (node, classes) => {
|
|
||||||
const sizes = [
|
|
||||||
"text-xs",
|
|
||||||
"text-sm",
|
|
||||||
"text-base",
|
|
||||||
"text-lg",
|
|
||||||
"text-xl",
|
|
||||||
"text-2xl",
|
|
||||||
"text-3xl",
|
|
||||||
"text-4xl",
|
|
||||||
"text-5xl",
|
|
||||||
"lg:text-4xl",
|
|
||||||
"lg:text-5xl",
|
|
||||||
"lg:text-6xl",
|
|
||||||
];
|
|
||||||
const currentClasses = sizes.filter((size) => node.classList.contains(size));
|
|
||||||
node.classList.remove(...currentClasses);
|
|
||||||
node.classList.add(...classes);
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleFontSizeChange = () => {
|
|
||||||
if (mainElement === null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let fontSize = localStorage.getItem("fontsize");
|
|
||||||
if (fontSize === null) {
|
|
||||||
localStorage.setItem("fontsize", "text-base");
|
|
||||||
fontSize = "text-base";
|
|
||||||
}
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
changeFontSize(document.querySelector("body"), ["text-sm"]);
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
changeFontSize(document.querySelector("body"), ["text-lg"]);
|
|
||||||
} else {
|
|
||||||
changeFontSize(document.querySelector("body"), ["text-base"]);
|
|
||||||
}
|
|
||||||
|
|
||||||
const nodes = document.querySelectorAll(
|
|
||||||
"h1, h2, h3, h4, h5, h6, code, pre, kbd, table"
|
|
||||||
);
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
changeFontSize(mainElement, ["text-sm"]);
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
changeFontSize(mainElement, ["text-lg"]);
|
|
||||||
} else {
|
|
||||||
changeFontSize(mainElement, ["text-base"]);
|
|
||||||
}
|
|
||||||
nodes.forEach((node) => {
|
|
||||||
let classes = "";
|
|
||||||
switch (node.tagName) {
|
|
||||||
case "H1": {
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
classes = ["text-3xl", "lg:text-4xl"];
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
classes = ["text-5xl", "lg:text-6xl"];
|
|
||||||
} else {
|
|
||||||
classes = ["text-4xl", "lg:text-5xl"];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case "H2": {
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
classes = ["text-2xl"];
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
classes = ["text-4xl"];
|
|
||||||
} else {
|
|
||||||
classes = ["text-3xl"];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case "H3": {
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
classes = ["text-xl"];
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
classes = ["text-3xl"];
|
|
||||||
} else {
|
|
||||||
classes = ["text-2xl"];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case "H4":
|
|
||||||
case "H5":
|
|
||||||
case "H6": {
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
classes = ["text-lg"];
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
classes = ["text-2xl"];
|
|
||||||
} else {
|
|
||||||
classes = ["text-xl"];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case "CODE":
|
|
||||||
case "PRE":
|
|
||||||
case "KBD":
|
|
||||||
case "TABLE": {
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
classes = ["text-xs"];
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
classes = ["text-base"];
|
|
||||||
} else {
|
|
||||||
classes = ["text-sm"];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
default: {
|
|
||||||
if (fontSize === "text-sm") {
|
|
||||||
classes = ["text-sm"];
|
|
||||||
} else if (fontSize === "text-lg") {
|
|
||||||
classes = ["text-lg"];
|
|
||||||
} else {
|
|
||||||
classes = ["text-base"];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
changeFontSize(node, classes);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
File diff suppressed because one or more lines are too long
@@ -1,23 +0,0 @@
|
|||||||
package handlers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"embed"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed styles.css
|
|
||||||
var cssData embed.FS
|
|
||||||
|
|
||||||
func Styles(c *fiber.Ctx) error {
|
|
||||||
|
|
||||||
cssData, err := cssData.ReadFile("styles.css")
|
|
||||||
if err != nil {
|
|
||||||
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Set("Content-Type", "text/css")
|
|
||||||
|
|
||||||
return c.Send(cssData)
|
|
||||||
|
|
||||||
}
|
|
||||||
40
handlers/types.go
Normal file
40
handlers/types.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
type Regex struct {
|
||||||
|
Match string `yaml:"match"`
|
||||||
|
Replace string `yaml:"replace"`
|
||||||
|
}
|
||||||
|
type KV struct {
|
||||||
|
Key string `yaml:"key"`
|
||||||
|
Value string `yaml:"value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type RuleSet []Rule
|
||||||
|
|
||||||
|
type Rule struct {
|
||||||
|
Domain string `yaml:"domain,omitempty"`
|
||||||
|
Domains []string `yaml:"domains,omitempty"`
|
||||||
|
Paths []string `yaml:"paths,omitempty"`
|
||||||
|
Headers struct {
|
||||||
|
UserAgent string `yaml:"user-agent,omitempty"`
|
||||||
|
XForwardedFor string `yaml:"x-forwarded-for,omitempty"`
|
||||||
|
Referer string `yaml:"referer,omitempty"`
|
||||||
|
Cookie string `yaml:"cookie,omitempty"`
|
||||||
|
CSP string `yaml:"content-security-policy,omitempty"`
|
||||||
|
} `yaml:"headers,omitempty"`
|
||||||
|
GoogleCache bool `yaml:"googleCache,omitempty"`
|
||||||
|
RegexRules []Regex `yaml:"regexRules"`
|
||||||
|
|
||||||
|
UrlMods struct {
|
||||||
|
Domain []Regex `yaml:"domain"`
|
||||||
|
Path []Regex `yaml:"path"`
|
||||||
|
Query []KV `yaml:"query"`
|
||||||
|
} `yaml:"urlMods"`
|
||||||
|
|
||||||
|
Injections []struct {
|
||||||
|
Position string `yaml:"position"`
|
||||||
|
Append string `yaml:"append"`
|
||||||
|
Prepend string `yaml:"prepend"`
|
||||||
|
Replace string `yaml:"replace"`
|
||||||
|
} `yaml:"injections"`
|
||||||
|
}
|
||||||
@@ -1,82 +0,0 @@
|
|||||||
package cli
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"golang.org/x/term"
|
|
||||||
)
|
|
||||||
|
|
||||||
var art string = `
|
|
||||||
_____╬═╬____________________________________________
|
|
||||||
|_|__╬═╬___|___|___|___| EVERYWALL |___|___|___|___|
|
|
||||||
|___|╬═╬|___▄▄▌ ▄▄▄· ·▄▄▄▄ ·▄▄▄▄ ▄▄▄ .▄▄▄ __|_|
|
|
||||||
|_|__╬═╬___|██• ▐█ ▀█ ██▪ ██ ██▪ ██ ▀▄.▀·▀▄ █·|___|
|
|
||||||
|___|╬═╬|___██▪ ▄█▀▀█ ▐█· ▐█▌▐█· ▐█▌▐▀▀▪▄▐▀▀▄ __|_|
|
|
||||||
|_|__╬═╬___|▐█▌▐▌▐█ ▪▐▌██. ██ ██. ██ ▐█▄▄▌▐█•█▌|___|
|
|
||||||
|___|╬═╬|___.▀▀▀ ▀ ▀ ▀▀▀▀▀• ▀▀▀▀▀• ▀▀▀ .▀ ▀__|_|
|
|
||||||
|_|__╬═╬___|___|___|_ VERSION %-7s__|___|___|___|
|
|
||||||
|___|╬═╬|____|___|___|___|___|___|___|___|___|___|_|
|
|
||||||
╬═╬
|
|
||||||
╬═╬ %s
|
|
||||||
`
|
|
||||||
|
|
||||||
func StartupMessage(version string, port string, ruleset string) string {
|
|
||||||
isTerm := term.IsTerminal(int(os.Stdout.Fd()))
|
|
||||||
version = strings.Trim(version, " ")
|
|
||||||
version = strings.Trim(version, "\n")
|
|
||||||
|
|
||||||
var link string
|
|
||||||
if isTerm {
|
|
||||||
link = createHyperlink("http://localhost:" + port)
|
|
||||||
} else {
|
|
||||||
link = "http://localhost:" + port
|
|
||||||
}
|
|
||||||
|
|
||||||
buf := fmt.Sprintf(art, version, link)
|
|
||||||
if isTerm {
|
|
||||||
buf = blinkChars(buf, '.', '•', '·', '▪')
|
|
||||||
}
|
|
||||||
|
|
||||||
if ruleset == "" {
|
|
||||||
buf += "\n [!] no ruleset specified.\n [!] for better performance, use a ruleset using --ruleset\n"
|
|
||||||
}
|
|
||||||
if isTerm {
|
|
||||||
buf = colorizeNonASCII(buf)
|
|
||||||
}
|
|
||||||
return buf
|
|
||||||
}
|
|
||||||
|
|
||||||
func createHyperlink(url string) string {
|
|
||||||
return fmt.Sprintf("\033[4m%s\033[0m", url)
|
|
||||||
}
|
|
||||||
|
|
||||||
func colorizeNonASCII(input string) string {
|
|
||||||
result := ""
|
|
||||||
for _, r := range input {
|
|
||||||
if r > 127 {
|
|
||||||
// If the character is non-ASCII, color it blue
|
|
||||||
result += fmt.Sprintf("\033[34m%c\033[0m", r)
|
|
||||||
} else {
|
|
||||||
// ASCII characters remain unchanged
|
|
||||||
result += string(r)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func blinkChars(input string, chars ...rune) string {
|
|
||||||
result := ""
|
|
||||||
MAIN:
|
|
||||||
for _, x := range input {
|
|
||||||
for _, y := range chars {
|
|
||||||
if x == y {
|
|
||||||
result += fmt.Sprintf("\033[5m%s\033[0m", string(x))
|
|
||||||
continue MAIN
|
|
||||||
}
|
|
||||||
}
|
|
||||||
result += fmt.Sprintf("%s", string(x))
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
package cli
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
|
|
||||||
ruleset_v2 "github.com/everywall/ladder/proxychain/ruleset"
|
|
||||||
)
|
|
||||||
|
|
||||||
// HandleRulesetMerge merges a set of ruleset files, specified by the rulesetPath or RULESET env variable, into either YAML or Gzip format.
|
|
||||||
// Exits the program with an error message if the ruleset path is not provided or if loading the ruleset fails.
|
|
||||||
//
|
|
||||||
// Parameters:
|
|
||||||
// - rulesetPath: Specifies the path to the ruleset file.
|
|
||||||
// - mergeRulesets: Indicates if a merge operation should be performed.
|
|
||||||
// - useGzip: Indicates if the merged rulesets should be gzip-ped.
|
|
||||||
// - output: Specifies the output file. If nil, stdout will be used.
|
|
||||||
//
|
|
||||||
// Returns:
|
|
||||||
// - An error if the ruleset loading or merging process fails, otherwise nil.
|
|
||||||
func HandleRulesetMerge(rulesetPath string, mergeRulesets bool, output *os.File) error {
|
|
||||||
if !mergeRulesets {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if rulesetPath == "" {
|
|
||||||
rulesetPath = os.Getenv("RULESET")
|
|
||||||
}
|
|
||||||
|
|
||||||
if rulesetPath == "" {
|
|
||||||
fmt.Println("error: no ruleset provided. Try again with --ruleset <ruleset.yaml>")
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
rs, err := ruleset_v2.NewRuleset(rulesetPath)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
return yamlMerge(rs, output)
|
|
||||||
}
|
|
||||||
|
|
||||||
// yamlMerge takes a RuleSet and an optional output file path pointer. It converts the RuleSet into YAML format.
|
|
||||||
// If the output file path is provided, the YAML data is written to this file. If not, the YAML data is printed to stdout.
|
|
||||||
//
|
|
||||||
// Parameters:
|
|
||||||
// - rs: The ruleset.RuleSet to be converted to YAML.
|
|
||||||
// - output: The output for the merged data. If nil, stdout will be used.
|
|
||||||
//
|
|
||||||
// Returns:
|
|
||||||
// - An error if YAML conversion or file writing fails, otherwise nil.
|
|
||||||
func yamlMerge(rs ruleset_v2.Ruleset, output io.Writer) error {
|
|
||||||
yaml, err := rs.YAML()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if output == nil {
|
|
||||||
fmt.Println(yaml)
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = io.WriteString(output, yaml)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to write merged YAML ruleset: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "pnpx tailwindcss -i ./styles/input.css -o ./styles/output.css --build && pnpx minify ./styles/output.css > ./handlers/styles.css"
|
"build": "pnpx tailwindcss -i ./styles/input.css -o ./styles/output.css --build && pnpx minify ./styles/output.css > ./cmd/styles.css"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"minify": "^10.5.2",
|
"minify": "^10.5.2",
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
## TLDR
|
|
||||||
- If you create, delete or rename any request/response modifier, run `go run codegen.go`, so that ruleset unmarshaling will work properly.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
The `codegen.go` file is a utility for the rulesets that automatically generates Go code that maps functional options names found in response/request modifiers to corresponding factory functions. This generation is crucial for the serialization of rulesets from JSON or YAML into functional options suitable for use in proxychains. The tool processes Go files containing modifier functions and generates the necessary mappings.
|
|
||||||
|
|
||||||
- The generated mappings will be written in `proxychain/ruleset/rule_reqmod_types.gen.go` and `proxychain/ruleset/rule_resmod_types.gen.go`.
|
|
||||||
- These files are used in the `UnmarshalJSON` and `UnmarshalYAML` methods of the rule type, found in `proxychain/ruleset/rule.go`.
|
|
||||||
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
```sh
|
|
||||||
go run codegen.go
|
|
||||||
```
|
|
||||||
|
|
||||||
@@ -1,205 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"io"
|
|
||||||
"io/fs"
|
|
||||||
|
|
||||||
//"io/fs"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
//"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
func responseModToFactoryMap(fn *ast.FuncDecl) (modMap string) {
|
|
||||||
paramCount := len(fn.Type.Params.List)
|
|
||||||
name := fn.Name.Name
|
|
||||||
var x string
|
|
||||||
switch paramCount {
|
|
||||||
case 0:
|
|
||||||
x = fmt.Sprintf(" rsmModMap[\"%s\"] = func(_ ...string) proxychain.ResponseModification {\n return tx.%s()\n }\n", name, name)
|
|
||||||
default:
|
|
||||||
p := []string{}
|
|
||||||
for i := 0; i < paramCount; i++ {
|
|
||||||
p = append(p, fmt.Sprintf("params[%d]", i))
|
|
||||||
}
|
|
||||||
params := strings.Join(p, ", ")
|
|
||||||
x = fmt.Sprintf(" rsmModMap[\"%s\"] = func(params ...string) proxychain.ResponseModification {\n return tx.%s(%s)\n }\n", name, name, params)
|
|
||||||
}
|
|
||||||
return x
|
|
||||||
}
|
|
||||||
|
|
||||||
// responseModCodeGen parses every eligible Go file in dir and returns the
// source of a generated file that registers each exported, receiver-less
// function as a ResponseModifierFactory in rsmModMap.
//
// Parameters:
//   - dir: directory containing the response modifier sources.
//
// Returns the generated Go source, or an error if the directory cannot be
// read or a file fails to parse.
func responseModCodeGen(dir string) (code string, err error) {
	fset := token.NewFileSet()

	files, err := os.ReadDir(dir)
	if err != nil {
		// Fix: was panic(err); return the error like the rest of this
		// function already does for parse failures.
		return "", err
	}

	factoryMaps := []string{}
	for _, file := range files {
		if !shouldGenCodeFor(file) {
			continue
		}

		// Parse each Go file.
		node, err := parser.ParseFile(fset, filepath.Join(dir, file.Name()), nil, parser.ParseComments)
		if err != nil {
			return "", err
		}

		// Collect every exported top-level function; fn.Recv == nil skips methods.
		ast.Inspect(node, func(n ast.Node) bool {
			fn, ok := n.(*ast.FuncDecl)
			if ok && fn.Recv == nil && fn.Name.IsExported() {
				factoryMaps = append(factoryMaps, responseModToFactoryMap(fn))
			}
			return true
		})
	}

	code = fmt.Sprintf(`
package ruleset_v2
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/proxychain/codegen/codegen.go
// The purpose of this is serialization of rulesets from JSON or YAML into functional options suitable
// for use in proxychains.

import (
	"github.com/everywall/ladder/proxychain"
	tx "github.com/everywall/ladder/proxychain/responsemodifiers"
)

type ResponseModifierFactory func(params ...string) proxychain.ResponseModification

var rsmModMap map[string]ResponseModifierFactory

func init() {
	rsmModMap = make(map[string]ResponseModifierFactory)

%s
}`, strings.Join(factoryMaps, "\n"))
	return code, nil
}
|
|
||||||
|
|
||||||
func requestModToFactoryMap(fn *ast.FuncDecl) (modMap string) {
|
|
||||||
paramCount := len(fn.Type.Params.List)
|
|
||||||
name := fn.Name.Name
|
|
||||||
var x string
|
|
||||||
switch paramCount {
|
|
||||||
case 0:
|
|
||||||
x = fmt.Sprintf(" rqmModMap[\"%s\"] = func(_ ...string) proxychain.RequestModification {\n return rx.%s()\n }\n", name, name)
|
|
||||||
default:
|
|
||||||
p := []string{}
|
|
||||||
for i := 0; i < paramCount; i++ {
|
|
||||||
p = append(p, fmt.Sprintf("params[%d]", i))
|
|
||||||
}
|
|
||||||
params := strings.Join(p, ", ")
|
|
||||||
x = fmt.Sprintf(" rqmModMap[\"%s\"] = func(params ...string) proxychain.RequestModification {\n return rx.%s(%s)\n }\n", name, name, params)
|
|
||||||
}
|
|
||||||
return x
|
|
||||||
}
|
|
||||||
|
|
||||||
// requestModCodeGen parses every eligible Go file in dir and returns the
// source of a generated file that registers each exported, receiver-less
// function as a RequestModifierFactory in rqmModMap.
//
// Parameters:
//   - dir: directory containing the request modifier sources.
//
// Returns the generated Go source, or an error if the directory cannot be
// read or a file fails to parse.
func requestModCodeGen(dir string) (code string, err error) {
	fset := token.NewFileSet()

	files, err := os.ReadDir(dir)
	if err != nil {
		// Fix: was panic(err); return the error like the rest of this
		// function already does for parse failures.
		return "", err
	}

	factoryMaps := []string{}
	for _, file := range files {
		if !shouldGenCodeFor(file) {
			continue
		}

		// Parse each Go file.
		node, err := parser.ParseFile(fset, filepath.Join(dir, file.Name()), nil, parser.ParseComments)
		if err != nil {
			return "", err
		}

		// Collect every exported top-level function; fn.Recv == nil skips methods.
		ast.Inspect(node, func(n ast.Node) bool {
			fn, ok := n.(*ast.FuncDecl)
			if ok && fn.Recv == nil && fn.Name.IsExported() {
				factoryMaps = append(factoryMaps, requestModToFactoryMap(fn))
			}
			return true
		})
	}

	code = fmt.Sprintf(`
package ruleset_v2
// DO NOT EDIT THIS FILE. It is automatically generated by ladder/proxychain/codegen/codegen.go
// The purpose of this is serialization of rulesets from JSON or YAML into functional options suitable
// for use in proxychains.

import (
	"github.com/everywall/ladder/proxychain"
	rx "github.com/everywall/ladder/proxychain/requestmodifiers"
)

type RequestModifierFactory func(params ...string) proxychain.RequestModification

var rqmModMap map[string]RequestModifierFactory

func init() {
	rqmModMap = make(map[string]RequestModifierFactory)

%s
}`, strings.Join(factoryMaps, "\n"))
	return code, nil
}
|
|
||||||
|
|
||||||
func shouldGenCodeFor(file fs.DirEntry) bool {
|
|
||||||
if file.IsDir() {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if filepath.Ext(file.Name()) != ".go" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if strings.HasSuffix(file.Name(), "_test.go") {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
rqmCode, err := requestModCodeGen("../requestmodifiers/")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
// fmt.Println(rqmCode)
|
|
||||||
|
|
||||||
fq, err := os.Create("../ruleset/rule_reqmod_types.gen.go")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
_, err = io.WriteString(fq, rqmCode)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
rsmCode, err := responseModCodeGen("../responsemodifiers/")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
// fmt.Println(rsmCode)
|
|
||||||
|
|
||||||
fs, err := os.Create("../ruleset/rule_resmod_types.gen.go")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
_, err = io.WriteString(fs, rsmCode)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,541 +0,0 @@
|
|||||||
package proxychain
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
http "github.com/bogdanfinn/fhttp"
|
|
||||||
tls_client "github.com/bogdanfinn/tls-client"
|
|
||||||
profiles "github.com/bogdanfinn/tls-client/profiles"
|
|
||||||
"io"
|
|
||||||
"log"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
/*
|
|
||||||
ProxyChain manages the process of forwarding an HTTP request to an upstream server,
|
|
||||||
applying request and response modifications along the way.
|
|
||||||
|
|
||||||
- It accepts incoming HTTP requests (as a Fiber *ctx), and applies
|
|
||||||
request modifiers (ReqMods) and response modifiers (ResMods) before passing the
|
|
||||||
upstream response back to the client.
|
|
||||||
|
|
||||||
- ProxyChains can be reused to avoid memory allocations. However, they are not concurrent-safe
|
|
||||||
so a ProxyChainPool should be used with mutexes to avoid memory errors.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# EXAMPLE
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
import (
|
|
||||||
|
|
||||||
rx "ladder/pkg/proxychain/requestmodifiers"
|
|
||||||
tx "ladder/pkg/proxychain/responsemodifiers"
|
|
||||||
"ladder/pkg/proxychain/responsemodifiers/rewriters"
|
|
||||||
"ladder/internal/proxychain"
|
|
||||||
|
|
||||||
)
|
|
||||||
|
|
||||||
proxychain.NewProxyChain().
|
|
||||||
|
|
||||||
SetFiberCtx(c).
|
|
||||||
SetRequestModifications(
|
|
||||||
rx.BlockOutgoingCookies(),
|
|
||||||
rx.SpoofOrigin(),
|
|
||||||
rx.SpoofReferrer(),
|
|
||||||
).
|
|
||||||
SetResultModifications(
|
|
||||||
tx.BlockIncomingCookies(),
|
|
||||||
tx.RewriteHTMLResourceURLs()
|
|
||||||
).
|
|
||||||
Execute()
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
client ladder service upstream
|
|
||||||
|
|
||||||
┌─────────┐ ┌────────────────────────┐ ┌─────────┐
|
|
||||||
│ │GET │ │ │ │
|
|
||||||
│ req────┼───► ProxyChain │ │ │
|
|
||||||
│ │ │ │ │ │ │
|
|
||||||
│ │ │ ▼ │ │ │
|
|
||||||
│ │ │ apply │ │ │
|
|
||||||
│ │ │ RequestModifications │ │ │
|
|
||||||
│ │ │ │ │ │ │
|
|
||||||
│ │ │ ▼ │ │ │
|
|
||||||
│ │ │ send GET │ │ │
|
|
||||||
│ │ │ Request req────────┼─► │ │
|
|
||||||
│ │ │ │ │ │
|
|
||||||
│ │ │ 200 OK │ │ │
|
|
||||||
│ │ │ ┌────────────────┼─response │
|
|
||||||
│ │ │ ▼ │ │ │
|
|
||||||
│ │ │ apply │ │ │
|
|
||||||
│ │ │ ResultModifications │ │ │
|
|
||||||
│ │ │ │ │ │ │
|
|
||||||
│ │◄───┼───────┘ │ │ │
|
|
||||||
│ │ │ 200 OK │ │ │
|
|
||||||
│ │ │ │ │ │
|
|
||||||
└─────────┘ └────────────────────────┘ └─────────┘
|
|
||||||
*/
|
|
||||||
// ProxyChain holds the per-request state and the configured modifier pipeline.
// It is reused across requests (see _reset) and is NOT safe for concurrent use.
type ProxyChain struct {
	Context    *fiber.Ctx     // incoming client request context; must be set per request via SetFiberCtx
	Client     HTTPClient     // default upstream client, created in NewProxyChain
	onceClient HTTPClient     // when non-nil, used for the next Execute instead of Client (see SetOnceHTTPClient)
	Request    *http.Request  // outgoing upstream request, built by _initializeRequest
	Response   *http.Response // upstream response, populated during _execute
	// modifier pipelines; the "once" variants are cleared after each Execute
	requestModifications      []RequestModification
	onceRequestModifications  []RequestModification
	onceResponseModifications []ResponseModification
	responseModifications     []ResponseModification
	debugMode                 bool   // enables extra logging (see SetDebugLogging)
	abortErr                  error  // set by abort(); short-circuits _execute
	APIPrefix                 string // path prefix stripped during URL extraction (see WithAPIPath)
}
|
|
||||||
|
|
||||||
// A ProxyStrategy is a pre-built proxychain with purpose-built defaults.
type ProxyStrategy ProxyChain

// A RequestModification is a function that should operate on the
// ProxyChain Req or Client field, using the fiber ctx as needed.
// Returning a non-nil error aborts the chain with a 500 response.
type RequestModification func(*ProxyChain) error

// A ResponseModification is a function that should operate on the
// ProxyChain Res (http result) & Body (buffered http response body) field.
// Returning a non-nil error aborts the chain with a 500 response.
type ResponseModification func(*ProxyChain) error
|
|
||||||
|
|
||||||
// HTTPClient abstracts the upstream HTTP client so transports can be swapped
// (e.g. for different TLS fingerprints). The method set mirrors the
// bogdanfinn/tls-client HttpClient interface used by NewProxyChain.
type HTTPClient interface {
	GetCookies(u *url.URL) []*http.Cookie
	SetCookies(u *url.URL, cookies []*http.Cookie)
	SetCookieJar(jar http.CookieJar)
	GetCookieJar() http.CookieJar
	SetProxy(proxyURL string) error
	GetProxy() string
	SetFollowRedirect(followRedirect bool)
	GetFollowRedirect() bool
	CloseIdleConnections()
	Do(req *http.Request) (*http.Response, error)
	Get(url string) (resp *http.Response, err error)
	Head(url string) (resp *http.Response, err error)
	Post(url, contentType string, body io.Reader) (resp *http.Response, err error)
}
|
|
||||||
|
|
||||||
// SetRequestModifications sets the ProxyChain's request modifiers
|
|
||||||
// the modifier will not fire until ProxyChain.Execute() is run.
|
|
||||||
func (chain *ProxyChain) SetRequestModifications(mods ...RequestModification) *ProxyChain {
|
|
||||||
chain.requestModifications = mods
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddRequestModifications adds more request modifiers to the ProxyChain
|
|
||||||
// the modifier will not fire until ProxyChain.Execute() is run.
|
|
||||||
func (chain *ProxyChain) AddRequestModifications(mods ...RequestModification) *ProxyChain {
|
|
||||||
chain.requestModifications = append(chain.requestModifications, mods...)
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddOnceRequestModifications adds a request modifier to the ProxyChain that should only fire once
|
|
||||||
// the modifier will not fire until ProxyChain.Execute() is run and will be removed after it has been applied.
|
|
||||||
func (chain *ProxyChain) AddOnceRequestModifications(mods ...RequestModification) *ProxyChain {
|
|
||||||
chain.onceRequestModifications = append(chain.onceRequestModifications, mods...)
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddOnceResponseModifications adds a response modifier to the ProxyChain that should only fire once
|
|
||||||
// the modifier will not fire until ProxyChain.Execute() is run and will be removed after it has been applied.
|
|
||||||
func (chain *ProxyChain) AddOnceResponseModifications(mods ...ResponseModification) *ProxyChain {
|
|
||||||
chain.onceResponseModifications = append(chain.onceResponseModifications, mods...)
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddResponseModifications sets the ProxyChain's response modifiers.
// The modifiers do not run until ProxyChain.Execute() is run.
//
// NOTE(review): despite the "Add" name, this REPLACES the list rather than
// appending, unlike AddRequestModifications which appends. Confirm whether
// callers rely on the replace semantics before renaming or changing this.
func (chain *ProxyChain) AddResponseModifications(mods ...ResponseModification) *ProxyChain {
	chain.responseModifications = mods
	return chain
}
|
|
||||||
|
|
||||||
// WithAPIPath trims the path during URL extraction.
|
|
||||||
// example: using path = "api/outline/", a path like "http://localhost:8080/api/outline/https://example.com" becomes "https://example.com"
|
|
||||||
func (chain *ProxyChain) WithAPIPath(path string) *ProxyChain {
|
|
||||||
chain.APIPrefix = path
|
|
||||||
chain.APIPrefix = strings.TrimSuffix(chain.APIPrefix, "*")
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
func (chain *ProxyChain) _initializeRequest() (*http.Request, error) {
|
|
||||||
if chain.Context == nil {
|
|
||||||
chain.abortErr = chain.abort(errors.New("no context set"))
|
|
||||||
return nil, chain.abortErr
|
|
||||||
}
|
|
||||||
// initialize a request (without url)
|
|
||||||
req, err := http.NewRequest(chain.Context.Method(), "", nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
chain.Request = req
|
|
||||||
switch chain.Context.Method() {
|
|
||||||
case "GET":
|
|
||||||
case "DELETE":
|
|
||||||
case "HEAD":
|
|
||||||
case "OPTIONS":
|
|
||||||
break
|
|
||||||
case "POST":
|
|
||||||
case "PUT":
|
|
||||||
case "PATCH":
|
|
||||||
// stream content of body from client request to upstream request
|
|
||||||
chain.Request.Body = io.NopCloser(chain.Context.Request().BodyStream())
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("unsupported request method from client: '%s'", chain.Context.Method())
|
|
||||||
}
|
|
||||||
|
|
||||||
return req, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// reconstructURLFromReferer reconstructs the URL using the referer's scheme, host, and the relative path / queries
|
|
||||||
func reconstructURLFromReferer(referer *url.URL, relativeURL *url.URL) (*url.URL, error) {
|
|
||||||
// Extract the real url from referer path
|
|
||||||
realURL, err := url.Parse(strings.TrimPrefix(referer.Path, "/"))
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("error parsing real URL from referer '%s': %v", referer.Path, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if realURL.Scheme == "" || realURL.Host == "" {
|
|
||||||
return nil, fmt.Errorf("invalid referer URL: '%s' on request '%s", referer.String(), relativeURL.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Printf("rewrite relative URL using referer: '%s' -> '%s'\n", relativeURL.String(), realURL.String())
|
|
||||||
|
|
||||||
return &url.URL{
|
|
||||||
Scheme: referer.Scheme,
|
|
||||||
Host: referer.Host,
|
|
||||||
Path: realURL.Path,
|
|
||||||
RawQuery: realURL.RawQuery,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// prevents calls like: http://localhost:8080/http://localhost:8080
|
|
||||||
func preventRecursiveProxyRequest(urlQuery *url.URL, baseProxyURL string) *url.URL {
|
|
||||||
u := urlQuery.String()
|
|
||||||
isRecursive := strings.HasPrefix(u, baseProxyURL) || u == baseProxyURL
|
|
||||||
if !isRecursive {
|
|
||||||
return urlQuery
|
|
||||||
}
|
|
||||||
|
|
||||||
fixedURL, err := url.Parse(strings.TrimPrefix(strings.TrimPrefix(urlQuery.String(), baseProxyURL), "/"))
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("proxychain: failed to fix recursive request: '%s' -> '%s\n'", baseProxyURL, u)
|
|
||||||
return urlQuery
|
|
||||||
}
|
|
||||||
return preventRecursiveProxyRequest(fixedURL, baseProxyURL)
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractURL extracts a URL from the request ctx
|
|
||||||
func (chain *ProxyChain) extractURL() (*url.URL, error) {
|
|
||||||
isLocal := strings.HasPrefix(chain.Context.BaseURL(), "http://localhost") || strings.HasPrefix(chain.Context.BaseURL(), "http://127.0.0.1")
|
|
||||||
isReqPath := strings.HasPrefix(chain.Context.Path(), "/http")
|
|
||||||
isAPI := strings.HasPrefix(chain.Context.Path(), "/api")
|
|
||||||
isOutline := strings.HasPrefix(chain.Context.Path(), "/outline")
|
|
||||||
|
|
||||||
if isLocal || isReqPath || isAPI || isOutline {
|
|
||||||
return chain.extractURLFromPath()
|
|
||||||
}
|
|
||||||
|
|
||||||
u, err := url.Parse(chain.Context.BaseURL())
|
|
||||||
if err != nil {
|
|
||||||
return &url.URL{}, err
|
|
||||||
}
|
|
||||||
parts := strings.Split(u.Hostname(), ".")
|
|
||||||
if len(parts) < 2 {
|
|
||||||
fmt.Println("path")
|
|
||||||
return chain.extractURLFromPath()
|
|
||||||
}
|
|
||||||
|
|
||||||
return chain.extractURLFromSubdomain()
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractURLFromSubdomain extracts the upstream URL from the request's host
// subdomain when subdomain-encoding is used. (The original doc comment named
// the wrong function, extractURLFromPath.)
func (chain *ProxyChain) extractURLFromSubdomain() (*url.URL, error) {
	u, err := url.Parse(chain.Context.BaseURL())
	if err != nil {
		return &url.URL{}, err
	}
	parts := strings.Split(u.Hostname(), ".")
	if len(parts) < 2 {
		// no subdomain set, fallback to path extraction
		return chain.extractURLFromPath()
	}
	// Everything left of the proxy's own two-part domain is the encoded host.
	subdomain := strings.Join(parts[:len(parts)-2], ".")
	subURL := subdomain
	// Decode the dash-encoding: "--" stands for a literal '-', a single '-'
	// stands for '.'; "|" is a temporary placeholder protecting literal
	// dashes during the swap.
	subURL = strings.ReplaceAll(subURL, "--", "|")
	subURL = strings.ReplaceAll(subURL, "-", ".")
	subURL = strings.ReplaceAll(subURL, "|", "-")
	// assumes upstream is reachable over https — TODO confirm
	return url.Parse(fmt.Sprintf("https://%s/%s", subURL, u.Path))
}
|
|
||||||
|
|
||||||
// extractURLFromPath extracts a URL from the request ctx. If the URL in the request
// is a relative path, it reconstructs the full URL using the referer header.
func (chain *ProxyChain) extractURLFromPath() (*url.URL, error) {
	// The wildcard route parameter carries everything after the route prefix.
	reqURL := chain.Context.Params("*")

	// Drop the configured API prefix (see WithAPIPath) before parsing.
	reqURL = strings.TrimPrefix(reqURL, chain.APIPrefix)

	// sometimes client requests doubleroot '//'
	// there is a bug somewhere else, but this is a workaround until we find it
	if strings.HasPrefix(reqURL, "/") || strings.HasPrefix(reqURL, `%2F`) {
		reqURL = strings.TrimPrefix(reqURL, "/")
		reqURL = strings.TrimPrefix(reqURL, `%2F`)
	}

	// unescape url query; on failure keep the raw value (best-effort)
	uReqURL, err := url.QueryUnescape(reqURL)
	if err == nil {
		reqURL = uReqURL
	}

	urlQuery, err := url.Parse(reqURL)
	if err != nil {
		return nil, fmt.Errorf("error parsing request URL '%s': %v", reqURL, err)
	}

	// prevent recursive proxy requests
	// (e.g. http://localhost:8080/http://localhost:8080/...)
	fullURL := chain.Context.Request().URI()
	proxyURL := fmt.Sprintf("%s://%s", fullURL.Scheme(), fullURL.Host())
	urlQuery = preventRecursiveProxyRequest(urlQuery, proxyURL)

	// Handle standard paths
	// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
	isRelativePath := urlQuery.Scheme == ""
	if !isRelativePath {
		return urlQuery, nil
	}

	// Handle relative URLs
	// eg: https://localhost:8080/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
	// NOTE(review): url.Parse rarely returns an error here, so the error
	// branch below is mostly defensive.
	referer, err := url.Parse(chain.Context.Get("referer"))
	relativePath := urlQuery
	if err != nil {
		return nil, fmt.Errorf("error parsing referer URL from req: '%s': %v", relativePath, err)
	}
	return reconstructURLFromReferer(referer, relativePath)
}
|
|
||||||
|
|
||||||
// SetFiberCtx takes the request ctx from the client
|
|
||||||
// for the modifiers and execute function to use.
|
|
||||||
// it must be set everytime a new request comes through
|
|
||||||
// if the upstream request url cannot be extracted from the ctx,
|
|
||||||
// a 500 error will be sent back to the client
|
|
||||||
func (chain *ProxyChain) SetFiberCtx(ctx *fiber.Ctx) *ProxyChain {
|
|
||||||
chain.Context = ctx
|
|
||||||
|
|
||||||
// initialize the request and prepare it for modification
|
|
||||||
req, err := chain._initializeRequest()
|
|
||||||
if err != nil {
|
|
||||||
chain.abortErr = chain.abort(err)
|
|
||||||
}
|
|
||||||
chain.Request = req
|
|
||||||
|
|
||||||
// extract the URL for the request and add it to the new request
|
|
||||||
url, err := chain.extractURL()
|
|
||||||
if err != nil {
|
|
||||||
chain.abortErr = chain.abort(err)
|
|
||||||
} else {
|
|
||||||
chain.Request.URL = url
|
|
||||||
fmt.Printf("extracted URL: %s\n", chain.Request.URL)
|
|
||||||
}
|
|
||||||
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
func (chain *ProxyChain) validateCtxIsSet() error {
|
|
||||||
if chain.Context != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
err := errors.New("proxyChain was called without setting a fiber Ctx. Use ProxyChain.SetFiberCtx()")
|
|
||||||
chain.abortErr = chain.abort(err)
|
|
||||||
return chain.abortErr
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetHTTPClient sets a new upstream http client transport
|
|
||||||
// useful for modifying TLS
|
|
||||||
func (chain *ProxyChain) SetHTTPClient(httpClient HTTPClient) *ProxyChain {
|
|
||||||
chain.Client = httpClient
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetOnceHTTPClient sets a new upstream http client transport temporarily
|
|
||||||
// and clears it once it is used.
|
|
||||||
func (chain *ProxyChain) SetOnceHTTPClient(httpClient HTTPClient) *ProxyChain {
|
|
||||||
chain.onceClient = httpClient
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetVerbose changes the logging behavior to print
|
|
||||||
// the modification steps and applied rulesets for debugging
|
|
||||||
func (chain *ProxyChain) SetDebugLogging(isDebugMode bool) *ProxyChain {
|
|
||||||
if isDebugMode {
|
|
||||||
log.Println("DEBUG MODE ENABLED")
|
|
||||||
}
|
|
||||||
chain.debugMode = isDebugMode
|
|
||||||
return chain
|
|
||||||
}
|
|
||||||
|
|
||||||
// abort proxychain and return 500 error to client
|
|
||||||
// this will prevent Execute from firing and reset the state
|
|
||||||
// returns the initial error enriched with context
|
|
||||||
func (chain *ProxyChain) abort(err error) error {
|
|
||||||
// defer chain._reset()
|
|
||||||
chain.abortErr = err
|
|
||||||
chain.Context.Response().SetStatusCode(500)
|
|
||||||
var e error
|
|
||||||
if chain.Request.URL != nil {
|
|
||||||
e = fmt.Errorf("ProxyChain error for '%s': %s", chain.Request.URL.String(), err.Error())
|
|
||||||
} else {
|
|
||||||
e = fmt.Errorf("ProxyChain error: '%s'", err.Error())
|
|
||||||
}
|
|
||||||
chain.Context.SendString(e.Error())
|
|
||||||
log.Println(e.Error())
|
|
||||||
return e
|
|
||||||
}
|
|
||||||
|
|
||||||
// _reset clears the per-request state of the ProxyChain so the value can be
// reused for the next request. The Client, Response and onceClient fields are
// deliberately retained (their resets remain commented out below).
func (chain *ProxyChain) _reset() {
	chain.abortErr = nil
	chain.Request = nil
	// chain.Response = nil
	chain.Context = nil
	// "once" pipelines never survive across requests.
	chain.onceResponseModifications = []ResponseModification{}
	chain.onceRequestModifications = []RequestModification{}
	// chain.onceClient = nil
}
|
|
||||||
|
|
||||||
// NewProxyChain initializes a new ProxyChain with a default upstream client
// that impersonates a Chrome 117 TLS fingerprint and uses a 20s timeout.
// It panics if the TLS client cannot be constructed (startup-time failure).
func NewProxyChain() *ProxyChain {
	chain := new(ProxyChain)

	options := []tls_client.HttpClientOption{
		tls_client.WithTimeoutSeconds(20),
		//tls_client.WithRandomTLSExtensionOrder(),
		tls_client.WithClientProfile(profiles.Chrome_117),
		// tls_client.WithNotFollowRedirects(),
		// tls_client.WithCookieJar(jar), // create cookieJar instance and pass it as argument
	}
	client, err := tls_client.NewHttpClient(tls_client.NewNoopLogger(), options...)
	if err != nil {
		panic(err)
	}
	chain.Client = client

	return chain
}
|
|
||||||
|
|
||||||
/// ========================================================================================================
|
|
||||||
|
|
||||||
// _execute sends the request for the ProxyChain and returns the raw body only.
// The caller is responsible for returning a response back to the requestor,
// and for calling chain._reset() when done with the body.
//
// Pipeline: preflight checks -> request modifiers (persistent, then one-shot)
// -> upstream request -> response modifiers (persistent, then one-shot) ->
// upstream body. Any modifier or transport error aborts the chain (500 to the
// client).
func (chain *ProxyChain) _execute() (io.Reader, error) {
	// ================== PREFLIGHT CHECKS =============================
	// A prior abort (abortErr set) short-circuits the whole pipeline.
	if chain.validateCtxIsSet() != nil || chain.abortErr != nil {
		return nil, chain.abortErr
	}
	if chain.Request == nil {
		return nil, errors.New("proxychain request not yet initialized")
	}
	if chain.Request.URL.Scheme == "" {
		return nil, errors.New("request url not set or invalid. Check ProxyChain ReqMods for issues")
	}

	// ======== REQUEST MODIFICATIONS :: [client -> ladder] -> upstream -> ladder -> client =============================
	// Apply requestModifications to proxychain
	for _, applyRequestModificationsTo := range chain.requestModifications {
		err := applyRequestModificationsTo(chain)
		if err != nil {
			return nil, chain.abort(err)
		}
	}

	// Apply onceRequestModifications to proxychain and clear them
	for _, applyOnceRequestModificationsTo := range chain.onceRequestModifications {
		err := applyOnceRequestModificationsTo(chain)
		if err != nil {
			return nil, chain.abort(err)
		}
	}
	chain.onceRequestModifications = []RequestModification{}

	// ======== SEND REQUEST UPSTREAM :: client -> [ladder -> upstream] -> ladder -> client =============================
	// Send Request Upstream
	if chain.onceClient != nil {
		// if chain.SetOnceClient() is used, use that client instead of the
		// default http client temporarily.
		resp, err := chain.onceClient.Do(chain.Request)
		if err != nil {
			return nil, chain.abort(err)
		}
		chain.Response = resp
		// chain.onceClient = nil
	} else {
		resp, err := chain.Client.Do(chain.Request)
		if err != nil {
			return nil, chain.abort(err)
		}
		chain.Response = resp
	}

	// ======== APPLY RESPONSE MODIFIERS :: client -> ladder -> [upstream -> ladder] -> client =============================
	// Apply ResponseModifiers to proxychain
	for _, applyResultModificationsTo := range chain.responseModifications {
		err := applyResultModificationsTo(chain)
		if err != nil {
			return nil, chain.abort(err)
		}
	}

	// Apply onceResponseModifications to proxychain and clear them
	for _, applyOnceResponseModificationsTo := range chain.onceResponseModifications {
		err := applyOnceResponseModificationsTo(chain)
		if err != nil {
			return nil, chain.abort(err)
		}
	}
	chain.onceResponseModifications = []ResponseModification{}

	// ======== RETURN BODY TO CLIENT :: client -> ladder -> upstream -> [ladder -> client] =============================
	return chain.Response.Body, nil
}
|
|
||||||
|
|
||||||
// Execute sends the request for the ProxyChain and returns the request to the sender
// and resets the fields so that the ProxyChain can be reused.
// if any step in the ProxyChain fails, the request will abort and a 500 error will
// be returned to the client
func (chain *ProxyChain) Execute() error {
	// Reset chain state on every exit path (success or failure) so the
	// ProxyChain can be reused for the next request.
	defer chain._reset()
	// _execute runs the modifier pipeline and the upstream request,
	// returning the (possibly modified) response body stream.
	body, err := chain._execute()
	if err != nil {
		log.Println(err)
		return err
	}
	if chain.Context == nil {
		// Without a client context there is nowhere to stream the body.
		// NOTE(review): Context appears to be a fiber/fasthttp-style ctx
		// (Response().Header.Peek below) — confirm.
		return errors.New("no context set")
	}

	// TODO: this seems broken
	// in case api user did not set or forward content-type, we do it for them
	/*
		ct := string(chain.Context.Response().Header.Peek("content-type"))
		if ct == "" {
			chain.Context.Set("content-type", chain.Response.Header.Get("content-type"))
		}
	*/

	// Return request back to client
	return chain.Context.SendStream(body)

	// return chain.Context.SendStream(body)
}
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
package proxychain
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/url"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Pool map[url.URL]ProxyChain
|
|
||||||
|
|
||||||
func NewPool() Pool {
|
|
||||||
return map[url.URL]ProxyChain{}
|
|
||||||
}
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math/rand"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// AddCacheBusterQuery modifies query params to add a random parameter key
|
|
||||||
// In order to get the upstream network stack to serve a fresh copy of the page.
|
|
||||||
func AddCacheBusterQuery() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
ModifyQueryParams("ord", randomString(15)),
|
|
||||||
)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// randomString returns a string of the given length whose characters
// are drawn uniformly at random from a fixed alphanumeric-plus-dot
// alphabet (suitable for cache-buster query values).
func randomString(length int) string {
	const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789."
	out := make([]byte, length)
	for i := 0; i < length; i++ {
		out[i] = charset[rand.Intn(len(charset))]
	}
	return string(out)
}
|
|
||||||
@@ -1,142 +0,0 @@
|
|||||||
package bot
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"math/big"
|
|
||||||
"math/bits"
|
|
||||||
"math/rand"
|
|
||||||
"net"
|
|
||||||
"net/http"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Bot describes a crawler identity that can refresh its published IP
// ranges and produce an identity value.
// NOTE(review): the concrete *bot type below does NOT satisfy this
// interface — it implements UpdatePool(url string) error and
// GetRandomIP() string rather than UpdatePool() error and
// GetRandomIdentity() string. Confirm which side is out of date.
type Bot interface {
	UpdatePool() error
	GetRandomIdentity() string
}

// bot holds a spoofable search-engine crawler identity.
type bot struct {
	UserAgent   string  // User-Agent header value the crawler advertises
	Fingerprint string  // JA3 TLS fingerprint string (may be empty)
	IPPool      botPool // published source-IP CIDR ranges for this crawler
}

// botPool mirrors the JSON documents search engines publish listing
// their crawler IP ranges (e.g. Google's googlebot.json).
type botPool struct {
	Timestamp string      `json:"creationTime"`
	Prefixes  []botPrefix `json:"prefixes"`
}

// botPrefix is one CIDR entry from a crawler IP-range document;
// typically exactly one of IPv6/IPv4 is set per entry.
type botPrefix struct {
	IPv6 string `json:"ipv6Prefix,omitempty"`
	IPv4 string `json:"ipv4Prefix,omitempty"`
}
|
|
||||||
|
|
||||||
// TODO: move pointers around, not global variables

// GoogleBot is a prebuilt Googlebot identity with one seed IP prefix;
// call UpdatePool with Google's published JSON to refresh the prefixes.
var GoogleBot = bot{
	UserAgent: "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; Googlebot/2.1; http://www.google.com/bot.html) Chrome/79.0.3945.120 Safari/537.36",

	// https://github.com/trisulnsm/trisul-scripts/blob/master/lua/frontend_scripts/reassembly/ja3/prints/ja3fingerprint.json
	Fingerprint: "769,49195-49199-49196-49200-52393-52392-52244-52243-49161-49171-49162-49172-156-157-47-53-10,65281-0-23-35-13-5-18-16-11-10-21,29-23-24,0",

	// Seed pool so GetRandomIP works before any UpdatePool call.
	IPPool: botPool{
		Timestamp: "2023-11-28T23:00:56.000000",
		Prefixes: []botPrefix{
			{
				IPv4: "34.100.182.96/28",
			},
		},
	},
}
|
|
||||||
|
|
||||||
// BingBot is a prebuilt bingbot identity with one seed IP prefix.
// Note: no JA3 Fingerprint is set for this bot (zero value "").
var BingBot = bot{
	UserAgent: "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm) Chrome/79.0.3945.120 Safari/537.36",
	IPPool: botPool{
		Timestamp: "2023-03-08T10:00:00.121331",
		Prefixes: []botPrefix{
			{
				IPv4: "207.46.13.0/24",
			},
		},
	},
}
|
|
||||||
|
|
||||||
func (b *bot) UpdatePool(url string) error {
|
|
||||||
client := &http.Client{Timeout: 10 * time.Second}
|
|
||||||
|
|
||||||
resp, err := client.Get(url)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
|
||||||
return fmt.Errorf("failed to update googlebot IP pool: status code %s", resp.Status)
|
|
||||||
}
|
|
||||||
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
body, err := io.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = json.Unmarshal(body, &b.IPPool)
|
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *bot) GetRandomIP() string {
|
|
||||||
count := len(b.IPPool.Prefixes)
|
|
||||||
|
|
||||||
var prefix botPrefix
|
|
||||||
|
|
||||||
if count == 1 {
|
|
||||||
prefix = b.IPPool.Prefixes[0]
|
|
||||||
} else {
|
|
||||||
idx := rand.Intn(count)
|
|
||||||
prefix = b.IPPool.Prefixes[idx]
|
|
||||||
}
|
|
||||||
|
|
||||||
if prefix.IPv4 != "" {
|
|
||||||
ip, err := randomIPFromSubnet(prefix.IPv4)
|
|
||||||
if err == nil {
|
|
||||||
return ip.String()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if prefix.IPv6 != "" {
|
|
||||||
ip, err := randomIPFromSubnet(prefix.IPv6)
|
|
||||||
if err == nil {
|
|
||||||
return ip.String()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// fallback to default IP which is known to work
|
|
||||||
ip, _ := randomIPFromSubnet(b.IPPool.Prefixes[0].IPv4)
|
|
||||||
|
|
||||||
return ip.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
// randomIPFromSubnet returns a pseudorandom address inside the CIDR
// block c. Randomization only varies the final byte of the address
// (matching the original behavior), so blocks wider than /24 (IPv4)
// or /120 (IPv6) are not fully covered.
func randomIPFromSubnet(c string) (net.IP, error) {
	_, ipnet, err := net.ParseCIDR(c)
	if err != nil {
		return nil, err
	}

	// Host bits available, capped at 8 since only the last byte varies.
	ones, totalBits := ipnet.Mask.Size()
	hostBits := totalBits - ones
	if hostBits > 8 {
		hostBits = 8
	}

	// Number of addresses reachable by varying the last byte is
	// 2^hostBits. (The original computed hostBits*hostBits, which is
	// only coincidentally correct for /28-style blocks where
	// hostBits == 4, and could step outside narrower subnets.)
	n := 1 << hostBits

	// Start from the network base address (ipnet.IP), not the parsed
	// address, so non-aligned inputs like "10.0.0.5/28" still yield an
	// address inside the block.
	ip := make(net.IP, len(ipnet.IP))
	copy(ip, ipnet.IP)

	last := len(ip) - 1
	ip[last] += byte(rand.Intn(n))

	return ip, nil
}
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
package bot
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
// TestRandomIPFromSubnet checks that randomIPFromSubnet returns an
// address contained in each of Googlebot's published CIDR blocks.
// NOTE(review): this test makes a live HTTP request to
// developers.google.com, so it fails offline; if UpdatePool errors the
// prefix loop simply runs zero times. Consider a local fixture.
func TestRandomIPFromSubnet(t *testing.T) {
	err := GoogleBot.UpdatePool("https://developers.google.com/static/search/apis/ipranges/googlebot.json")
	if err != nil {
		t.Error(err)
	}

	for _, prefix := range GoogleBot.IPPool.Prefixes {
		// Prefer the IPv6 prefix when the entry carries one.
		subnet := prefix.IPv4
		if prefix.IPv6 != "" {
			subnet = prefix.IPv6
		}

		t.Run(subnet, func(t *testing.T) {
			_, ipnet, err := net.ParseCIDR(subnet)
			if err != nil {
				t.Error(err)
			}

			ip, err := randomIPFromSubnet(subnet)
			if err != nil {
				t.Error(err)
			}

			// The generated address must fall inside the source block.
			if !ipnet.Contains(ip) {
				t.Fail()
			}
		})
	}
}
|
|
||||||
@@ -1,45 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strings"
|
|
||||||
//"fmt"
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// forwardBlacklist lists (lowercased) client request headers that must
// not be forwarded to the upstream server: hop-by-hop headers, headers
// the proxy manages itself, and identity-revealing headers.
// Declared as an initialized package var instead of being populated in
// init() — same contents, no side-effecting init function.
var forwardBlacklist = map[string]bool{
	"host":              true,
	"connection":        true,
	"keep-alive":        true,
	"content-length":    true,
	"content-encoding":  true,
	"transfer-encoding": true,
	"referer":           true,
	"x-forwarded-for":   true,
	"x-real-ip":         true,
	"forwarded":         true,
	"accept-encoding":   true,
}
|
|
||||||
|
|
||||||
// ForwardRequestHeaders forwards the requests headers sent from the client to the upstream server
|
|
||||||
func ForwardRequestHeaders() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
forwardHeaders := func(key, value []byte) {
|
|
||||||
k := strings.ToLower(string(key))
|
|
||||||
v := string(value)
|
|
||||||
if forwardBlacklist[k] {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// fmt.Println(k, v)
|
|
||||||
chain.Request.Header.Set(k, v)
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.Context.Request().
|
|
||||||
Header.VisitAll(forwardHeaders)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,127 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain/requestmodifiers/bot"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// MasqueradeAsGoogleBot modifies user agent and x-forwarded for
|
|
||||||
// to appear to be a Google Bot
|
|
||||||
func MasqueradeAsGoogleBot() proxychain.RequestModification {
|
|
||||||
ip := bot.GoogleBot.GetRandomIP()
|
|
||||||
|
|
||||||
return masqueradeAsTrustedBot(bot.GoogleBot.UserAgent, ip, bot.GoogleBot.Fingerprint)
|
|
||||||
}
|
|
||||||
|
|
||||||
// MasqueradeAsBingBot modifies user agent and x-forwarded for
|
|
||||||
// to appear to be a Bing Bot
|
|
||||||
func MasqueradeAsBingBot() proxychain.RequestModification {
|
|
||||||
ip := bot.BingBot.GetRandomIP()
|
|
||||||
|
|
||||||
return masqueradeAsTrustedBot(bot.BingBot.Fingerprint, ip, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// MasqueradeAsWaybackMachineBot modifies user agent and x-forwarded for
// to appear to be a archive.org (wayback machine) Bot
func MasqueradeAsWaybackMachineBot() proxychain.RequestModification {
	const botUA string = "Mozilla/5.0 (compatible; archive.org_bot +http://www.archive.org/details/archive.org_bot)"
	// A known archive.org crawler address; no JA3 spoofing ("" below).
	const botIP string = "207.241.235.164"
	return masqueradeAsTrustedBot(botUA, botIP, "")
}
|
|
||||||
|
|
||||||
// MasqueradeAsFacebookBot modifies user agent and x-forwarded for
// to appear to be a Facebook Bot (link previews?)
func MasqueradeAsFacebookBot() proxychain.RequestModification {
	const botUA string = "facebookexternalhit/1.1 (+http://www.facebook.com/externalhit_uatext.php)"
	// Known Facebook crawler ranges (botIP below is picked from the list):
	// 31.13.97.0/24, 31.13.99.0/24, 31.13.100.0/24, 66.220.144.0/20, 69.63.189.0/24, 69.63.190.0/24, 69.171.224.0/20, 69.171.240.0/21, 69.171.248.0/24, 173.252.73.0/24, 173.252.74.0/24, 173.252.77.0/24, 173.252.100.0/22, 173.252.104.0/21, 173.252.112.0/24, 2a03:2880:10::/48, 2a03:2880:10ff::/48, 2a03:2880:11::/48, 2a03:2880:11ff::/48, 2a03:2880:20::/48, 2a03:2880:20ff::/48, 2a03:2880:21ff::/48, 2a03:2880:30ff::/48, 2a03:2880:31ff::/48, 2a03:2880:1010::/48, 2a03:2880:1020::/48, 2a03:2880:2020::/48, 2a03:2880:2050::/48, 2a03:2880:2040::/48, 2a03:2880:2110::/48, 2a03:2880:2130::/48, 2a03:2880:3010::/48, 2a03:2880:3020::/48
	const botIP string = "31.13.99.8"
	const ja3 string = "771,49199-49195-49171-49161-49200-49196-49172-49162-51-57-50-49169-49159-47-53-10-5-4-255,0-11-10-13-13172-16,23-25-28-27-24-26-22-14-13-11-12-9-10,0-1-2"
	return masqueradeAsTrustedBot(botUA, botIP, ja3)
}
|
|
||||||
|
|
||||||
// MasqueradeAsYandexBot modifies user agent and x-forwarded for
// to appear to be a Yandex Spider Bot
func MasqueradeAsYandexBot() proxychain.RequestModification {
	const botUA string = "Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)"
	// Known Yandex crawler ranges (botIP below is picked from the list):
	// 100.43.90.0/24, 37.9.115.0/24, 37.140.165.0/24, 77.88.22.0/25, 77.88.29.0/24, 77.88.31.0/24, 77.88.59.0/24, 84.201.146.0/24, 84.201.148.0/24, 84.201.149.0/24, 87.250.243.0/24, 87.250.253.0/24, 93.158.147.0/24, 93.158.148.0/24, 93.158.151.0/24, 93.158.153.0/32, 95.108.128.0/24, 95.108.138.0/24, 95.108.150.0/23, 95.108.158.0/24, 95.108.156.0/24, 95.108.188.128/25, 95.108.234.0/24, 95.108.248.0/24, 100.43.80.0/24, 130.193.62.0/24, 141.8.153.0/24, 178.154.165.0/24, 178.154.166.128/25, 178.154.173.29, 178.154.200.158, 178.154.202.0/24, 178.154.205.0/24, 178.154.239.0/24, 178.154.243.0/24, 37.9.84.253, 199.21.99.99, 178.154.162.29, 178.154.203.251, 178.154.211.250, 178.154.171.0/24, 178.154.200.0/24, 178.154.244.0/24, 178.154.246.0/24, 95.108.181.0/24, 95.108.246.252, 5.45.254.0/24, 5.255.253.0/24, 37.140.141.0/24, 37.140.188.0/24, 100.43.81.0/24, 100.43.85.0/24, 100.43.91.0/24, 199.21.99.0/24, 2a02:6b8:b000::/32, 2a02:6b8:b010::/32, 2a02:6b8:b011::/32, 2a02:6b8:c0e::/32
	const botIP string = "37.9.115.9"
	const ja3 string = "769,49200-49196-49192-49188-49172-49162-165-163-161-159-107-106-105-104-57-56-55-54-136-135-134-133-49202-49198-49194-49190-49167-49157-157-61-53-132-49199-49195-49191-49187-49171-49161-164-162-160-158-103-64-63-62-51-50-49-48-154-153-152-151-69-68-67-66-49201-49197-49193-49189-49166-49156-156-60-47-150-65-7-49169-49159-49164-49154-5-4-49170-49160-22-19-16-13-49165-49155-10-255,0-11-10-35-13-15,23-25-28-27-24-26-22-14-13-11-12-9-10,0-1-2"
	return masqueradeAsTrustedBot(botUA, botIP, ja3)
}
|
|
||||||
|
|
||||||
// MasqueradeAsBaiduBot modifies user agent and x-forwarded for
// to appear to be a Baidu Spider Bot
func MasqueradeAsBaiduBot() proxychain.RequestModification {
	const botUA string = "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
	// Known Baidu crawler ranges (botIP below is picked from the list):
	// 180.76.15.0/24, 119.63.196.0/24, 115.239.212./24, 119.63.199.0/24, 122.81.208.0/22, 123.125.71.0/24, 180.76.4.0/24, 180.76.5.0/24, 180.76.6.0/24, 185.10.104.0/24, 220.181.108.0/24, 220.181.51.0/24, 111.13.102.0/24, 123.125.67.144/29, 123.125.67.152/31, 61.135.169.0/24, 123.125.68.68/30, 123.125.68.72/29, 123.125.68.80/28, 123.125.68.96/30, 202.46.48.0/20, 220.181.38.0/24, 123.125.68.80/30, 123.125.68.84/31, 123.125.68.0/24
	const botIP string = "180.76.15.7"
	// No JA3 spoofing for Baidu ("" below).
	return masqueradeAsTrustedBot(botUA, botIP, "")
}
|
|
||||||
|
|
||||||
// MasqueradeAsDuckDuckBot modifies user agent and x-forwarded for
// to appear to be a DuckDuckGo Bot
func MasqueradeAsDuckDuckBot() proxychain.RequestModification {
	const botUA string = "DuckDuckBot/1.0; (+http://duckduckgo.com/duckduckbot.html)"
	// Known DuckDuckBot addresses (botIP below is picked from the list):
	// 46.51.197.88, 46.51.197.89, 50.18.192.250, 50.18.192.251, 107.21.1.61, 176.34.131.233, 176.34.135.167, 184.72.106.52, 184.72.115.86
	const botIP string = "46.51.197.88"
	// No JA3 spoofing for DuckDuckBot ("" below).
	return masqueradeAsTrustedBot(botUA, botIP, "")
}
|
|
||||||
|
|
||||||
// MasqueradeAsYahooBot modifies user agent and x-forwarded for
// to appear to be a Yahoo Bot
func MasqueradeAsYahooBot() proxychain.RequestModification {
	const botUA string = "Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)"
	// Known Yahoo Slurp crawler ranges (botIP below is picked from the list):
	// 5.255.250.0/24, 37.9.87.0/24, 67.195.37.0/24, 67.195.50.0/24, 67.195.110.0/24, 67.195.111.0/24, 67.195.112.0/23, 67.195.114.0/24, 67.195.115.0/24, 68.180.224.0/21, 72.30.132.0/24, 72.30.142.0/24, 72.30.161.0/24, 72.30.196.0/24, 72.30.198.0/24, 74.6.254.0/24, 74.6.8.0/24, 74.6.13.0/24, 74.6.17.0/24, 74.6.18.0/24, 74.6.22.0/24, 74.6.27.0/24, 74.6.168.0/24, 77.88.5.0/24, 77.88.47.0/24, 93.158.161.0/24, 98.137.72.0/24, 98.137.206.0/24, 98.137.207.0/24, 98.139.168.0/24, 114.111.95.0/24, 124.83.159.0/24, 124.83.179.0/24, 124.83.223.0/24, 141.8.144.0/24, 183.79.63.0/24, 183.79.92.0/24, 203.216.255.0/24, 211.14.11.0/24
	const ja3 = "769,49200-49196-49192-49188-49172-49162-163-159-107-106-57-56-136-135-49202-49198-49194-49190-49167-49157-157-61-53-132-49199-49195-49191-49187-49171-49161-162-158-103-64-51-50-49170-49160-154-153-69-68-22-19-49201-49197-49193-49189-49166-49156-49165-49155-156-60-47-150-65-10-7-49169-49159-49164-49154-5-4-255,0-11-10-13-15,25-24-23,0-1-2"
	const botIP string = "37.9.87.5"
	return masqueradeAsTrustedBot(botUA, botIP, ja3)
}
|
|
||||||
|
|
||||||
// masqueradeAsTrustedBot queues one-shot request modifications that
// make the outgoing request look like it originates from a trusted
// crawler: it spoofs the User-Agent and sets the common
// client-IP-forwarding headers to botIP, and strips referrer/origin.
// NOTE(review): the ja3 parameter is currently unused — the only code
// that reads it (SpoofJA3fingerprint) is commented out below.
func masqueradeAsTrustedBot(botUA string, botIP string, ja3 string) proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		chain.AddOnceRequestModifications(
			SpoofUserAgent(botUA),

			// general / nginx
			SetRequestHeader("X-Forwarded-For", botIP),
			SetRequestHeader("X-Real-IP", botIP),
			SetRequestHeader("True-Client-IP", botIP),
			SetRequestHeader("WL-Proxy-Client-IP", botIP),
			SetRequestHeader("X-Cluster-Client-IP", botIP),
			/*
				// akamai
				SetRequestHeader("True-Client-IP", botIP),

				// cloudflare
				// TODO: this seems to cause issues with CF... figure out workaround or remove
				Error 1000
				Ray ID: xxxxxxxxxxxxxxxx •
				2023-12-01 20:09:22 UTC
				DNS points to prohibited IP
				What happened?
				You've requested a page on a website (xxxxxxxxxxxxxxxxxxx) that is on the Cloudflare network. Unfortunately, it is resolving to an IP address that is creating a conflict within Cloudflare's system

				SetRequestHeader("CF-Connecting-IP", botIP),

				// weblogic
				SetRequestHeader("WL-Proxy-Client-IP", botIP),
				// azure
				SetRequestHeader("X-Cluster-Client-IP", botIP),
			*/

			// Crawlers do not send referrer/origin; remove any the
			// client supplied.
			DeleteRequestHeader("referrer"),
			DeleteRequestHeader("origin"),
		)

		/*
			if ja3 != "" {
				chain.AddOnceRequestModifications(
					SpoofJA3fingerprint(ja3, botUA),
				)
			}
		*/

		return nil
	}
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"regexp"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
func ModifyDomainWithRegex(matchRegex string, replacement string) proxychain.RequestModification {
|
|
||||||
match, err := regexp.Compile(matchRegex)
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("RequestModification :: ModifyDomainWithRegex error => invalid match regex: %s - %s", matchRegex, err.Error())
|
|
||||||
}
|
|
||||||
px.Request.URL.Host = match.ReplaceAllString(px.Request.URL.Host, replacement)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,100 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
//"net/http"
|
|
||||||
//http "github.com/Danny-Dasilva/fhttp"
|
|
||||||
http "github.com/bogdanfinn/fhttp"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SetOutgoingCookie modifes a specific cookie name
|
|
||||||
// by modifying the request cookie headers going to the upstream server.
|
|
||||||
// If the cookie name does not already exist, it is created.
|
|
||||||
func SetOutgoingCookie(name string, val string) proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
cookies := chain.Request.Cookies()
|
|
||||||
hasCookie := false
|
|
||||||
for _, cookie := range cookies {
|
|
||||||
if cookie.Name != name {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
hasCookie = true
|
|
||||||
cookie.Value = val
|
|
||||||
}
|
|
||||||
|
|
||||||
if hasCookie {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.Request.AddCookie(&http.Cookie{
|
|
||||||
Domain: chain.Request.URL.Host,
|
|
||||||
Name: name,
|
|
||||||
Value: val,
|
|
||||||
})
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetOutgoingCookies modifies a client request's cookie header
|
|
||||||
// to a raw Cookie string, overwriting existing cookies
|
|
||||||
func SetOutgoingCookies(cookies string) proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.Request.Header.Set("Cookies", cookies)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteOutgoingCookie modifies the http request's cookies header to
|
|
||||||
// delete a specific request cookie going to the upstream server.
|
|
||||||
// If the cookie does not exist, it does not do anything.
|
|
||||||
func DeleteOutgoingCookie(name string) proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
cookies := chain.Request.Cookies()
|
|
||||||
chain.Request.Header.Del("Cookies")
|
|
||||||
|
|
||||||
for _, cookie := range cookies {
|
|
||||||
if cookie.Name == name {
|
|
||||||
chain.Request.AddCookie(cookie)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteOutgoingCookies removes the cookie header entirely,
// preventing any cookies from reaching the upstream server.
func DeleteOutgoingCookies() proxychain.RequestModification {
	return func(px *proxychain.ProxyChain) error {
		// "Cookie" is the standard request header carrying all cookies.
		px.Request.Header.Del("Cookie")
		return nil
	}
}
|
|
||||||
|
|
||||||
// DeleteOutGoingCookiesExcept prevents non-whitelisted cookies from being sent from the client
|
|
||||||
// to the upstream proxy server. Cookies whose names are in the whitelist are not removed.
|
|
||||||
func DeleteOutgoingCookiesExcept(whitelist ...string) proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
// Convert whitelist slice to a map for efficient lookups
|
|
||||||
whitelistMap := make(map[string]struct{})
|
|
||||||
for _, cookieName := range whitelist {
|
|
||||||
whitelistMap[cookieName] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get all cookies from the request header
|
|
||||||
cookies := px.Request.Cookies()
|
|
||||||
|
|
||||||
// Clear the original Cookie header
|
|
||||||
px.Request.Header.Del("Cookie")
|
|
||||||
|
|
||||||
// Re-add cookies that are in the whitelist
|
|
||||||
for _, cookie := range cookies {
|
|
||||||
if _, found := whitelistMap[cookie.Name]; found {
|
|
||||||
px.Request.AddCookie(cookie)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"regexp"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
func ModifyPathWithRegex(matchRegex string, replacement string) proxychain.RequestModification {
|
|
||||||
match, err := regexp.Compile(matchRegex)
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("RequestModification :: ModifyPathWithRegex error => invalid match regex: %s - %s", matchRegex, err.Error())
|
|
||||||
}
|
|
||||||
px.Request.URL.Path = match.ReplaceAllString(px.Request.URL.Path, replacement)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
//"fmt"
|
|
||||||
"net/url"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ModifyQueryParams replaces query parameter values in URL's query params in a ProxyChain's URL.
|
|
||||||
// If the query param key doesn't exist, it is created.
|
|
||||||
func ModifyQueryParams(key string, value string) proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
q := chain.Request.URL.Query()
|
|
||||||
chain.Request.URL.RawQuery = modifyQueryParams(key, value, q)
|
|
||||||
//fmt.Println(chain.Request.URL.String())
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func modifyQueryParams(key string, value string, q url.Values) string {
|
|
||||||
if value == "" {
|
|
||||||
q.Del(key)
|
|
||||||
return q.Encode()
|
|
||||||
}
|
|
||||||
q.Set(key, value)
|
|
||||||
return q.Encode()
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SetRequestHeader modifies a specific outgoing header
|
|
||||||
// This is the header that the upstream server will see.
|
|
||||||
func SetRequestHeader(name string, val string) proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
px.Request.Header.Set(name, val)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteRequestHeader modifies a specific outgoing header
|
|
||||||
// This is the header that the upstream server will see.
|
|
||||||
func DeleteRequestHeader(name string) proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
px.Request.Header.Del(name)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
"regexp"
|
|
||||||
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// archivistUrl is the archive.is "latest snapshot" endpoint; the target
// URL is appended as a path segment.
const archivistUrl string = "https://archive.is/latest"

// RequestArchiveIs modifies a ProxyChain's URL to request an archived version from archive.is
func RequestArchiveIs() proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		// Unwrap URLs that already point at an archive.is snapshot so we
		// don't request an archive of an archive.
		rURL := preventRecursiveArchivistURLs(chain.Request.URL.String())
		// Drop the query string (already captured in rURL above).
		chain.Request.URL.RawQuery = ""
		newURL, err := url.Parse(fmt.Sprintf("%s/%s", archivistUrl, rURL))
		if err != nil {
			return err
		}

		// archivist seems to sabotage requests from cloudflare's DNS
		// bypass this just in case
		chain.AddOnceRequestModifications(ResolveWithGoogleDoH())

		chain.Request.URL = newURL

		// cleanup archivst headers
		script := `[...document.querySelector("body > center").childNodes].filter(e => e.id != "SOLID").forEach(e => e.remove())`
		chain.AddOnceResponseModifications(
			tx.InjectScriptAfterDOMContentLoaded(script),
		)
		return nil
	}
}
|
|
||||||
|
|
||||||
// preventRecursiveArchivistURLs unwraps a URL that already points at an
// archive.is snapshot, e.g.
// https://archive.is/20200421201055/https://rt.live/ -> https://rt.live/
// so the proxy does not request an archive of an archive. Non-snapshot
// URLs are returned unchanged.
func preventRecursiveArchivistURLs(url string) string {
	re := regexp.MustCompile(`https?:\/\/archive\.is\/\d+\/(https?:\/\/.*)`)
	if m := re.FindStringSubmatch(url); m != nil {
		return m[1]
	}
	return url
}
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/url"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// googleCacheUrl is Google's web-cache search endpoint; the target URL
// is appended query-escaped after the "cache:" operator.
const googleCacheUrl string = "https://webcache.googleusercontent.com/search?q=cache:"

// RequestGoogleCache modifies a ProxyChain's URL to request its Google Cache version.
func RequestGoogleCache() proxychain.RequestModification {
	return func(px *proxychain.ProxyChain) error {
		// Escape the original URL so it survives as a query argument.
		encodedURL := url.QueryEscape(px.Request.URL.String())
		newURL, err := url.Parse(googleCacheUrl + encodedURL)
		if err != nil {
			return err
		}
		px.Request.URL = newURL
		return nil
	}
}
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/url"
|
|
||||||
"regexp"
|
|
||||||
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// waybackUrl is the Wayback Machine prefix; appending a bare URL to it
// redirects to the latest archived capture.
const waybackUrl string = "https://web.archive.org/web/"

// RequestWaybackMachine modifies a ProxyChain's URL to request the wayback machine (archive.org) version.
func RequestWaybackMachine() proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		// Drop the query string before building the wayback URL.
		chain.Request.URL.RawQuery = ""
		// Unwrap URLs already pointing into the Wayback Machine so we
		// don't archive an archive.
		rURL := preventRecursiveWaybackURLs(chain.Request.URL.String())
		newURLString := waybackUrl + rURL
		newURL, err := url.Parse(newURLString)
		if err != nil {
			return err
		}
		chain.Request.URL = newURL

		// cleanup wayback headers
		script := `["wm-ipp-print", "wm-ipp-base"].forEach(id => { try { document.getElementById(id).remove() } catch{ } })`
		chain.AddOnceResponseModifications(
			tx.InjectScriptAfterDOMContentLoaded(script),
		)

		return nil
	}
}
|
|
||||||
|
|
||||||
// preventRecursiveWaybackURLs unwraps a URL that already points into
// the Wayback Machine (the "/web/<digits>/*<url>" listing form) so the
// proxy does not request an archive of an archive. Other URLs are
// returned unchanged.
func preventRecursiveWaybackURLs(url string) string {
	re := regexp.MustCompile(`https:\/\/web\.archive\.org\/web\/\d+\/\*(https?:\/\/.*)`)
	if m := re.FindStringSubmatch(url); m != nil {
		return m[1]
	}
	return url
}
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
http "github.com/bogdanfinn/fhttp"
|
|
||||||
|
|
||||||
/*
|
|
||||||
tls_client "github.com/bogdanfinn/tls-client"
|
|
||||||
//"net/http"
|
|
||||||
*/
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// resolveWithGoogleDoH resolves DNS using Google's DNS-over-HTTPS
|
|
||||||
func resolveWithGoogleDoH(host string) (string, error) {
|
|
||||||
url := "https://dns.google/resolve?name=" + host + "&type=A"
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
var result struct {
|
|
||||||
Answer []struct {
|
|
||||||
Data string `json:"data"`
|
|
||||||
} `json:"Answer"`
|
|
||||||
}
|
|
||||||
err = json.NewDecoder(resp.Body).Decode(&result)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the first A record
|
|
||||||
if len(result.Answer) > 0 {
|
|
||||||
return result.Answer[0].Data, nil
|
|
||||||
}
|
|
||||||
return "", fmt.Errorf("no DoH DNS record found for %s", host)
|
|
||||||
}
|
|
||||||
|
|
||||||
// CustomDialer is a net.Dialer whose DialContext resolves hostnames via
// Google DNS-over-HTTPS instead of the system resolver.
type CustomDialer struct {
	*net.Dialer
}

// newCustomDialer builds a CustomDialer with the given connect timeout
// and keep-alive interval.
func newCustomDialer(timeout, keepAlive time.Duration) *CustomDialer {
	return &CustomDialer{
		Dialer: &net.Dialer{
			Timeout:   timeout,
			KeepAlive: keepAlive,
		},
	}
}
|
|
||||||
|
|
||||||
func (cd *CustomDialer) DialContext(ctx context.Context, network, addr string) (net.Conn, error) {
|
|
||||||
host, port, err := net.SplitHostPort(addr)
|
|
||||||
if err != nil {
|
|
||||||
port = "443"
|
|
||||||
}
|
|
||||||
|
|
||||||
resolvedHost, err := resolveWithGoogleDoH(host)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return cd.Dialer.DialContext(ctx, network, net.JoinHostPort(resolvedHost, port))
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResolveWithGoogleDoH modifies a ProxyChain's client to make the request by resolving the URL
// using Google's DNS over HTTPs service.
//
// NOTE(review): the tls_client wiring below is commented out, so this
// modifier is currently a no-op — the returned modification does nothing.
// Confirm whether it should be connected to CustomDialer/resolveWithGoogleDoH
// defined above.
func ResolveWithGoogleDoH() proxychain.RequestModification {
	///customDialer := NewCustomDialer(10*time.Second, 10*time.Second)
	return func(chain *proxychain.ProxyChain) error {
		/*
			options := []tls_client.HttpClientOption{
				tls_client.WithTimeoutSeconds(30),
				tls_client.WithRandomTLSExtensionOrder(),
				tls_client.WithDialer(*customDialer.Dialer),
				//tls_client.WithClientProfile(profiles.Chrome_105),
			}

			client, err := tls_client.NewHttpClient(tls_client.NewNoopLogger(), options...)
			if err != nil {
				return err
			}

			chain.SetOnceHTTPClient(client)
		*/
		return nil
	}
}
||||||
@@ -1,24 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofOrigin modifies the origin header
|
|
||||||
// if the upstream server returns a Vary header
|
|
||||||
// it means you might get a different response if you change this
|
|
||||||
func SpoofOrigin(url string) proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
px.Request.Header.Set("origin", url)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// HideOrigin modifies the origin header
|
|
||||||
// so that it is the original origin, not the proxy
|
|
||||||
func HideOrigin() proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
px.Request.Header.Set("origin", px.Request.URL.String())
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrer modifies the referrer header.
|
|
||||||
// It is useful if the page can be accessed from a search engine
|
|
||||||
// or social media site, but not by browsing the website itself.
|
|
||||||
// if url is "", then the referrer header is removed.
|
|
||||||
func SpoofReferrer(url string) proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// change refer on client side js
|
|
||||||
script := fmt.Sprintf(`document.referrer = "%s"`, url)
|
|
||||||
chain.AddOnceResponseModifications(
|
|
||||||
tx.InjectScriptBeforeDOMContentLoaded(script),
|
|
||||||
)
|
|
||||||
|
|
||||||
if url == "" {
|
|
||||||
chain.Request.Header.Del("referrer")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
chain.Request.Header.Set("referrer", url)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// HideReferrer modifies the referrer header
|
|
||||||
// so that it is the original referrer, not the proxy
|
|
||||||
func HideReferrer() proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
px.Request.Header.Set("referrer", px.Request.URL.String())
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"math/rand"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromBaiduSearch modifies the referrer header
|
|
||||||
// pretending to be from a BaiduSearch
|
|
||||||
func SpoofReferrerFromBaiduSearch() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// https://www.baidu.com/link?url=5biIeDvUIihawf3Zbbysach2Xn4H3w3FzO6LZKgSs-B5Yt4M4RUFikokOk5zetf2&wd=&eqid=9da80d8208009b8480000706655d5ed6
|
|
||||||
referrer := fmt.Sprintf("https://baidu.com/link?url=%s", generateRandomBaiduURL())
|
|
||||||
chain.Request.Header.Set("referrer", referrer)
|
|
||||||
chain.Request.Header.Set("sec-fetch-site", "cross-site")
|
|
||||||
chain.Request.Header.Set("sec-fetch-dest", "document")
|
|
||||||
chain.Request.Header.Set("sec-fetch-mode", "navigate")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// utility functions ==================

// generateRandomString draws length characters uniformly at random from
// charset and returns them as a string. A fresh time-seeded source is
// created per call, matching the original behavior (and keeping the helper
// safe for concurrent use).
func generateRandomString(charset string, length int) string {
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	var sb strings.Builder
	sb.Grow(length)
	for i := 0; i < length; i++ {
		sb.WriteByte(charset[rng.Intn(len(charset))])
	}
	return sb.String()
}

// generateRandomBaiduURL fabricates a plausible Baidu result-link suffix:
// a 30-character alphanumeric token, the literal "-&wd=&eqid=", then a
// 16-character hex eqid.
func generateRandomBaiduURL() string {
	const alphanumericCharset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	const hexCharset = "0123456789abcdef"
	token := generateRandomString(alphanumericCharset, 30) // length before "-"
	eqid := generateRandomString(hexCharset, 16)           // length of eqid
	return token + "-" + "&wd=&eqid=" + eqid
}
||||||
@@ -1,20 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromBingSearch modifies the referrer header
|
|
||||||
// pretending to be from a bing search site
|
|
||||||
func SpoofReferrerFromBingSearch() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
SpoofReferrer("https://www.bing.com/"),
|
|
||||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
|
||||||
SetRequestHeader("sec-fetch-dest", "document"),
|
|
||||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
|
||||||
ModifyQueryParams("utm_source", "bing"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromGoogleSearch modifies the referrer header
|
|
||||||
// pretending to be from a google search site
|
|
||||||
func SpoofReferrerFromGoogleSearch() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
SpoofReferrer("https://www.google.com"),
|
|
||||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
|
||||||
SetRequestHeader("sec-fetch-dest", "document"),
|
|
||||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
|
||||||
ModifyQueryParams("utm_source", "google"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromLinkedInPost modifies the referrer header
|
|
||||||
// pretending to be from a linkedin post
|
|
||||||
func SpoofReferrerFromLinkedInPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
SpoofReferrer("https://www.linkedin.com/"),
|
|
||||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
|
||||||
SetRequestHeader("sec-fetch-dest", "document"),
|
|
||||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
|
||||||
ModifyQueryParams("utm_campaign", "post"),
|
|
||||||
ModifyQueryParams("utm_medium", "web"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromNaverSearch modifies the referrer header
|
|
||||||
// pretending to be from a Naver search (popular in South Korea)
|
|
||||||
func SpoofReferrerFromNaverSearch() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
referrer := fmt.Sprintf(
|
|
||||||
"https://search.naver.com/search.naver?where=nexearch&sm=top_hty&fbm=0&ie=utf8&query=%s",
|
|
||||||
chain.Request.URL.Host,
|
|
||||||
)
|
|
||||||
chain.Request.Header.Set("referrer", referrer)
|
|
||||||
chain.Request.Header.Set("sec-fetch-site", "cross-site")
|
|
||||||
chain.Request.Header.Set("sec-fetch-dest", "document")
|
|
||||||
chain.Request.Header.Set("sec-fetch-mode", "navigate")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromPinterestPost modifies the referrer header
|
|
||||||
// pretending to be from a pinterest post
|
|
||||||
func SpoofReferrerFromPinterestPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.Request.Header.Set("referrer", "https://www.pinterest.com/")
|
|
||||||
chain.Request.Header.Set("sec-fetch-site", "cross-site")
|
|
||||||
chain.Request.Header.Set("sec-fetch-dest", "document")
|
|
||||||
chain.Request.Header.Set("sec-fetch-mode", "navigate")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromQQPost modifies the referrer header
|
|
||||||
// pretending to be from a QQ post (popular social media in China)
|
|
||||||
func SpoofReferrerFromQQPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.Request.Header.Set("referrer", "https://new.qq.com/")
|
|
||||||
chain.Request.Header.Set("sec-fetch-site", "cross-site")
|
|
||||||
chain.Request.Header.Set("sec-fetch-dest", "document")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromRedditPost modifies the referrer header
|
|
||||||
// pretending to be from a reddit post
|
|
||||||
func SpoofReferrerFromRedditPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.Request.Header.Set("referrer", "https://www.reddit.com/")
|
|
||||||
chain.Request.Header.Set("sec-fetch-site", "cross-site")
|
|
||||||
chain.Request.Header.Set("sec-fetch-dest", "document")
|
|
||||||
chain.Request.Header.Set("sec-fetch-mode", "navigate")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromTumblrPost modifies the referrer header
|
|
||||||
// pretending to be from a tumblr post
|
|
||||||
func SpoofReferrerFromTumblrPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
SpoofReferrer("https://www.tumblr.com/"),
|
|
||||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
|
||||||
SetRequestHeader("sec-fetch-dest", "document"),
|
|
||||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromTwitterPost modifies the referrer header
|
|
||||||
// pretending to be from a twitter post
|
|
||||||
func SpoofReferrerFromTwitterPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
SpoofReferrer("https://t.co/"),
|
|
||||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
|
||||||
SetRequestHeader("sec-fetch-dest", "document"),
|
|
||||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromVkontaktePost modifies the referrer header
|
|
||||||
// pretending to be from a vkontakte post (popular in Russia)
|
|
||||||
func SpoofReferrerFromVkontaktePost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceRequestModifications(
|
|
||||||
SpoofReferrer("https://away.vk.com/"),
|
|
||||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
|
||||||
SetRequestHeader("sec-fetch-dest", "document"),
|
|
||||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"math/rand"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofReferrerFromWeiboPost modifies the referrer header
|
|
||||||
// pretending to be from a Weibo post (popular in China)
|
|
||||||
func SpoofReferrerFromWeiboPost() proxychain.RequestModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
referrer := fmt.Sprintf("http://weibo.com/u/%d", rand.Intn(90001))
|
|
||||||
chain.Request.Header.Set("referrer", referrer)
|
|
||||||
chain.Request.Header.Set("sec-fetch-site", "cross-site")
|
|
||||||
chain.Request.Header.Set("sec-fetch-dest", "document")
|
|
||||||
chain.Request.Header.Set("sec-fetch-mode", "navigate")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// UAParserJS is the embedded ua-parser-js distribution
// (https://github.com/faisalman/ua-parser-js/tree/master).
// Update the vendored copy with:
//
//	git submodule update --remote --merge
//
//go:embed vendor/ua-parser-js/dist/ua-parser.min.js
var UAParserJS string

// spoofUserAgentJS is the embedded user-agent spoofing script.
// note: spoof_user_agent.js has a dependency on ua-parser.min.js;
// ua-parser.min.js should be loaded first.
//
//go:embed spoof_user_agent.js
var spoofUserAgentJS string
||||||
// SpoofUserAgent modifies the user agent, both in the outgoing request
// headers and in the client-side JS environment (navigator.userAgent,
// appVersion and userAgentData, via the injected scripts).
func SpoofUserAgent(ua string) proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		// modify ua headers
		chain.AddOnceRequestModifications(
			SetRequestHeader("user-agent", ua),
		)

		// Substitute the target UA into the spoofing script.
		script := strings.ReplaceAll(spoofUserAgentJS, "{{USER_AGENT}}", ua)
		// NOTE(review): per the comment on spoofUserAgentJS above,
		// spoof_user_agent.js depends on ua-parser.min.js being loaded
		// FIRST, yet the spoofing script is added before UAParserJS here.
		// Verify whether InjectScriptBeforeDOMContentLoaded injects
		// later-added scripts ahead of earlier ones; if not, these two
		// arguments should be swapped.
		chain.AddOnceResponseModifications(
			tx.InjectScriptBeforeDOMContentLoaded(script),
			tx.InjectScriptBeforeDOMContentLoaded(UAParserJS),
		)

		return nil
	}
}
||||||
@@ -1,100 +0,0 @@
|
|||||||
(() => {
  const UA = "{{USER_AGENT}}";

  // monkey-patch navigator.userAgent so every read reports the spoofed UA
  {
    const { get } = Object.getOwnPropertyDescriptor(
      Navigator.prototype,
      "userAgent",
    );
    Object.defineProperty(Navigator.prototype, "userAgent", {
      get: new Proxy(get, {
        apply() {
          return UA;
        },
      }),
    });
  }

  // monkey-patch navigator.appVersion (the UA minus its "Mozilla/" prefix)
  {
    const { get } = Object.getOwnPropertyDescriptor(
      Navigator.prototype,
      "appVersion",
    );
    Object.defineProperty(Navigator.prototype, "appVersion", {
      get: new Proxy(get, {
        apply() {
          return UA.replace("Mozilla/", "");
        },
      }),
    });
  }

  // monkey-patch navigator.userAgentData
  // Assumes UAParser (ua-parser.min.js) is already loaded and available.
  function spoofUserAgentData(uaString) {
    // Parse the user-agent string
    const parser = new UAParser(uaString);
    const parsedData = parser.getResult();

    // Extracted data
    const platform = parsedData.os.name;
    const browserName = parsedData.browser.name;
    const browserMajorVersion = parsedData.browser.major;
    const isMobile =
      /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(
        uaString,
      );

    // Note: the class expression is immediately invoked, so
    // self.NavigatorUAData holds an *instance*, not a constructor.
    self.NavigatorUAData = self.NavigatorUAData || new class NavigatorUAData {
      brands = [{
        brand: browserName,
        version: browserMajorVersion,
      }];
      mobile = isMobile;
      platform = platform;
      toJSON() {
        return {
          brands: this.brands,
          mobile: this.mobile,
          platform: this.platform,
        };
      }
      getHighEntropyValues(hints) {
        const result = this.toJSON();
        // Add additional high entropy values based on hints
        // Modify these as per your requirements
        if (hints.includes("architecture")) {
          result.architecture = "x86";
        }
        if (hints.includes("bitness")) {
          result.bitness = "64";
        }
        if (hints.includes("model")) {
          result.model = "";
        }
        if (hints.includes("platformVersion")) {
          result.platformVersion = "10.0.0"; // Example value
        }
        if (hints.includes("uaFullVersion")) {
          result.uaFullVersion = browserMajorVersion;
        }
        if (hints.includes("fullVersionList")) {
          result.fullVersionList = this.brands;
        }
        return Promise.resolve(result);
      }
    }();

    // Apply the monkey patch.
    // BUG FIX: the original used `value: new self.NavigatorUAData()`, but
    // self.NavigatorUAData is already an instance (see above), not a
    // constructor — calling `new` on it throws a TypeError, so the patch
    // never applied. Use the instance directly.
    Object.defineProperty(navigator, "userAgentData", {
      value: self.NavigatorUAData,
      writable: false,
    });
  }

  spoofUserAgentData(UA);
  // TODO: use hideMonkeyPatch to hide overrides
})();
||||||
@@ -1,14 +0,0 @@
|
|||||||
package requestmodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SpoofXForwardedFor modifies the X-Forwarded-For header
|
|
||||||
// in some cases, a forward proxy may interpret this as the source IP
|
|
||||||
func SpoofXForwardedFor(ip string) proxychain.RequestModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
px.Request.Header.Set("X-FORWARDED-FOR", ip)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Submodule proxychain/requestmodifiers/vendor/ua-parser-js deleted from 3622b614a7
@@ -1,56 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"io"
|
|
||||||
"reflect"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Error struct {
|
|
||||||
Success bool `json:"success"`
|
|
||||||
Error ErrorDetails `json:"error"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ErrorDetails struct {
|
|
||||||
Message string `json:"message"`
|
|
||||||
Type string `json:"type"`
|
|
||||||
Cause string `json:"cause"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func CreateAPIErrReader(err error) io.ReadCloser {
|
|
||||||
if err == nil {
|
|
||||||
return io.NopCloser(bytes.NewBufferString(`{"success":false, "error": "No error provided"}`))
|
|
||||||
}
|
|
||||||
|
|
||||||
baseErr := getBaseError(err)
|
|
||||||
apiErr := Error{
|
|
||||||
Success: false,
|
|
||||||
Error: ErrorDetails{
|
|
||||||
Message: err.Error(),
|
|
||||||
Type: reflect.TypeOf(err).String(),
|
|
||||||
Cause: baseErr.Error(),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Serialize the APIError into JSON
|
|
||||||
jsonData, jsonErr := json.Marshal(apiErr)
|
|
||||||
if jsonErr != nil {
|
|
||||||
return io.NopCloser(bytes.NewBufferString(`{"success":false, "error": "Failed to serialize error"}`))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return the JSON data as an io.ReadCloser
|
|
||||||
return io.NopCloser(bytes.NewBuffer(jsonData))
|
|
||||||
}
|
|
||||||
|
|
||||||
func getBaseError(err error) error {
|
|
||||||
for {
|
|
||||||
unwrapped := errors.Unwrap(err)
|
|
||||||
if unwrapped == nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = unwrapped
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,174 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/go-shiori/dom"
|
|
||||||
"github.com/markusmobius/go-trafilatura"
|
|
||||||
"golang.org/x/net/html"
|
|
||||||
)
|
|
||||||
|
|
||||||
// =======================================================================================
// credit @joncrangle https://github.com/everywall/ladder/issues/38#issuecomment-1831252934

// ImageContent is an <img> element extracted from the article body.
type ImageContent struct {
	Type    string `json:"type"` // always "img"
	URL     string `json:"url"`
	Alt     string `json:"alt"`
	Caption string `json:"caption"`
}

// LinkContent is an <a> element: its href and its inner text.
type LinkContent struct {
	Type string `json:"type"` // always "a"
	Href string `json:"href"`
	Data string `json:"data"`
}

// TextContent is a heading or paragraph of plain text.
type TextContent struct {
	Type string `json:"type"` // tag name, e.g. "h1".."h5", or "p"
	Data string `json:"data"`
}

// ListContent is a <ul> or <ol> element and its items.
type ListContent struct {
	Type      string            `json:"type"` // "ul" or "ol"
	ListItems []ListItemContent `json:"listItems"`
}

// ListItemContent is the text of a single <li>.
type ListItemContent struct {
	Data string `json:"data"`
}

// JSONDocument is the top-level API representation of an extracted article:
// status, article metadata, the flattened content items, and the raw
// comments HTML.
type JSONDocument struct {
	Success  bool         `json:"success"`
	Error    ErrorDetails `json:"error"`
	Metadata struct {
		Title       string   `json:"title"`
		Author      string   `json:"author"`
		URL         string   `json:"url"`
		Hostname    string   `json:"hostname"`
		Image       string   `json:"image"`
		Description string   `json:"description"`
		Sitename    string   `json:"sitename"`
		Date        string   `json:"date"`
		Categories  []string `json:"categories"`
		Tags        []string `json:"tags"`
		License     string   `json:"license"`
	} `json:"metadata"`
	Content  []interface{} `json:"content"`
	Comments string        `json:"comments"`
}
|
|
||||||
func ExtractResultToAPIResponse(extract *trafilatura.ExtractResult) *JSONDocument {
|
|
||||||
jsonDoc := &JSONDocument{}
|
|
||||||
|
|
||||||
// Populate success
|
|
||||||
jsonDoc.Success = true
|
|
||||||
|
|
||||||
// Populate metadata
|
|
||||||
jsonDoc.Metadata.Title = extract.Metadata.Title
|
|
||||||
jsonDoc.Metadata.Author = extract.Metadata.Author
|
|
||||||
jsonDoc.Metadata.URL = extract.Metadata.URL
|
|
||||||
jsonDoc.Metadata.Hostname = extract.Metadata.Hostname
|
|
||||||
jsonDoc.Metadata.Description = extract.Metadata.Description
|
|
||||||
jsonDoc.Metadata.Image = extract.Metadata.Image
|
|
||||||
jsonDoc.Metadata.Sitename = extract.Metadata.Sitename
|
|
||||||
jsonDoc.Metadata.Date = extract.Metadata.Date.Format("2006-01-02")
|
|
||||||
jsonDoc.Metadata.Categories = extract.Metadata.Categories
|
|
||||||
jsonDoc.Metadata.Tags = extract.Metadata.Tags
|
|
||||||
jsonDoc.Metadata.License = extract.Metadata.License
|
|
||||||
jsonDoc.Metadata.Hostname = extract.Metadata.Hostname
|
|
||||||
|
|
||||||
// Populate content
|
|
||||||
if extract.ContentNode != nil {
|
|
||||||
jsonDoc.Content = parseContent(extract.ContentNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Populate comments
|
|
||||||
if extract.CommentsNode != nil {
|
|
||||||
jsonDoc.Comments = dom.OuterHTML(extract.CommentsNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
return jsonDoc
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseContent(node *html.Node) []interface{} {
|
|
||||||
var content []interface{}
|
|
||||||
|
|
||||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
|
||||||
switch child.Data {
|
|
||||||
case "img":
|
|
||||||
image := ImageContent{
|
|
||||||
Type: "img",
|
|
||||||
URL: dom.GetAttribute(child, "src"),
|
|
||||||
Alt: dom.GetAttribute(child, "alt"),
|
|
||||||
Caption: dom.GetAttribute(child, "caption"),
|
|
||||||
}
|
|
||||||
content = append(content, image)
|
|
||||||
|
|
||||||
case "a":
|
|
||||||
link := LinkContent{
|
|
||||||
Type: "a",
|
|
||||||
Href: dom.GetAttribute(child, "href"),
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, link)
|
|
||||||
|
|
||||||
case "h1":
|
|
||||||
text := TextContent{
|
|
||||||
Type: "h1",
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, text)
|
|
||||||
|
|
||||||
case "h2":
|
|
||||||
text := TextContent{
|
|
||||||
Type: "h2",
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, text)
|
|
||||||
|
|
||||||
case "h3":
|
|
||||||
text := TextContent{
|
|
||||||
Type: "h3",
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, text)
|
|
||||||
|
|
||||||
case "h4":
|
|
||||||
text := TextContent{
|
|
||||||
Type: "h4",
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, text)
|
|
||||||
|
|
||||||
case "h5":
|
|
||||||
text := TextContent{
|
|
||||||
Type: "h5",
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, text)
|
|
||||||
|
|
||||||
case "ul", "ol":
|
|
||||||
list := ListContent{
|
|
||||||
Type: child.Data,
|
|
||||||
ListItems: []ListItemContent{},
|
|
||||||
}
|
|
||||||
for listItem := child.FirstChild; listItem != nil; listItem = listItem.NextSibling {
|
|
||||||
if listItem.Data == "li" {
|
|
||||||
listItemContent := ListItemContent{
|
|
||||||
Data: dom.InnerText(listItem),
|
|
||||||
}
|
|
||||||
list.ListItems = append(list.ListItems, listItemContent)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
content = append(content, list)
|
|
||||||
|
|
||||||
default:
|
|
||||||
text := TextContent{
|
|
||||||
Type: "p",
|
|
||||||
Data: dom.InnerText(child),
|
|
||||||
}
|
|
||||||
content = append(content, text)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return content
|
|
||||||
}
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"io"
|
|
||||||
|
|
||||||
"github.com/markusmobius/go-trafilatura"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/api"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// APIContent creates an JSON representation of the article and returns it as an API response.
|
|
||||||
func APIContent() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// we set content-type twice here, in case another response modifier
|
|
||||||
// tries to forward over the original headers
|
|
||||||
chain.Context.Set("content-type", "application/json")
|
|
||||||
chain.Response.Header.Set("content-type", "application/json")
|
|
||||||
|
|
||||||
// extract dom contents
|
|
||||||
opts := trafilatura.Options{
|
|
||||||
IncludeImages: true,
|
|
||||||
IncludeLinks: true,
|
|
||||||
// FavorPrecision: true,
|
|
||||||
FallbackCandidates: nil, // TODO: https://github.com/markusmobius/go-trafilatura/blob/main/examples/chained/main.go
|
|
||||||
// implement fallbacks from "github.com/markusmobius/go-domdistiller" and "github.com/go-shiori/go-readability"
|
|
||||||
OriginalURL: chain.Request.URL,
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := trafilatura.Extract(chain.Response.Body, opts)
|
|
||||||
if err != nil {
|
|
||||||
chain.Response.Body = api.CreateAPIErrReader(err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
res := api.ExtractResultToAPIResponse(result)
|
|
||||||
jsonData, err := json.MarshalIndent(res, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.Response.Body = io.NopCloser(bytes.NewReader(jsonData))
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/api"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestCreateAPIErrReader(t *testing.T) {
|
|
||||||
_, baseErr := url.Parse("://this is an invalid url")
|
|
||||||
wrappedErr := fmt.Errorf("wrapped error: %w", baseErr)
|
|
||||||
|
|
||||||
readCloser := api.CreateAPIErrReader(wrappedErr)
|
|
||||||
defer readCloser.Close()
|
|
||||||
|
|
||||||
// Read and unmarshal the JSON output
|
|
||||||
data, err := io.ReadAll(readCloser)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to read from ReadCloser: %v", err)
|
|
||||||
}
|
|
||||||
fmt.Println(string(data))
|
|
||||||
|
|
||||||
var apiErr api.Error
|
|
||||||
err = json.Unmarshal(data, &apiErr)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to unmarshal JSON: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify the structure of the APIError
|
|
||||||
if apiErr.Success {
|
|
||||||
t.Errorf("Expected Success to be false, got true")
|
|
||||||
}
|
|
||||||
|
|
||||||
if apiErr.Error.Message != wrappedErr.Error() {
|
|
||||||
t.Errorf("Expected error message to be '%v', got '%v'", wrappedErr.Error(), apiErr.Error.Message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCreateAPIErrReader2(t *testing.T) {
|
|
||||||
_, baseErr := url.Parse("://this is an invalid url")
|
|
||||||
|
|
||||||
readCloser := api.CreateAPIErrReader(baseErr)
|
|
||||||
defer readCloser.Close()
|
|
||||||
|
|
||||||
// Read and unmarshal the JSON output
|
|
||||||
data, err := io.ReadAll(readCloser)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to read from ReadCloser: %v", err)
|
|
||||||
}
|
|
||||||
fmt.Println(string(data))
|
|
||||||
|
|
||||||
var apiErr api.Error
|
|
||||||
err = json.Unmarshal(data, &apiErr)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to unmarshal JSON: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify the structure of the APIError
|
|
||||||
if apiErr.Success {
|
|
||||||
t.Errorf("Expected Success to be false, got true")
|
|
||||||
}
|
|
||||||
|
|
||||||
if apiErr.Error.Message != baseErr.Error() {
|
|
||||||
t.Errorf("Expected error message to be '%v', got '%v'", baseErr.Error(), apiErr.Error.Message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed vendor/block_element_removal.js
|
|
||||||
var blockElementRemoval string
|
|
||||||
|
|
||||||
// BlockElementRemoval prevents paywall javascript from removing a
|
|
||||||
// particular element by detecting the removal, then immediately reinserting it.
|
|
||||||
// This is useful when a page will return a "fake" 404, after flashing the content briefly.
|
|
||||||
// If the /outline/ API works, but the regular API doesn't, try this modifier.
|
|
||||||
func BlockElementRemoval(cssSelector string) proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
params := map[string]string{
|
|
||||||
// ie: "div.article-content"
|
|
||||||
"{{CSS_SELECTOR}}": cssSelector,
|
|
||||||
}
|
|
||||||
|
|
||||||
rr := rewriters.NewScriptInjectorRewriterWithParams(
|
|
||||||
blockElementRemoval,
|
|
||||||
rewriters.BeforeDOMContentLoaded,
|
|
||||||
params,
|
|
||||||
)
|
|
||||||
|
|
||||||
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
|
|
||||||
chain.Response.Body = htmlRewriter
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// BlockThirdPartyScripts rewrites HTML and injects JS to block all third party JS from loading.
|
|
||||||
func BlockThirdPartyScripts() proxychain.ResponseModification {
|
|
||||||
// TODO: monkey patch fetch and XMLHttpRequest to firewall 3P JS as well.
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// proxyURL is the URL of the ladder: http://localhost:8080 (ladder)
|
|
||||||
originalURI := chain.Context.Request().URI()
|
|
||||||
proxyURL := fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host())
|
|
||||||
|
|
||||||
// replace http.Response.Body with a readcloser that wraps the original, modifying the html attributes
|
|
||||||
rr := rewriters.NewBlockThirdPartyScriptsRewriter(chain.Request.URL, proxyURL)
|
|
||||||
blockJSRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
|
|
||||||
chain.Response.Body = blockJSRewriter
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// BypassCORS modifies response headers to prevent the browser
|
|
||||||
// from enforcing any CORS restrictions. This should run at the end of the chain.
|
|
||||||
func BypassCORS() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceResponseModifications(
|
|
||||||
SetResponseHeader("Access-Control-Allow-Origin", "*"),
|
|
||||||
SetResponseHeader("Access-Control-Expose-Headers", "*"),
|
|
||||||
SetResponseHeader("Access-Control-Allow-Credentials", "true"),
|
|
||||||
SetResponseHeader("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, HEAD, OPTIONS, PATCH"),
|
|
||||||
SetResponseHeader("Access-Control-Allow-Headers", "*"),
|
|
||||||
DeleteResponseHeader("X-Frame-Options"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// TODO: handle edge case where CSP is specified in meta tag:
|
|
||||||
// <meta http-equiv="Content-Security-Policy" content="default-src 'self'">
|
|
||||||
|
|
||||||
// BypassContentSecurityPolicy modifies response headers to prevent the browser
|
|
||||||
// from enforcing any CSP restrictions. This should run at the end of the chain.
|
|
||||||
func BypassContentSecurityPolicy() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.AddOnceResponseModifications(
|
|
||||||
DeleteResponseHeader("Content-Security-Policy"),
|
|
||||||
DeleteResponseHeader("Content-Security-Policy-Report-Only"),
|
|
||||||
DeleteResponseHeader("X-Content-Security-Policy"),
|
|
||||||
DeleteResponseHeader("X-WebKit-CSP"),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetContentSecurityPolicy overwrites the Content-Security-Policy response
// header with the provided policy string, replacing any upstream value.
func SetContentSecurityPolicy(csp string) proxychain.ResponseModification {
	return func(chain *proxychain.ProxyChain) error {
		chain.Response.Header.Set("Content-Security-Policy", csp)
		return nil
	}
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// DeleteLocalStorageData deletes localstorage cookies.
|
|
||||||
// If the page works once in a fresh incognito window, but fails
|
|
||||||
// for subsequent loads, try this response modifier alongside
|
|
||||||
// DeleteSessionStorageData and DeleteIncomingCookies
|
|
||||||
func DeleteLocalStorageData() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.AddOnceResponseModifications(
|
|
||||||
InjectScriptBeforeDOMContentLoaded(`window.sessionStorage.clear()`),
|
|
||||||
InjectScriptAfterDOMContentLoaded(`window.sessionStorage.clear()`),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// DeleteSessionStorageData deletes localstorage cookies.
|
|
||||||
// If the page works once in a fresh incognito window, but fails
|
|
||||||
// for subsequent loads, try this response modifier alongside
|
|
||||||
// DeleteLocalStorageData and DeleteIncomingCookies
|
|
||||||
func DeleteSessionStorageData() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.AddOnceResponseModifications(
|
|
||||||
InjectScriptBeforeDOMContentLoaded(`window.sessionStorage.clear()`),
|
|
||||||
InjectScriptAfterDOMContentLoaded(`window.sessionStorage.clear()`),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,53 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// forwardBlacklist holds (lowercased) response header names that must not be
// forwarded to the client: transport- and hop-by-hop-level headers that only
// make sense on the upstream connection.
// Initialized with a composite literal instead of an init() func so the value
// is set in one place with no side-effecting initialization order to reason about.
var forwardBlacklist = map[string]bool{
	"content-length":            true,
	"content-encoding":          true,
	"transfer-encoding":         true,
	"strict-transport-security": true,
	"connection":                true,
	"keep-alive":                true,
}
|
|
||||||
|
|
||||||
// ForwardResponseHeaders forwards the response headers from the upstream server to the client
|
|
||||||
func ForwardResponseHeaders() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// fmt.Println(chain.Response.Header)
|
|
||||||
for uname, headers := range chain.Response.Header {
|
|
||||||
name := strings.ToLower(uname)
|
|
||||||
if forwardBlacklist[name] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// patch location header to forward to proxy instead
|
|
||||||
if name == "location" {
|
|
||||||
u, err := url.Parse(chain.Context.BaseURL())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
newLocation := fmt.Sprintf("%s://%s/%s", u.Scheme, u.Host, headers[0])
|
|
||||||
chain.Context.Set("location", newLocation)
|
|
||||||
}
|
|
||||||
|
|
||||||
// forward headers
|
|
||||||
for _, value := range headers {
|
|
||||||
fmt.Println(name, value)
|
|
||||||
chain.Context.Set(name, value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,189 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"embed"
|
|
||||||
"fmt"
|
|
||||||
"html/template"
|
|
||||||
"io"
|
|
||||||
"log"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
|
|
||||||
"golang.org/x/net/html"
|
|
||||||
"golang.org/x/net/html/atom"
|
|
||||||
|
|
||||||
//"github.com/go-shiori/dom"
|
|
||||||
"github.com/markusmobius/go-trafilatura"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed vendor/generate_readable_outline.html
|
|
||||||
var templateFS embed.FS
|
|
||||||
|
|
||||||
// GenerateReadableOutline creates an reader-friendly distilled representation of the article.
|
|
||||||
// This is a reliable way of bypassing soft-paywalled articles, where the content is hidden, but still present in the DOM.
|
|
||||||
func GenerateReadableOutline() proxychain.ResponseModification {
|
|
||||||
// get template only once, and resuse for subsequent calls
|
|
||||||
f := "vendor/generate_readable_outline.html"
|
|
||||||
tmpl, err := template.ParseFS(templateFS, f)
|
|
||||||
if err != nil {
|
|
||||||
panic(fmt.Errorf("tx.GenerateReadableOutline Error: %s not found", f))
|
|
||||||
}
|
|
||||||
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// ===========================================================
|
|
||||||
// 1. extract dom contents using reading mode algo
|
|
||||||
// ===========================================================
|
|
||||||
opts := trafilatura.Options{
|
|
||||||
IncludeImages: false,
|
|
||||||
IncludeLinks: true,
|
|
||||||
FavorRecall: true,
|
|
||||||
Deduplicate: true,
|
|
||||||
FallbackCandidates: nil, // TODO: https://github.com/markusmobius/go-trafilatura/blob/main/examples/chained/main.go
|
|
||||||
// implement fallbacks from "github.com/markusmobius/go-domdistiller" and "github.com/go-shiori/go-readability"
|
|
||||||
OriginalURL: chain.Request.URL,
|
|
||||||
}
|
|
||||||
|
|
||||||
extract, err := trafilatura.Extract(chain.Response.Body, opts)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// 2. render generate_readable_outline.html template using metadata from step 1
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
// render DOM to string without H1 title
|
|
||||||
removeFirstH1(extract.ContentNode)
|
|
||||||
// rewrite all links to stay on /outline/ path
|
|
||||||
rewriteHrefLinks(extract.ContentNode, chain.Context.BaseURL(), chain.APIPrefix)
|
|
||||||
var b bytes.Buffer
|
|
||||||
html.Render(&b, extract.ContentNode)
|
|
||||||
distilledHTML := b.String()
|
|
||||||
|
|
||||||
// populate template parameters
|
|
||||||
data := map[string]interface{}{
|
|
||||||
"Success": true,
|
|
||||||
"Image": extract.Metadata.Image,
|
|
||||||
"Description": extract.Metadata.Description,
|
|
||||||
"Sitename": extract.Metadata.Sitename,
|
|
||||||
"Hostname": extract.Metadata.Hostname,
|
|
||||||
"Url": "/" + chain.Request.URL.String(),
|
|
||||||
"Title": extract.Metadata.Title, // todo: modify CreateReadableDocument so we don't have <h1> titles duplicated?
|
|
||||||
"Date": extract.Metadata.Date.String(),
|
|
||||||
"Author": createWikipediaSearchLinks(extract.Metadata.Author),
|
|
||||||
//"Author": extract.Metadata.Author,
|
|
||||||
"Body": distilledHTML,
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// 3. queue sending the response back to the client by replacing the response body
|
|
||||||
// (the response body will be read as a stream in proxychain.Execute() later on.)
|
|
||||||
// ============================================================================
|
|
||||||
pr, pw := io.Pipe() // pipe io.writer contents into io.reader
|
|
||||||
|
|
||||||
// Use a goroutine for writing to the pipe so we don't deadlock the request
|
|
||||||
go func() {
|
|
||||||
defer pw.Close()
|
|
||||||
|
|
||||||
err := tmpl.Execute(pw, data) // <- render template
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("WARN: GenerateReadableOutline template rendering error: %s\n", err)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
chain.Context.Set("content-type", "text/html")
|
|
||||||
chain.Response.Body = pr // <- replace response body reader with our new reader from pipe
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// =============================================
|
|
||||||
// DOM Rendering helpers
|
|
||||||
// =============================================
|
|
||||||
|
|
||||||
func removeFirstH1(n *html.Node) {
|
|
||||||
var recurse func(*html.Node) bool
|
|
||||||
recurse = func(n *html.Node) bool {
|
|
||||||
if n.Type == html.ElementNode && n.DataAtom == atom.H1 {
|
|
||||||
return true // Found the first H1, return true to stop
|
|
||||||
}
|
|
||||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
|
||||||
if recurse(c) {
|
|
||||||
n.RemoveChild(c)
|
|
||||||
return false // Removed first H1, no need to continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
recurse(n)
|
|
||||||
}
|
|
||||||
|
|
||||||
func rewriteHrefLinks(n *html.Node, baseURL string, apiPath string) {
|
|
||||||
u, err := url.Parse(baseURL)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("GenerateReadableOutline :: rewriteHrefLinks error - %s\n", err)
|
|
||||||
}
|
|
||||||
apiPath = strings.Trim(apiPath, "/")
|
|
||||||
proxyURL := fmt.Sprintf("%s://%s", u.Scheme, u.Host)
|
|
||||||
newProxyURL := fmt.Sprintf("%s/%s", proxyURL, apiPath)
|
|
||||||
|
|
||||||
var recurse func(*html.Node) bool
|
|
||||||
recurse = func(n *html.Node) bool {
|
|
||||||
if n.Type == html.ElementNode && n.DataAtom == atom.A {
|
|
||||||
for i := range n.Attr {
|
|
||||||
attr := n.Attr[i]
|
|
||||||
if attr.Key != "href" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// rewrite url on a.href: http://localhost:8080/https://example.com -> http://localhost:8080/outline/https://example.com
|
|
||||||
attr.Val = strings.Replace(attr.Val, proxyURL, newProxyURL, 1)
|
|
||||||
// rewrite relative URLs too
|
|
||||||
if strings.HasPrefix(attr.Val, "/") {
|
|
||||||
attr.Val = fmt.Sprintf("/%s%s", apiPath, attr.Val)
|
|
||||||
}
|
|
||||||
n.Attr[i].Val = attr.Val
|
|
||||||
log.Println(attr.Val)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
|
||||||
recurse(c)
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
recurse(n)
|
|
||||||
}
|
|
||||||
|
|
||||||
// createWikipediaSearchLinks takes in comma or semicolon separated terms,
// then turns them into <a> links searching Wikipedia for each term.
// A comma is appended to the last link of every semicolon group except the
// final one, preserving the original grouping in the rendered output.
func createWikipediaSearchLinks(searchTerms string) string {
	semiColonSplit := strings.Split(searchTerms, ";")

	var links []string
	for i, termGroup := range semiColonSplit {
		for _, term := range strings.Split(termGroup, ",") {
			trimmedTerm := strings.TrimSpace(term)
			if trimmedTerm == "" {
				continue
			}

			encodedTerm := url.QueryEscape(trimmedTerm)
			wikiURL := fmt.Sprintf("https://en.wikipedia.org/w/index.php?search=%s", encodedTerm)
			links = append(links, fmt.Sprintf("<a href=\"%s\">%s</a>", wikiURL, trimmedTerm))
		}

		// If it's not the last element in semiColonSplit, add a comma to the last link.
		// FIX: guard len(links) > 0 — an empty leading group (e.g. input ";")
		// previously indexed links[-1] and panicked.
		if i < len(semiColonSplit)-1 && len(links) > 0 {
			links[len(links)-1] = links[len(links)-1] + ","
		}
	}

	return strings.Join(links, " ")
}
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// injectScript modifies HTTP responses
|
|
||||||
// to execute javascript at a particular time.
|
|
||||||
func injectScript(js string, execTime rewriters.ScriptExecTime) proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
rr := rewriters.NewScriptInjectorRewriter(js, execTime)
|
|
||||||
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
|
|
||||||
chain.Response.Body = htmlRewriter
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// InjectScriptBeforeDOMContentLoaded modifies HTTP responses to inject JS that
// runs before DOM content is loaded (emitted as a script tag in the head).
func InjectScriptBeforeDOMContentLoaded(js string) proxychain.ResponseModification {
	return injectScript(js, rewriters.BeforeDOMContentLoaded)
}
|
|
||||||
|
|
||||||
// InjectScriptAfterDOMContentLoaded modifies HTTP responses to inject JS that
// runs after DOM content is loaded (emitted as a script tag in the head).
func InjectScriptAfterDOMContentLoaded(js string) proxychain.ResponseModification {
	return injectScript(js, rewriters.AfterDOMContentLoaded)
}
|
|
||||||
|
|
||||||
// InjectScriptAfterDOMIdle modifies HTTP responses to inject JS that runs
// after the DOM goes idle (ie: after a js framework has finished rendering).
func InjectScriptAfterDOMIdle(js string) proxychain.ResponseModification {
	return injectScript(js, rewriters.AfterDOMIdle)
}
|
|
||||||
@@ -1,111 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
http "github.com/bogdanfinn/fhttp"
|
|
||||||
//"net/http"
|
|
||||||
//http "github.com/Danny-Dasilva/fhttp"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// DeleteIncomingCookies prevents ALL cookies from being sent from the proxy server
|
|
||||||
// back down to the client.
|
|
||||||
func DeleteIncomingCookies(_ ...string) proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
chain.Response.Header.Del("Set-Cookie")
|
|
||||||
chain.AddOnceResponseModifications(
|
|
||||||
InjectScriptBeforeDOMContentLoaded(`document.cookie = ""`),
|
|
||||||
InjectScriptAfterDOMContentLoaded(`document.cookie = ""`),
|
|
||||||
)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteIncomingCookiesExcept prevents non-whitelisted cookies from being sent from the proxy server
|
|
||||||
// to the client. Cookies whose names are in the whitelist are not removed.
|
|
||||||
func DeleteIncomingCookiesExcept(whitelist ...string) proxychain.ResponseModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
// Convert whitelist slice to a map for efficient lookups
|
|
||||||
whitelistMap := make(map[string]struct{})
|
|
||||||
for _, cookieName := range whitelist {
|
|
||||||
whitelistMap[cookieName] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the response has no cookies, return early
|
|
||||||
if px.Response.Header == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filter the cookies in the response
|
|
||||||
filteredCookies := []string{}
|
|
||||||
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
|
|
||||||
cookie := parseCookie(cookieStr)
|
|
||||||
|
|
||||||
if _, found := whitelistMap[cookie.Name]; found {
|
|
||||||
filteredCookies = append(filteredCookies, cookieStr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the Set-Cookie header with the filtered cookies
|
|
||||||
if len(filteredCookies) > 0 {
|
|
||||||
px.Response.Header["Set-Cookie"] = filteredCookies
|
|
||||||
} else {
|
|
||||||
px.Response.Header.Del("Set-Cookie")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseCookie parses a cookie string and returns an http.Cookie object.
|
|
||||||
func parseCookie(cookieStr string) *http.Cookie {
|
|
||||||
header := http.Header{}
|
|
||||||
header.Add("Set-Cookie", cookieStr)
|
|
||||||
request := http.Request{Header: header}
|
|
||||||
return request.Cookies()[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetIncomingCookies replaces the Set-Cookie header being sent from the proxy
// server down to the client with the given raw cookie string, overwriting any
// cookies the upstream server set.
func SetIncomingCookies(cookies string) proxychain.ResponseModification {
	return func(px *proxychain.ProxyChain) error {
		px.Response.Header.Set("Set-Cookie", cookies)
		return nil
	}
}
|
|
||||||
|
|
||||||
// SetIncomingCookie modifies a specific cookie in the response from the proxy server to the client.
|
|
||||||
func SetIncomingCookie(name string, val string) proxychain.ResponseModification {
|
|
||||||
return func(px *proxychain.ProxyChain) error {
|
|
||||||
if px.Response.Header == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
updatedCookies := []string{}
|
|
||||||
found := false
|
|
||||||
|
|
||||||
// Iterate over existing cookies and modify the one that matches the cookieName
|
|
||||||
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
|
|
||||||
cookie := parseCookie(cookieStr)
|
|
||||||
if cookie.Name == name {
|
|
||||||
// Replace the cookie with the new value
|
|
||||||
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
|
|
||||||
found = true
|
|
||||||
} else {
|
|
||||||
// Keep the cookie as is
|
|
||||||
updatedCookies = append(updatedCookies, cookieStr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the specified cookie wasn't found, add it
|
|
||||||
if !found {
|
|
||||||
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the Set-Cookie header
|
|
||||||
px.Response.Header["Set-Cookie"] = updatedCookies
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SetResponseHeader sets a header on the response delivered to the client.
// NOTE(review): this writes via chain.Context.Set while DeleteResponseHeader
// deletes via chain.Context.Response().Header — presumably both act on the
// same outgoing response; confirm the two paths are equivalent.
func SetResponseHeader(key string, value string) proxychain.ResponseModification {
	return func(chain *proxychain.ProxyChain) error {
		chain.Context.Set(key, value)
		return nil
	}
}
|
|
||||||
|
|
||||||
// DeleteResponseHeader removes the named header from the outgoing
// (client-facing) response.
func DeleteResponseHeader(key string) proxychain.ResponseModification {
	return func(chain *proxychain.ProxyChain) error {
		chain.Context.Response().Header.Del(key)
		return nil
	}
}
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed vendor/patch_dynamic_resource_urls.js
|
|
||||||
var patchDynamicResourceURLsScript string
|
|
||||||
|
|
||||||
// PatchDynamicResourceURLs patches the javascript runtime to rewrite URLs client-side.
|
|
||||||
// - This function is designed to allow the proxified page
|
|
||||||
// to still be browsible by routing all resource URLs through the proxy.
|
|
||||||
// - Native APIs capable of network requests will be hooked
|
|
||||||
// and the URLs arguments modified to point to the proxy instead.
|
|
||||||
// - fetch('/relative_path') -> fetch('/https://proxiedsite.com/relative_path')
|
|
||||||
// - Element.setAttribute('src', "/assets/img.jpg") -> Element.setAttribute('src', "/https://proxiedsite.com/assets/img.jpg") -> fetch('/https://proxiedsite.com/relative_path')
|
|
||||||
func PatchDynamicResourceURLs() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// this is the original URL sent by client:
|
|
||||||
// http://localhost:8080/http://proxiedsite.com/foo/bar
|
|
||||||
originalURI := chain.Context.Request().URI()
|
|
||||||
|
|
||||||
// this is the extracted URL that the client requests to proxy
|
|
||||||
// http://proxiedsite.com/foo/bar
|
|
||||||
reqURL := chain.Request.URL
|
|
||||||
|
|
||||||
params := map[string]string{
|
|
||||||
// ie: http://localhost:8080
|
|
||||||
"{{PROXY_ORIGIN}}": fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host()),
|
|
||||||
// ie: http://proxiedsite.com
|
|
||||||
"{{ORIGIN}}": fmt.Sprintf("%s://%s", reqURL.Scheme, reqURL.Host),
|
|
||||||
}
|
|
||||||
|
|
||||||
rr := rewriters.NewScriptInjectorRewriterWithParams(
|
|
||||||
patchDynamicResourceURLsScript,
|
|
||||||
rewriters.BeforeDOMContentLoaded,
|
|
||||||
params,
|
|
||||||
)
|
|
||||||
|
|
||||||
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
|
|
||||||
chain.Response.Body = htmlRewriter
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,101 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"embed"
|
|
||||||
"encoding/json"
|
|
||||||
"io"
|
|
||||||
"log"
|
|
||||||
"regexp"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:embed vendor/ddg-tracker-surrogates/mapping.json
|
|
||||||
var mappingJSON []byte
|
|
||||||
|
|
||||||
//go:embed vendor/ddg-tracker-surrogates/surrogates/*
|
|
||||||
var surrogateFS embed.FS
|
|
||||||
|
|
||||||
var rules domainRules
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
err := json.Unmarshal([]byte(mappingJSON), &rules)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("[ERROR]: PatchTrackerScripts: failed to deserialize ladder/proxychain/responsemodifiers/vendor/ddg-tracker-surrogates/mapping.json")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// mapping.json schema
|
|
||||||
type rule struct {
|
|
||||||
RegexRule *regexp.Regexp `json:"regexRule"`
|
|
||||||
Surrogate string `json:"surrogate"`
|
|
||||||
Action string `json:"action,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type domainRules map[string][]rule
|
|
||||||
|
|
||||||
func (r *rule) UnmarshalJSON(data []byte) error {
|
|
||||||
type Tmp struct {
|
|
||||||
RegexRule string `json:"regexRule"`
|
|
||||||
Surrogate string `json:"surrogate"`
|
|
||||||
Action string `json:"action,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var tmp Tmp
|
|
||||||
if err := json.Unmarshal(data, &tmp); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
regex := regexp.MustCompile(tmp.RegexRule)
|
|
||||||
|
|
||||||
r.RegexRule = regex
|
|
||||||
r.Surrogate = tmp.Surrogate
|
|
||||||
r.Action = tmp.Action
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// PatchTrackerScripts replaces any request to tracker scripts such as google analytics
|
|
||||||
// with a no-op stub that mocks the API structure of the original scripts they replace.
|
|
||||||
// Some pages depend on the existence of these structures for proper loading, so this may fix
|
|
||||||
// some broken elements.
|
|
||||||
// Surrogate script code borrowed from: DuckDuckGo Privacy Essentials browser extension for Firefox, Chrome. (Apache 2.0 license)
|
|
||||||
func PatchTrackerScripts() proxychain.ResponseModification {
|
|
||||||
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
|
|
||||||
// preflight checks
|
|
||||||
reqURL := chain.Request.URL.String()
|
|
||||||
isTracker := false
|
|
||||||
//
|
|
||||||
|
|
||||||
var surrogateScript io.ReadCloser
|
|
||||||
for domain, domainRules := range rules {
|
|
||||||
for _, rule := range domainRules {
|
|
||||||
if !rule.RegexRule.MatchString(reqURL) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// found tracker script, replacing response body with nop stub from
|
|
||||||
// ./vendor/ddg-tracker-surrogates/surrogates/{{rule.Surrogate}}
|
|
||||||
isTracker = true
|
|
||||||
script, err := surrogateFS.Open("vendor/ddg-tracker-surrogates/surrogates/" + rule.Surrogate)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
surrogateScript = io.NopCloser(script)
|
|
||||||
log.Printf("INFO: PatchTrackerScripts :: injecting surrogate for '%s' => 'surrogates/%s'\n", domain, rule.Surrogate)
|
|
||||||
break
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !isTracker {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
chain.Response.Body = surrogateScript
|
|
||||||
chain.Context.Set("content-type", "text/javascript")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
package responsemodifiers
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
|
|
||||||
|
|
||||||
"github.com/everywall/ladder/proxychain"
|
|
||||||
)
|
|
||||||
|
|
||||||
// RewriteHTMLResourceURLs modifies HTTP responses
|
|
||||||
// to rewrite URLs attributes in HTML content (such as src, href)
|
|
||||||
// - `<img src='/relative_path'>` -> `<img src='/https://proxiedsite.com/relative_path'>`
|
|
||||||
// - This function is designed to allow the proxified page
|
|
||||||
// to still be browsible by routing all resource URLs through the proxy.
|
|
||||||
func RewriteHTMLResourceURLs() proxychain.ResponseModification {
|
|
||||||
return func(chain *proxychain.ProxyChain) error {
|
|
||||||
// don't add rewriter if it's not even html
|
|
||||||
ct := chain.Response.Header.Get("content-type")
|
|
||||||
if !strings.HasPrefix(ct, "text/html") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// proxyURL is the URL of the ladder: http://localhost:8080 (ladder)
|
|
||||||
originalURI := chain.Context.Request().URI()
|
|
||||||
proxyURL := fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host())
|
|
||||||
|
|
||||||
// replace http.Response.Body with a readcloser that wraps the original, modifying the html attributes
|
|
||||||
rr := rewriters.NewHTMLTokenURLRewriter(chain.Request.URL, proxyURL)
|
|
||||||
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
|
|
||||||
chain.Response.Body = htmlRewriter
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
(() => {
	document.addEventListener("DOMContentLoaded", () => {
		watchForIdleDOM();
	});

	// Fires the injected payload once the DOM has stopped mutating for
	// `idleDelayMs` milliseconds, then stops observing.
	function watchForIdleDOM() {
		const idleDelayMs = 500; // adjust the delay as needed
		let idleTimer;

		const observer = new MutationObserver(() => {
			// Each burst of mutations restarts the countdown.
			clearTimeout(idleTimer);
			idleTimer = setTimeout(() => {
				execute();
				observer.disconnect(); // Disconnect after first execution
			}, idleDelayMs);
		});

		observer.observe(document.body, { attributes: false, childList: true, subtree: true });
	}

	// Placeholder below is string-replaced by the Go script injector.
	function execute() {
		"{{AFTER_DOM_IDLE_SCRIPT}}";
		//console.log('DOM is now idle. Executing...');
	}
})();
|
|
||||||
|
|
||||||
@@ -1,69 +0,0 @@
|
|||||||
package rewriters
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"golang.org/x/net/html"
|
|
||||||
"golang.org/x/net/html/atom"
|
|
||||||
)
|
|
||||||
|
|
||||||
// BlockThirdPartyScriptsRewriter implements HTMLTokenRewriter
|
|
||||||
// and blocks 3rd party JS in script tags by replacing the src attribute value "blocked"
|
|
||||||
type BlockThirdPartyScriptsRewriter struct {
|
|
||||||
baseURL *url.URL
|
|
||||||
proxyURL string // ladder URL, not proxied site URL
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewBlockThirdPartyScriptsRewriter creates a new instance of BlockThirdPartyScriptsRewriter.
|
|
||||||
// This rewriter will strip out 3rd party JS URLs from script tags.
|
|
||||||
func NewBlockThirdPartyScriptsRewriter(baseURL *url.URL, proxyURL string) *BlockThirdPartyScriptsRewriter {
|
|
||||||
return &BlockThirdPartyScriptsRewriter{
|
|
||||||
baseURL: baseURL,
|
|
||||||
proxyURL: proxyURL,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BlockThirdPartyScriptsRewriter) ShouldModify(token *html.Token) bool {
|
|
||||||
if token.DataAtom != atom.Script {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// check for 3p .js urls in html elements
|
|
||||||
for i := range token.Attr {
|
|
||||||
attr := token.Attr[i]
|
|
||||||
switch {
|
|
||||||
case attr.Key != "src":
|
|
||||||
continue
|
|
||||||
case strings.HasPrefix(attr.Val, "/"):
|
|
||||||
return false
|
|
||||||
case !strings.HasPrefix(attr.Val, "http"):
|
|
||||||
return false
|
|
||||||
case strings.HasPrefix(attr.Val, r.proxyURL):
|
|
||||||
return false
|
|
||||||
case strings.HasPrefix(attr.Val, fmt.Sprintf("%s://%s", r.baseURL.Scheme, r.baseURL.Hostname())):
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *BlockThirdPartyScriptsRewriter) ModifyToken(token *html.Token) (string, string) {
|
|
||||||
for i := range token.Attr {
|
|
||||||
attr := &token.Attr[i]
|
|
||||||
if attr.Key != "src" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if !strings.HasPrefix(attr.Val, "http") {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
log.Printf("INFO: blocked 3P js: '%s' on '%s'\n", attr.Val, r.baseURL.String())
|
|
||||||
attr.Key = "blocked"
|
|
||||||
}
|
|
||||||
return "", ""
|
|
||||||
}
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
package rewriters
|
|
||||||
|
|
||||||
// todo: implement
|
|
||||||
@@ -1,133 +0,0 @@
|
|||||||
package rewriters
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"io"
|
|
||||||
|
|
||||||
"golang.org/x/net/html"
|
|
||||||
)
|
|
||||||
|
|
||||||
// IHTMLTokenRewriter defines an interface for modifying HTML tokens.
type IHTMLTokenRewriter interface {
	// ShouldModify determines whether a given HTML token requires modification.
	// It is consulted once per token; returning false skips ModifyToken.
	ShouldModify(*html.Token) bool

	// ModifyToken applies modifications to a given HTML token.
	// It returns strings representing content to be prepended and
	// appended to the token. If no modifications are required or if an error occurs,
	// it returns empty strings for both 'prepend' and 'append'.
	// Note: The original token is not modified if an error occurs.
	ModifyToken(*html.Token) (prepend, append string)
}

// HTMLRewriter is a struct that can take multiple TokenHandlers and process all
// HTML tokens from http.Response.Body in a single pass, making changes and returning a new io.ReadCloser
//
// - HTMLRewriter reads the http.Response.Body stream,
// parsing each HTML token one at a time and making modifications (defined by implementations of IHTMLTokenRewriter)
//
// - When ProxyChain.Execute() is called, the response body will be read from the server
// and pulled through each ResponseModification which wraps the ProxyChain.Response.Body
// without ever buffering the entire HTTP response in memory.
type HTMLRewriter struct {
	tokenizer             *html.Tokenizer      // pulls tokens from the wrapped source stream
	currentToken          *html.Token          // token currently being emitted; nil until the first Read
	tokenBuffer           *bytes.Buffer        // rendered "<prepend><token><append>" bytes awaiting Read
	currentTokenProcessed bool                 // true once tokenBuffer for currentToken is fully drained
	rewriters             []IHTMLTokenRewriter // applied in order to every token
}
|
|
||||||
|
|
||||||
// NewHTMLRewriter creates a new HTMLRewriter instance.
|
|
||||||
// It processes HTML tokens from an io.ReadCloser source (typically http.Response.Body)
|
|
||||||
// using a series of HTMLTokenRewriters. Each HTMLTokenRewriter in the 'rewriters' slice
|
|
||||||
// applies its specific modifications to the HTML tokens.
|
|
||||||
// The HTMLRewriter reads from the provided 'src', applies the modifications,
|
|
||||||
// and returns the processed content as a new io.ReadCloser.
|
|
||||||
// This new io.ReadCloser can be used to stream the modified content back to the client.
|
|
||||||
//
|
|
||||||
// Parameters:
|
|
||||||
// - src: An io.ReadCloser representing the source of the HTML content, such as http.Response.Body.
|
|
||||||
// - rewriters: A slice of HTMLTokenRewriters that define the modifications to be applied to the HTML tokens.
|
|
||||||
//
|
|
||||||
// Returns:
|
|
||||||
// - A pointer to an HTMLRewriter, which implements io.ReadCloser, containing the modified HTML content.
|
|
||||||
func NewHTMLRewriter(src io.ReadCloser, rewriters ...IHTMLTokenRewriter) *HTMLRewriter {
|
|
||||||
return &HTMLRewriter{
|
|
||||||
tokenizer: html.NewTokenizer(src),
|
|
||||||
currentToken: nil,
|
|
||||||
tokenBuffer: new(bytes.Buffer),
|
|
||||||
currentTokenProcessed: false,
|
|
||||||
rewriters: rewriters,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Close resets the internal state of HTMLRewriter, clearing buffers and token data.
|
|
||||||
func (r *HTMLRewriter) Close() error {
|
|
||||||
r.tokenBuffer.Reset()
|
|
||||||
r.currentToken = nil
|
|
||||||
r.currentTokenProcessed = false
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read processes the HTML content, rewriting URLs and managing the state of tokens.
// It implements io.Reader: each call drains the buffered bytes of the
// current token into p, pulling and rewriting the next token from the
// tokenizer once the previous token's buffer is exhausted.
func (r *HTMLRewriter) Read(p []byte) (int, error) {
	// Pull a fresh token on the first call (currentToken nil/empty) or
	// once the previous token's bytes have been fully handed out.
	if r.currentToken == nil || r.currentToken.Data == "" || r.currentTokenProcessed {
		tokenType := r.tokenizer.Next()

		// done reading html, close out reader
		if tokenType == html.ErrorToken {
			if r.tokenizer.Err() == io.EOF {
				return 0, io.EOF
			}

			return 0, r.tokenizer.Err()
		}

		// get the next token; reset buffer
		t := r.tokenizer.Token()
		r.currentToken = &t
		r.tokenBuffer.Reset()

		// buffer += "<prepends> <token> <appends>"
		// process token through all registered rewriters
		// rewriters will modify the token, and optionally
		// return a <prepend> or <append> string token
		appends := make([]string, 0, len(r.rewriters))
		for _, rewriter := range r.rewriters {
			if !rewriter.ShouldModify(r.currentToken) {
				continue
			}

			prepend, a := rewriter.ModifyToken(r.currentToken)
			// appends are deferred until after the token itself is written
			appends = append(appends, a)
			// add <prepends> to buffer
			r.tokenBuffer.WriteString(prepend)
		}

		// add <token> to buffer
		if tokenType == html.TextToken {
			// don't unescape textTokens (such as inline scripts).
			// Token.String() by default will escape the inputs, but
			// we don't want to modify the original source
			r.tokenBuffer.WriteString(r.currentToken.Data)
		} else {
			r.tokenBuffer.WriteString(r.currentToken.String())
		}

		// add <appends> to buffer
		for _, a := range appends {
			r.tokenBuffer.WriteString(a)
		}

		r.currentTokenProcessed = false
	}

	n, err := r.tokenBuffer.Read(p)
	if err == io.EOF || r.tokenBuffer.Len() == 0 {
		// Buffer drained: mark the token consumed so the next Read advances
		// the tokenizer.
		r.currentTokenProcessed = true
		err = nil // EOF in this context is expected and not an actual error
	}

	return n, err
}
|
|
||||||
@@ -1,288 +0,0 @@
|
|||||||
package rewriters
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"net/url"
|
|
||||||
"path"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"golang.org/x/net/html/atom"
|
|
||||||
|
|
||||||
"golang.org/x/net/html"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
	// rewriteAttrs maps tag name -> attribute names whose values are URLs
	// eligible for proxy rewriting.
	rewriteAttrs map[string]map[string]bool
	// specialRewriteAttrs maps tag -> attributes whose values embed URLs in
	// a non-trivial format (srcset lists, meta refresh directives).
	specialRewriteAttrs map[string]map[string]bool
	// schemeBlacklist lists URI schemes that must never be rewritten.
	schemeBlacklist map[string]bool
)

func init() {
	// define all tag/attributes which might contain URLs
	// to attempt to rewrite to point to proxy instead
	rewriteAttrs = map[string]map[string]bool{
		"img":        {"src": true, "srcset": true, "longdesc": true, "usemap": true},
		"a":          {"href": true},
		"form":       {"action": true},
		"link":       {"href": true, "manifest": true, "icon": true},
		"script":     {"src": true},
		"video":      {"src": true, "poster": true},
		"audio":      {"src": true},
		"iframe":     {"src": true, "longdesc": true},
		"embed":      {"src": true},
		"object":     {"data": true, "codebase": true},
		"source":     {"src": true, "srcset": true},
		"track":      {"src": true},
		"area":       {"href": true},
		"base":       {"href": true},
		"blockquote": {"cite": true},
		"del":        {"cite": true},
		"ins":        {"cite": true},
		"q":          {"cite": true},
		"body":       {"background": true},
		"button":     {"formaction": true},
		"input":      {"src": true, "formaction": true},
		"meta":       {"content": true},
	}

	// might contain URL but requires special handling
	specialRewriteAttrs = map[string]map[string]bool{
		"img":    {"srcset": true},
		"source": {"srcset": true},
		"meta":   {"content": true},
	}

	// define URIs to NOT rewrite
	// for example: don't overwrite <img src="data:image/png;base64;iVBORw...">"
	schemeBlacklist = map[string]bool{
		"data":       true,
		"tel":        true,
		"mailto":     true,
		"file":       true,
		"blob":       true,
		"javascript": true,
		"about":      true,
		"magnet":     true,
		"ws":         true,
		"wss":        true,
		"ftp":        true,
	}
}
|
|
||||||
|
|
||||||
// HTMLTokenURLRewriter implements HTMLTokenRewriter
|
|
||||||
// it rewrites URLs within HTML resources to use a specified proxy URL.
|
|
||||||
// <img src='/relative_path'> -> <img src='/https://proxiedsite.com/relative_path'>
|
|
||||||
type HTMLTokenURLRewriter struct {
|
|
||||||
baseURL *url.URL
|
|
||||||
proxyURL string // ladder URL, not proxied site URL
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewHTMLTokenURLRewriter creates a new instance of HTMLResourceURLRewriter.
|
|
||||||
// It initializes the tokenizer with the provided source and sets the proxy URL.
|
|
||||||
// baseURL might be https://medium.com/foobar
|
|
||||||
// proxyURL is http://localhost:8080
|
|
||||||
func NewHTMLTokenURLRewriter(baseURL *url.URL, proxyURL string) *HTMLTokenURLRewriter {
|
|
||||||
return &HTMLTokenURLRewriter{
|
|
||||||
baseURL: baseURL,
|
|
||||||
proxyURL: proxyURL,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *HTMLTokenURLRewriter) ShouldModify(token *html.Token) bool {
|
|
||||||
// fmt.Printf("touch token: %s\n", token.String())
|
|
||||||
attrLen := len(token.Attr)
|
|
||||||
if attrLen == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if token.Type == html.StartTagToken {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
if token.Type == html.SelfClosingTagToken {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// ModifyToken rewrites every URL-bearing attribute on the token so the
// browser fetches it through the proxy. It never prepends or appends
// content (always returns "", "").
func (r *HTMLTokenURLRewriter) ModifyToken(token *html.Token) (string, string) {
	for i := range token.Attr {
		attr := &token.Attr[i]

		// NOTE: `continue` inside this switch advances the attribute loop.
		switch {
		// don't touch tag/attributes that don't contain URIs
		case !rewriteAttrs[token.Data][attr.Key]:
			continue
		// don't touch attributes with special URIs (like data:)
		case schemeBlacklist[strings.Split(attr.Val, ":")[0]]:
			continue
		// don't double-overwrite the url
		case strings.HasPrefix(attr.Val, r.proxyURL):
			continue
		case strings.HasPrefix(attr.Val, "/http://"):
			continue
		case strings.HasPrefix(attr.Val, "/https://"):
			continue
		// handle special rewrites (srcset lists, meta refresh directives)
		case specialRewriteAttrs[token.Data][attr.Key]:
			r.handleSpecialAttr(token, attr, r.baseURL)
			continue
		default:
			// rewrite url
			handleURLPart(attr, r.baseURL)
		}
	}
	return "", ""
}
|
|
||||||
|
|
||||||
// dispatcher for ModifyURL based on URI type
|
|
||||||
func handleURLPart(attr *html.Attribute, baseURL *url.URL) {
|
|
||||||
switch {
|
|
||||||
case strings.HasPrefix(attr.Val, "//"):
|
|
||||||
handleProtocolRelativePath(attr, baseURL)
|
|
||||||
case strings.HasPrefix(attr.Val, "/"):
|
|
||||||
handleRootRelativePath(attr, baseURL)
|
|
||||||
case strings.HasPrefix(attr.Val, "https://"):
|
|
||||||
handleAbsolutePath(attr, baseURL)
|
|
||||||
case strings.HasPrefix(attr.Val, "http://"):
|
|
||||||
handleAbsolutePath(attr, baseURL)
|
|
||||||
default:
|
|
||||||
handleDocumentRelativePath(attr, baseURL)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Protocol-relative URLs: These start with "//" and will use the same protocol (http or https) as the current page.
|
|
||||||
func handleProtocolRelativePath(attr *html.Attribute, baseURL *url.URL) {
|
|
||||||
attr.Val = strings.TrimPrefix(attr.Val, "/")
|
|
||||||
handleRootRelativePath(attr, baseURL)
|
|
||||||
log.Printf("proto rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Root-relative URLs: These are relative to the root path and start with a "/".
// Rewrites "/path" into "/<scheme>://<proxied-host>/path" (escaped) so the
// proxy can recover the full origin from the request path.
func handleRootRelativePath(attr *html.Attribute, baseURL *url.URL) {
	// Skip processing if it's already in the correct format
	if strings.HasPrefix(attr.Val, "/http://") || strings.HasPrefix(attr.Val, "/https://") {
		return
	}

	// doublecheck this is a valid relative URL by parsing it against a
	// throwaway host; only the error result is used.
	log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
	_, err := url.Parse(fmt.Sprintf("http://localhost.com%s", attr.Val))
	if err != nil {
		log.Println(err)
		return
	}

	// Prepend the proxied site's origin, escape, then add the leading "/"
	// that marks a proxied absolute URL.
	attr.Val = fmt.Sprintf(
		"%s://%s/%s",
		baseURL.Scheme,
		baseURL.Host,
		strings.TrimPrefix(attr.Val, "/"),
	)
	attr.Val = escape(attr.Val)
	attr.Val = fmt.Sprintf("/%s", attr.Val)

	log.Printf("root rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
|
|
||||||
|
|
||||||
// Document-relative URLs: These are relative to the current document's path and don't start with a "/".
|
|
||||||
func handleDocumentRelativePath(attr *html.Attribute, baseURL *url.URL) {
|
|
||||||
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
|
|
||||||
|
|
||||||
if strings.HasPrefix(attr.Val, "#") {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
relativePath := path.Join(strings.Trim(baseURL.RawPath, "/"), strings.Trim(attr.Val, "/"))
|
|
||||||
attr.Val = fmt.Sprintf(
|
|
||||||
"%s://%s/%s",
|
|
||||||
baseURL.Scheme,
|
|
||||||
strings.Trim(baseURL.Host, "/"),
|
|
||||||
relativePath,
|
|
||||||
)
|
|
||||||
attr.Val = escape(attr.Val)
|
|
||||||
attr.Val = fmt.Sprintf("/%s", attr.Val)
|
|
||||||
|
|
||||||
log.Printf("doc rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
// full URIs beginning with https?://proxiedsite.com
|
|
||||||
func handleAbsolutePath(attr *html.Attribute, _ *url.URL) {
|
|
||||||
// check if valid URL
|
|
||||||
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
|
|
||||||
|
|
||||||
u, err := url.Parse(attr.Val)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if !(u.Scheme == "http" || u.Scheme == "https") {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
attr.Val = fmt.Sprintf("/%s", escape(strings.TrimPrefix(attr.Val, "/")))
|
|
||||||
// attr.Val = fmt.Sprintf("/%s", escape(attr.Val))
|
|
||||||
|
|
||||||
log.Printf("abs url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
// handle edge cases for special attributes
|
|
||||||
func (r *HTMLTokenURLRewriter) handleSpecialAttr(token *html.Token, attr *html.Attribute, baseURL *url.URL) {
|
|
||||||
switch {
|
|
||||||
// srcset attribute doesn't contain a single URL but a comma-separated list of URLs, each potentially followed by a space and a descriptor (like a width, pixel density, or other conditions).
|
|
||||||
case token.DataAtom == atom.Img && attr.Key == "srcset":
|
|
||||||
handleSrcSet(attr, baseURL)
|
|
||||||
case token.DataAtom == atom.Source && attr.Key == "srcset":
|
|
||||||
handleSrcSet(attr, baseURL)
|
|
||||||
// meta with http-equiv="refresh": The content attribute of a meta tag, when used for a refresh directive, contains a time interval followed by a URL, like content="5;url=http://example.com/".
|
|
||||||
case token.DataAtom == atom.Meta && attr.Key == "content" && regexp.MustCompile(`^\d+;url=`).MatchString(attr.Val):
|
|
||||||
handleMetaRefresh(attr, baseURL)
|
|
||||||
default:
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func handleMetaRefresh(attr *html.Attribute, baseURL *url.URL) {
|
|
||||||
sec := strings.Split(attr.Val, ";url=")[0]
|
|
||||||
url := strings.Split(attr.Val, ";url=")[1]
|
|
||||||
f := &html.Attribute{Val: url, Key: "src"}
|
|
||||||
handleURLPart(f, baseURL)
|
|
||||||
attr.Val = fmt.Sprintf("%s;url=%s", sec, f.Val)
|
|
||||||
}
|
|
||||||
|
|
||||||
func handleSrcSet(attr *html.Attribute, baseURL *url.URL) {
|
|
||||||
var srcSetBuilder strings.Builder
|
|
||||||
srcSetItems := strings.Split(attr.Val, ",")
|
|
||||||
|
|
||||||
for i, srcItem := range srcSetItems {
|
|
||||||
srcParts := strings.Fields(srcItem)
|
|
||||||
|
|
||||||
if len(srcParts) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
f := &html.Attribute{Val: srcParts[0], Key: "src"}
|
|
||||||
handleURLPart(f, baseURL)
|
|
||||||
|
|
||||||
if i > 0 {
|
|
||||||
srcSetBuilder.WriteString(", ")
|
|
||||||
}
|
|
||||||
|
|
||||||
srcSetBuilder.WriteString(f.Val)
|
|
||||||
if len(srcParts) > 1 {
|
|
||||||
srcSetBuilder.WriteString(" ")
|
|
||||||
srcSetBuilder.WriteString(strings.Join(srcParts[1:], " "))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
attr.Val = srcSetBuilder.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
// escape percent-encodes str as a URL path segment but restores literal
// slashes, so the result still reads as a path.
func escape(str string) string {
	encoded := url.PathEscape(str)
	return strings.ReplaceAll(encoded, "%2F", "/")
}
|
|
||||||
@@ -1,103 +0,0 @@
|
|||||||
package rewriters
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"fmt"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"crypto/md5"
|
|
||||||
"encoding/hex"
|
|
||||||
"golang.org/x/net/html"
|
|
||||||
"golang.org/x/net/html/atom"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ScriptInjectorRewriter implements HTMLTokenRewriter.
// It injects JS into the page by appending a <script> tag after the
// opening <head> token, with execution deferred according to execTime.
type ScriptInjectorRewriter struct {
	execTime  ScriptExecTime // when the injected script should run
	script    string         // JS payload, wrapped in a run-once guard by the constructor
	scriptMD5 string         // hex MD5 of the original script; used as the <script> element id
}

// ScriptExecTime selects the point in the page lifecycle at which the
// injected script executes.
type ScriptExecTime int

const (
	BeforeDOMContentLoaded ScriptExecTime = iota // run as soon as the tag is parsed
	AfterDOMContentLoaded                        // run on the DOMContentLoaded event
	AfterDOMIdle                                 // run once DOM mutations go quiet
)
|
|
||||||
|
|
||||||
func (r *ScriptInjectorRewriter) ShouldModify(token *html.Token) bool {
|
|
||||||
// modify if token == <head>
|
|
||||||
return token.DataAtom == atom.Head && token.Type == html.StartTagToken
|
|
||||||
}
|
|
||||||
|
|
||||||
// afterDomIdleScriptInjector holds the JS template (embedded at build
// time) that defers execution of the payload until DOM mutations quiet
// down; ModifyToken string-replaces its placeholder with the payload.
//
//go:embed after_dom_idle_script_injector.js
var afterDomIdleScriptInjector string
|
|
||||||
|
|
||||||
func (r *ScriptInjectorRewriter) ModifyToken(_ *html.Token) (string, string) {
|
|
||||||
switch {
|
|
||||||
case r.execTime == BeforeDOMContentLoaded:
|
|
||||||
return "", fmt.Sprintf("\n<script id='%s'>\n%s\n</script>\n", r.scriptMD5, r.script)
|
|
||||||
|
|
||||||
case r.execTime == AfterDOMContentLoaded:
|
|
||||||
return "", fmt.Sprintf("\n<script id='%s'>\ndocument.addEventListener('DOMContentLoaded', () => { %s });\n</script>", r.scriptMD5, r.script)
|
|
||||||
|
|
||||||
case r.execTime == AfterDOMIdle:
|
|
||||||
s := strings.Replace(afterDomIdleScriptInjector, `'{{AFTER_DOM_IDLE_SCRIPT}}'`, r.script, 1)
|
|
||||||
return "", fmt.Sprintf("\n<script id='%s'>\n%s\n</script>\n", r.scriptMD5, s)
|
|
||||||
|
|
||||||
default:
|
|
||||||
return "", ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// generateMD5Hash takes a string and returns its MD5 hash as a
// hexadecimal string.
func generateMD5Hash(input string) string {
	digest := md5.Sum([]byte(input))
	return hex.EncodeToString(digest[:])
}
|
|
||||||
|
|
||||||
// applies parameters by string replacement of the template script
|
|
||||||
func (r *ScriptInjectorRewriter) applyParams(params map[string]string) {
|
|
||||||
// Sort the keys by length in descending order
|
|
||||||
keys := make([]string, 0, len(params))
|
|
||||||
for key := range params {
|
|
||||||
keys = append(keys, key)
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Slice(keys, func(i, j int) bool {
|
|
||||||
return len(keys[i]) > len(keys[j])
|
|
||||||
})
|
|
||||||
|
|
||||||
for _, key := range keys {
|
|
||||||
r.script = strings.ReplaceAll(r.script, key, params[key])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewScriptInjectorRewriter implements a HtmlTokenRewriter
|
|
||||||
// and injects JS into the page for execution at a particular time
|
|
||||||
func NewScriptInjectorRewriter(script string, execTime ScriptExecTime) *ScriptInjectorRewriter {
|
|
||||||
scriptMD5 := generateMD5Hash(script)
|
|
||||||
executeOnceScript := fmt.Sprintf(`if (!document.getElementById("x-%s")) { %s; document.getElementById("%s").id = "x-%s" };`, scriptMD5, script, scriptMD5, scriptMD5)
|
|
||||||
|
|
||||||
return &ScriptInjectorRewriter{
|
|
||||||
execTime: execTime,
|
|
||||||
script: executeOnceScript,
|
|
||||||
scriptMD5: scriptMD5,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewScriptInjectorRewriterWith implements a HtmlTokenRewriter
|
|
||||||
// and injects JS into the page for execution at a particular time
|
|
||||||
// accepting arguments into the script, which will be added via a string replace
|
|
||||||
// the params map represents the key-value pair of the params.
|
|
||||||
// the key will be string replaced with the value
|
|
||||||
func NewScriptInjectorRewriterWithParams(script string, execTime ScriptExecTime, params map[string]string) *ScriptInjectorRewriter {
|
|
||||||
rr := NewScriptInjectorRewriter(script, execTime)
|
|
||||||
rr.applyParams(params)
|
|
||||||
return rr
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user