Compare commits
165 Commits
v0.0.4
...
ladder_tes
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cb409f96d4 | ||
|
|
d71ebe5137 | ||
|
|
6c54d31086 | ||
|
|
5d55a2f3f0 | ||
|
|
7668713b1a | ||
|
|
bfd647e526 | ||
|
|
efa43a6f36 | ||
|
|
854dafbcfa | ||
|
|
a4e016b36c | ||
|
|
0e620e46ab | ||
|
|
0fc0942095 | ||
|
|
dab77d786f | ||
|
|
543192afbe | ||
|
|
79a229f28c | ||
|
|
6222476684 | ||
|
|
5d46adc486 | ||
|
|
1d88f14de2 | ||
|
|
5035f65d6b | ||
|
|
ee9066dedb | ||
|
|
98fa53287b | ||
|
|
f6341f2c3e | ||
|
|
6d8e943df5 | ||
|
|
68e5023ed9 | ||
|
|
8d00e29c43 | ||
|
|
c8d39ea21f | ||
|
|
dae4afb55e | ||
|
|
a83503170e | ||
|
|
0eef3e5808 | ||
|
|
7597ea2807 | ||
|
|
235dca8dd0 | ||
|
|
191279c00c | ||
|
|
f4060c3e78 | ||
|
|
55284f0b24 | ||
|
|
f7f4586032 | ||
|
|
fe881ca661 | ||
|
|
86700d8828 | ||
|
|
7be62e2735 | ||
|
|
5e76ff0879 | ||
|
|
ee641bf8f6 | ||
|
|
2fb089ea28 | ||
|
|
9f857eca8b | ||
|
|
0673255fc8 | ||
|
|
4dbc103cf7 | ||
|
|
514facd2c0 | ||
|
|
a8d920548c | ||
|
|
e87d19d7f5 | ||
|
|
531b7da811 | ||
|
|
9a53f28b3f | ||
|
|
6cbccbfadb | ||
|
|
b07d49f230 | ||
|
|
af10efb7f2 | ||
|
|
3f0f4207a1 | ||
|
|
2236c4fff9 | ||
|
|
78454f8713 | ||
|
|
6bff28e18d | ||
|
|
cdd429e4be | ||
|
|
11ee581fd4 | ||
|
|
4e44a24261 | ||
|
|
0ddb029aae | ||
|
|
a262afe035 | ||
|
|
fdca9d39d9 | ||
|
|
30a6ab501d | ||
|
|
7a51243ff4 | ||
|
|
c8b94dc702 | ||
|
|
fbc9567820 | ||
|
|
4d5c25c148 | ||
|
|
082868af2d | ||
|
|
a4abce78fb | ||
|
|
190de6d9c5 | ||
|
|
bdd19dcbb6 | ||
|
|
02e6b1c090 | ||
|
|
84a173a3af | ||
|
|
c91cbeb8a2 | ||
|
|
fb0ccc9ad5 | ||
|
|
d56864a841 | ||
|
|
31902c21d2 | ||
|
|
0011095fd3 | ||
|
|
e4e0619c9d | ||
|
|
cd14e879ba | ||
|
|
3b1152ade0 | ||
|
|
136387cd34 | ||
|
|
571eb4174d | ||
|
|
f87e35b5f8 | ||
|
|
8d1554e10e | ||
|
|
ff5bb61891 | ||
|
|
936b418b00 | ||
|
|
ac44f12d85 | ||
|
|
b6f0c644f8 | ||
|
|
66c4b3c911 | ||
|
|
924696c015 | ||
|
|
81aa00c2ea | ||
|
|
6c1f58e2e7 | ||
|
|
d3c995df34 | ||
|
|
6f4a2daeca | ||
|
|
f728b2c1de | ||
|
|
bc346a3954 | ||
|
|
5442da81b9 | ||
|
|
73b13914fe | ||
|
|
b127f81a9b | ||
|
|
79438a0b59 | ||
|
|
8058ebf0ca | ||
|
|
afca5eda80 | ||
|
|
1aa917e0c1 | ||
|
|
84617b32e3 | ||
|
|
501dfb106a | ||
|
|
719373bb7d | ||
|
|
a9f22ef428 | ||
|
|
54926a6644 | ||
|
|
63933991fd | ||
|
|
46ca742f92 | ||
|
|
a1e63c9ecb | ||
|
|
9e9c50181c | ||
|
|
43e90cf7f2 | ||
|
|
46a32ec548 | ||
|
|
bad7eebd36 | ||
|
|
51476759da | ||
|
|
1a708959f7 | ||
|
|
1f89661ed9 | ||
|
|
a7299049c3 | ||
|
|
3a1d2bc187 | ||
|
|
46c91a05d0 | ||
|
|
5df9a937c5 | ||
|
|
945f499e88 | ||
|
|
cdcbfd4ee9 | ||
|
|
a2f909501c | ||
|
|
cc56f03607 | ||
|
|
e3eb866d48 | ||
|
|
34a2683457 | ||
|
|
07513f6dc4 | ||
|
|
ec4dc5c2cc | ||
|
|
ba87d6b980 | ||
|
|
a6ee6aebfc | ||
|
|
7b519c7016 | ||
|
|
fba9db3d94 | ||
|
|
0dd5edd5ba | ||
|
|
4023718b12 | ||
|
|
cb02f52a46 | ||
|
|
3e20678e3d | ||
|
|
184a79b0af | ||
|
|
923d3178ec | ||
|
|
a1e5d540fe | ||
|
|
890d6929e0 | ||
|
|
8a6320ccca | ||
|
|
67f9af0296 | ||
|
|
cde6ed7229 | ||
|
|
96dd4de876 | ||
|
|
d5c58f42da | ||
|
|
d34c5680b3 | ||
|
|
6dfdaaa25b | ||
|
|
7ea7f253e8 | ||
|
|
bf2529753d | ||
|
|
e3389d2df3 | ||
|
|
b786796595 | ||
|
|
377a577c67 | ||
|
|
6eb7b481d8 | ||
|
|
2f1de95e06 | ||
|
|
7ae1a29932 | ||
|
|
63dcaeba3c | ||
|
|
62e03a384a | ||
|
|
7f4d749c55 | ||
|
|
e748cb09a5 | ||
|
|
c98e49f2b3 | ||
|
|
3e3eebcdc2 | ||
|
|
45a3fe2adf | ||
|
|
ec7f2089fc |
42
.github/workflows/build-css.yaml
vendored
Normal file
42
.github/workflows/build-css.yaml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
name: Build Tailwind CSS
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "handlers/form.html"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
tailwindbuilder:
|
||||
permissions:
|
||||
# Give the default GITHUB_TOKEN write permission to commit and push the
|
||||
# added or changed files to the repository.
|
||||
contents: write
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: Install pnpm
|
||||
uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 8
|
||||
-
|
||||
name: Build Tailwind CSS
|
||||
run: pnpm build
|
||||
-
|
||||
name: Commit generated stylesheet
|
||||
run: |
|
||||
if git diff --quiet cmd/styles.css; then
|
||||
echo "No changes to commit."
|
||||
exit 0
|
||||
else
|
||||
echo "Changes detected, committing..."
|
||||
git config --global user.name "Github action"
|
||||
git config --global user.email "username@users.noreply.github.com"
|
||||
git add cmd
|
||||
git commit -m "Generated stylesheet"
|
||||
git push
|
||||
fi
|
||||
6
.github/workflows/release-binaries.yaml
vendored
6
.github/workflows/release-binaries.yaml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
-
|
||||
name: Set version
|
||||
run: |
|
||||
echo -n $(git describe --tags --abbrev=0) > cmd/VERSION
|
||||
echo -n $(git describe --tags --abbrev=0) > handlers/VERSION
|
||||
-
|
||||
name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
@@ -36,5 +36,5 @@ jobs:
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GORELEASER_GITHUB_TOKEN: ${{ secrets.GORELEASER_GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.PAT_GORELEASER }}
|
||||
# GORELEASER_GITHUB_TOKEN: ${{ secrets.GORELEASER_GITHUB_TOKEN }}
|
||||
2
.github/workflows/release-docker.yaml
vendored
2
.github/workflows/release-docker.yaml
vendored
@@ -42,7 +42,7 @@ jobs:
|
||||
- name: Set version
|
||||
id: version
|
||||
run: |
|
||||
echo ${GITHUB_REF#refs/tags/v} > cmd/VERSION
|
||||
echo ${GITHUB_REF#refs/tags/v} > handlers/VERSION
|
||||
|
||||
# Install the cosign tool except on PR
|
||||
# https://github.com/sigstore/cosign-installer
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,4 +1,5 @@
|
||||
# dev binary
|
||||
ladder
|
||||
|
||||
VERSION
|
||||
VERSION
|
||||
output.css
|
||||
24
.golangci-lint.yaml
Normal file
24
.golangci-lint.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
linters:
|
||||
enable:
|
||||
- errcheck
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- staticcheck
|
||||
- unused
|
||||
- cyclop
|
||||
- dupword
|
||||
- wsl
|
||||
- varnamelen
|
||||
- usestdlibvars
|
||||
- unparam
|
||||
- revive
|
||||
- prealloc
|
||||
- misspell
|
||||
- gocyclo
|
||||
- funlen
|
||||
- bodyclose
|
||||
|
||||
linters-settings:
|
||||
cyclop:
|
||||
max-complexity: 15
|
||||
@@ -7,7 +7,7 @@ before:
|
||||
builds:
|
||||
-
|
||||
main: cmd/main.go
|
||||
binary: kubero
|
||||
binary: ladder
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
@@ -33,10 +33,10 @@ changelog:
|
||||
#brews:
|
||||
# -
|
||||
# repository:
|
||||
# owner: kubero-dev
|
||||
# owner: everywall
|
||||
# name: homebrew-ladder
|
||||
# token: "{{ .Env.GORELEASER_GITHUB_TOKEN }}"
|
||||
# homepage: "https://www.kubero.dev"
|
||||
# description: "Manage your kubero applications with the CLI"
|
||||
# homepage: "https://www.everyladder.dev"
|
||||
# description: "Manage your everyladder applications modify every website"
|
||||
# test: |
|
||||
# system "#{bin}/kubero", "--version"
|
||||
# system "#{bin}/everyladder", "--version"
|
||||
10
Makefile
Normal file
10
Makefile
Normal file
@@ -0,0 +1,10 @@
|
||||
lint:
|
||||
gofumpt -l -w .
|
||||
golangci-lint run -c .golangci-lint.yaml
|
||||
|
||||
go mod tidy
|
||||
go clean
|
||||
|
||||
install-linters:
|
||||
go install mvdan.cc/gofumpt@latest
|
||||
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.2
|
||||
147
README.md
147
README.md
@@ -3,6 +3,8 @@
|
||||
</p>
|
||||
|
||||
<h1 align="center">Ladder</h1>
|
||||
<div><img alt="License" src="https://img.shields.io/github/license/everywall/ladder"> <img alt="go.mod Go version " src="https://img.shields.io/github/go-mod/go-version/everywall/ladder"> <img alt="GitHub tag (with filter)" src="https://img.shields.io/github/v/tag/everywall/ladder"> <img alt="GitHub (Pre-)Release Date" src="https://img.shields.io/github/release-date-pre/everywall/ladder"> <img alt="GitHub Downloads all releases" src="https://img.shields.io/github/downloads/everywall/ladder/total"> <img alt="GitHub Build Status (with event)" src="https://img.shields.io/github/actions/workflow/status/everywall/ladder/release-binaries.yaml"></div>
|
||||
|
||||
|
||||
*Ladder is a web proxy to help bypass paywalls.* This is a selfhosted version of [1ft.io](https://1ft.io) and [12ft.io](https://12ft.io). It is inspired by [13ft](https://github.com/wasi-master/13ft).
|
||||
|
||||
@@ -10,39 +12,59 @@
|
||||
|
||||
Freedom of information is an essential pillar of democracy and informed decision-making. While media organizations have legitimate financial interests, it is crucial to strike a balance between profitability and the public's right to access information. The proliferation of paywalls raises concerns about the erosion of this fundamental freedom, and it is imperative for society to find innovative ways to preserve access to vital information without compromising the sustainability of journalism. In a world where knowledge should be shared and not commodified, paywalls should be critically examined to ensure that they do not undermine the principles of an open and informed society.
|
||||
|
||||
Certain sites may display missing images or encounter formatting issues. This can be attributed to the site's reliance on JavaScript or CSS for image and resource loading, which presents a limitation when accessed through this proxy. If you prefer a full experience, please concider buying a subscription for the site.
|
||||
> **Disclaimer:** This project is intended for educational purposes only. The author does not endorse or encourage any unethical or illegal activity. Use this tool at your own risk.
|
||||
|
||||
### Features
|
||||
- [x] Bypass Paywalls
|
||||
- [x] Remove CORS headers from responses, Assets, and images ...
|
||||
- [x] Remove CORS headers from responses, assets, and images ...
|
||||
- [x] Apply domain based ruleset/code to modify response / requested URL
|
||||
- [x] Keep site browsable
|
||||
- [x] Add a debug path
|
||||
- [x] Add a API
|
||||
- [x] Docker container
|
||||
- [x] API
|
||||
- [x] Fetch RAW HTML
|
||||
- [x] Custom User Agent
|
||||
- [x] Custom X-Forwarded-For IP
|
||||
- [x] [Docker container](https://github.com/everywall/ladder/pkgs/container/ladder) (amd64, arm64)
|
||||
- [x] Linux binary
|
||||
- [x] Mac OS binary
|
||||
- [x] Windows binary (Untested)
|
||||
- [x] Remove most of the ads (unexpected side effect)
|
||||
- [ ] Basic Auth
|
||||
- [x] Windows binary (untested)
|
||||
- [x] Removes most of the ads (unexpected side effect ¯\\\_(ツ)_/¯ )
|
||||
- [x] Basic Auth
|
||||
- [x] Disable logs
|
||||
- [x] No Tracking
|
||||
- [x] Limit the proxy to a list of domains
|
||||
- [x] Expose Ruleset to other ladders
|
||||
- [x] Fetch from Google Cache
|
||||
- [ ] Optional TOR proxy
|
||||
- [ ] A key to share only one URL
|
||||
|
||||
### Limitations
|
||||
Some sites do not expose their content to search engines, which means that the proxy cannot access the content. A future version will try to fetch the content from Google Cache.
|
||||
|
||||
Certain sites may display missing images or encounter formatting issues. This can be attributed to the site's reliance on JavaScript or CSS for image and resource loading, which presents a limitation when accessed through this proxy. If you prefer a full experience, please consider buying a subscription for the site.
|
||||
|
||||
## Installation
|
||||
|
||||
> **Warning:** If your instance will be publicly accessible, make sure to enable Basic Auth. This will prevent unauthorized users from using your proxy. If you do not enable Basic Auth, anyone can use your proxy to browse nasty/illegal stuff. And you will be responsible for it.
|
||||
|
||||
### Binary
|
||||
1) Download binary [here](https://github.com/kubero-dev/ladder/releases/latest)
|
||||
2) Unpack and run the binary `./ladder`
|
||||
1) Download binary [here](https://github.com/everywall/ladder/releases/latest)
|
||||
2) Unpack and run the binary `./ladder -r https://t.ly/14PSf`
|
||||
3) Open Browser (Default: http://localhost:8080)
|
||||
|
||||
### Docker
|
||||
```bash
|
||||
docker run -p 8080:8080 -d --name ladder ghcr.io/kubero-dev/ladder:latest
|
||||
docker run -p 8080:8080 -d --env RULESET=https://t.ly/14PSf --name ladder ghcr.io/everywall/ladder:latest
|
||||
```
|
||||
|
||||
### Docker Compose
|
||||
```bash
|
||||
wget https://raw.githubusercontent.com/kubero-dev/ladder/main/docker-compose.yaml
|
||||
curl https://raw.githubusercontent.com/everywall/ladder/main/docker-compose.yaml --output docker-compose.yaml
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
### Helm
|
||||
See [README.md](/helm-chart/README.md) in helm-chart sub-directory for more information.
|
||||
|
||||
## Usage
|
||||
|
||||
### Browser
|
||||
@@ -51,23 +73,112 @@ docker-compose up -d
|
||||
3) Press Enter
|
||||
|
||||
Or direct by appending the URL to the end of the proxy URL:
|
||||
http://localhost:8080/https://www.google.com
|
||||
|
||||
http://localhost:8080/https://www.example.com
|
||||
|
||||
Or create a bookmark with the following URL:
|
||||
```javascript
|
||||
javascript:window.location.href="http://localhost:8080/"+location.href
|
||||
```
|
||||
|
||||
### API
|
||||
```bash
|
||||
curl -X GET "http://localhost:8080/api/https://www.google.com"
|
||||
curl -X GET "http://localhost:8080/api/https://www.example.com"
|
||||
```
|
||||
|
||||
### Debug
|
||||
http://localhost:8080/debug/https://www.google.com
|
||||
### RAW
|
||||
http://localhost:8080/raw/https://www.example.com
|
||||
|
||||
|
||||
### Running Ruleset
|
||||
http://localhost:8080/ruleset
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
| Variable | Description | Value |
|
||||
| --- | --- | --- |
|
||||
| `PORT` | Port to listen on | `8080` |
|
||||
| `PREFORK` | Spawn multiple server instances | `false` |
|
||||
| `USER_AGENT` | User agent to emulate | `Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)` |
|
||||
| `X_FORWARDED_FOR` | IP forwarder address | `66.249.66.1` |
|
||||
| `USERPASS` | Enables Basic Auth, format `admin:123456` | `` |
|
||||
| `LOG_URLS` | Log fetched URL's | `true` |
|
||||
| `DISABLE_FORM` | Disables URL Form Frontpage | `false` |
|
||||
| `FORM_PATH` | Path to custom Form HTML | `` |
|
||||
| `RULESET` | Path or URL to a ruleset file, accepts local directories | `https://raw.githubusercontent.com/everywall/ladder-rules/main/ruleset.yaml` or `/path/to/my/rules.yaml` or `/path/to/my/rules/` |
|
||||
| `EXPOSE_RULESET` | Make your Ruleset available to other ladders | `true` |
|
||||
| `ALLOWED_DOMAINS` | Comma separated list of allowed domains. Empty = no limitations | `` |
|
||||
| `ALLOWED_DOMAINS_RULESET` | Allow Domains from Ruleset. false = no limitations | `false` |
|
||||
|
||||
`ALLOWED_DOMAINS` and `ALLOWED_DOMAINS_RULESET` are joined together. If both are empty, no limitations are applied.
|
||||
|
||||
### Ruleset
|
||||
|
||||
It is possible to apply custom rules to modify the response or the requested URL. This can be used to remove unwanted or modify elements from the page. The ruleset is a YAML file, a directory with YAML Files, or an URL to a YAML file that contains a list of rules for each domain. These rules are loaded on startup.
|
||||
|
||||
There is a basic ruleset available in a separate repository [ruleset.yaml](https://raw.githubusercontent.com/everywall/ladder-rules/main/ruleset.yaml). Feel free to add your own rules and create a pull request.
|
||||
|
||||
|
||||
```yaml
|
||||
- domain: example.com # Includes all subdomains
|
||||
domains: # Additional domains to apply the rule
|
||||
- www.example.de
|
||||
- www.beispiel.de
|
||||
headers:
|
||||
x-forwarded-for: none # override X-Forwarded-For header or delete with none
|
||||
referer: none # override Referer header or delete with none
|
||||
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
|
||||
content-security-policy: script-src 'self'; # override response header
|
||||
cookie: privacy=1
|
||||
regexRules:
|
||||
- match: <script\s+([^>]*\s+)?src="(/)([^"]*)"
|
||||
replace: <script $1 script="/https://www.example.com/$3"
|
||||
injections:
|
||||
- position: head # Position where to inject the code
|
||||
append: | # possible keys: append, prepend, replace
|
||||
<script>
|
||||
window.localStorage.clear();
|
||||
console.log("test");
|
||||
alert("Hello!");
|
||||
</script>
|
||||
- domain: www.anotherdomain.com # Domain where the rule applies
|
||||
paths: # Paths where the rule applies
|
||||
- /article
|
||||
googleCache: false # Use Google Cache to fetch the content
|
||||
regexRules: # Regex rules to apply
|
||||
- match: <script\s+([^>]*\s+)?src="(/)([^"]*)"
|
||||
replace: <script $1 script="/https://www.example.com/$3"
|
||||
injections:
|
||||
- position: .left-content article .post-title # Position where to inject the code into DOM
|
||||
replace: |
|
||||
<h1>My Custom Title</h1>
|
||||
- position: .left-content article # Position where to inject the code into DOM
|
||||
prepend: |
|
||||
<h2>Subtitle</h2>
|
||||
- domain: demo.com
|
||||
headers:
|
||||
content-security-policy: script-src 'self';
|
||||
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
|
||||
urlMods: # Modify the URL
|
||||
query:
|
||||
- key: amp # (this will append ?amp=1 to the URL)
|
||||
value: 1
|
||||
domain:
|
||||
- match: www # regex to match part of domain
|
||||
replace: amp # (this would modify the domain from www.demo.de to amp.demo.de)
|
||||
path:
|
||||
- match: ^ # regex to match part of path
|
||||
replace: /amp/ # (modify the url from https://www.demo.com/article/ to https://www.demo.de/amp/article/)
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
To run a development server at http://localhost:8080:
|
||||
|
||||
```bash
|
||||
echo "dev" > handlers/VERSION
|
||||
RULESET="./ruleset.yaml" go run cmd/main.go
|
||||
```
|
||||
|
||||
This project uses [pnpm](https://pnpm.io/) to build a stylesheet with the [Tailwind CSS](https://tailwindcss.com/) classes. For local development, if you modify styles in `form.html`, run `pnpm build` to generate a new stylesheet.
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Ladder</title>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css">
|
||||
</head>
|
||||
<style>
|
||||
body {
|
||||
background-color: #ffffff;
|
||||
}
|
||||
|
||||
header h1 {
|
||||
text-transform: uppercase;
|
||||
font-size: 70px;
|
||||
font-weight: 600;
|
||||
color: #fdfdfe;
|
||||
text-shadow: 0px 0px 5px #7AA7D1, 0px 0px 10px #7AA7D1, 0px 0px 10px #7AA7D1,
|
||||
0px 0px 20px #7AA7D1;
|
||||
}
|
||||
.logo-title {
|
||||
font-family: 'Arial', sans-serif;
|
||||
font-size: 2rem;
|
||||
color: #fff;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
.logo {
|
||||
text-align: center;
|
||||
}
|
||||
.github-corner {
|
||||
animation: octocat-wave 560ms ease-in-out;
|
||||
position:absolute;
|
||||
top:0;
|
||||
right:0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<a href="https://github.com/kubero-dev/ladder">
|
||||
<div class="github-corner" aria-label="View source on GitHub">
|
||||
<svg
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
version="1.1"
|
||||
width="146"
|
||||
height="146"
|
||||
id="svg2">
|
||||
<defs
|
||||
id="defs8">
|
||||
<filter
|
||||
height="1.096"
|
||||
y="-0.048"
|
||||
width="1.096"
|
||||
x="-0.048"
|
||||
style="color-interpolation-filters:sRGB"
|
||||
id="filter6">
|
||||
<feGaussianBlur
|
||||
stdDeviation="3"
|
||||
id="feGaussianBlur4" />
|
||||
</filter>
|
||||
</defs>
|
||||
<path
|
||||
d="M 152,140 6,-6 H 48 L 152,98 Z"
|
||||
style="opacity:0.8;filter:url(#filter6)"
|
||||
id="path10" />
|
||||
<path
|
||||
d="M 146,134 12,0 h 42 l 92,92 z"
|
||||
style="fill:#007200"
|
||||
id="path12" />
|
||||
<g
|
||||
aria-label="Fork me on GitHub"
|
||||
transform="rotate(45)"
|
||||
style="font-family:Collegiate;fill:#ffffff"
|
||||
id="g42">
|
||||
<path
|
||||
d="m 53.643,-19.486 c 0,0.688 -0.016,1.2 -0.064,1.504 h 2.08 c -0.048,-0.32 -0.064,-0.8 -0.064,-1.424 v -3.344 h 1.76 c 0.416,0 0.736,0.016 0.944,0.048 v -1.76 c -0.24,0.032 -0.592,0.048 -1.088,0.048 h -1.616 v -2.496 h 1.936 c 0.56,0 0.944,0.016 1.184,0.048 v -1.792 h -5.136 c 0.048,0.272 0.064,0.784 0.064,1.504 z"
|
||||
id="path14" />
|
||||
<path
|
||||
d="m 62.424,-17.87 c 1.008,0 1.776,-0.368 2.272,-1.088 0.432,-0.624 0.656,-1.472 0.656,-2.544 0,-2.416 -0.976,-3.616 -2.928,-3.616 -1.968,0 -2.96,1.2 -2.96,3.616 0,1.072 0.224,1.936 0.656,2.56 0.512,0.72 1.28,1.072 2.304,1.072 z m -0.016,-5.68 c 0.496,0 0.816,0.24 0.976,0.704 0.096,0.272 0.144,0.72 0.144,1.344 0,0.64 -0.048,1.088 -0.144,1.36 -0.16,0.464 -0.48,0.688 -0.976,0.688 -0.496,0 -0.816,-0.24 -0.976,-0.704 -0.096,-0.272 -0.144,-0.72 -0.144,-1.344 0,-0.624 0.048,-1.072 0.144,-1.344 0.16,-0.464 0.48,-0.704 0.976,-0.704 z"
|
||||
id="path16" />
|
||||
<path
|
||||
d="m 68.293,-17.982 c -0.032,-0.24 -0.048,-0.64 -0.048,-1.184 v -3.888 c 0.352,-0.304 0.672,-0.464 0.976,-0.464 0.224,0 0.48,0.096 0.752,0.288 v -1.808 c -0.224,-0.08 -0.432,-0.128 -0.624,-0.128 -0.448,0 -0.832,0.192 -1.152,0.56 v -0.48 h -1.744 c 0.032,0.192 0.048,0.544 0.048,1.04 v 4.976 c 0,0.512 -0.016,0.88 -0.048,1.088 z"
|
||||
id="path18" />
|
||||
<path
|
||||
d="m 72.857,-17.982 c -0.032,-0.24 -0.048,-0.64 -0.048,-1.184 v -2.448 l 1.472,2.816 c 0.208,0.384 0.32,0.656 0.368,0.816 h 1.872 l -2.352,-4.416 2.144,-2.752 h -2.064 c -0.08,0.176 -0.192,0.352 -0.352,0.56 l -1.088,1.44 v -4.496 c 0,-0.464 0.016,-0.8 0.048,-1.008 h -1.824 c 0.032,0.192 0.048,0.544 0.048,1.04 v 8.544 c 0,0.512 -0.016,0.88 -0.048,1.088 z"
|
||||
id="path20" />
|
||||
<path
|
||||
d="m 85.08,-24.478 c -0.384,-0.432 -0.896,-0.656 -1.52,-0.656 -0.416,0 -0.864,0.192 -1.328,0.56 v -0.512 l -1.76,-0.016 c 0.032,0.176 0.048,0.544 0.048,1.12 v 4.992 c 0,0.496 -0.016,0.832 -0.048,1.008 h 1.856 c 0,-0.064 -0.048,-0.64 -0.048,-1.008 v -3.984 c 0.304,-0.288 0.608,-0.432 0.928,-0.432 0.656,0 0.864,0.416 0.864,1.76 l -0.016,2.16 c 0,0.656 -0.032,1.168 -0.08,1.504 h 1.92 c -0.048,-0.256 -0.064,-0.752 -0.064,-1.472 v -2.192 c 0,-0.56 -0.048,-1.056 -0.144,-1.504 0.208,-0.176 0.544,-0.256 0.976,-0.256 0.64,0 0.96,0.592 0.96,1.76 v 2.16 c 0,0.656 -0.032,1.168 -0.08,1.504 h 1.904 c -0.048,-0.256 -0.064,-0.752 -0.064,-1.472 v -2.192 c 0,-0.96 -0.176,-1.744 -0.512,-2.368 -0.432,-0.752 -1.056,-1.12 -1.888,-1.12 -0.736,0 -1.376,0.224 -1.904,0.656 z"
|
||||
id="path22" />
|
||||
<path
|
||||
d="m 95.905,-20.99 c 0.032,-0.304 0.048,-0.624 0.048,-0.992 0,-0.944 -0.224,-1.696 -0.656,-2.256 -0.464,-0.592 -1.136,-0.896 -2.016,-0.896 -0.896,0 -1.6,0.368 -2.112,1.088 -0.464,0.656 -0.688,1.456 -0.688,2.432 0,1.136 0.272,2.048 0.832,2.704 0.576,0.72 1.392,1.072 2.448,1.072 0.496,0 1.056,-0.128 1.712,-0.368 v -1.712 c -0.464,0.304 -1.008,0.464 -1.6,0.464 -0.944,0 -1.44,-0.512 -1.52,-1.536 z m -2.576,-2.672 c 0.64,0 0.96,0.4 0.976,1.216 h -1.968 c 0.048,-0.816 0.368,-1.216 0.992,-1.216 z"
|
||||
id="path24" />
|
||||
<use
|
||||
xlink:href="#path16"
|
||||
transform="translate(40.438)"
|
||||
id="use26" />
|
||||
<path
|
||||
d="m 110.187,-25.15 c -0.496,0 -0.992,0.208 -1.472,0.64 v -0.576 h -1.76 c 0.032,0.176 0.048,0.56 0.048,1.184 v 4.912 c 0,0.496 -0.016,0.832 -0.048,1.008 h 1.856 c 0,-0.064 -0.048,-0.64 -0.048,-1.008 v -3.936 c 0.368,-0.352 0.736,-0.528 1.088,-0.528 0.784,0 1.168,0.608 1.152,1.808 l -0.016,2.16 c -0.016,0.752 -0.032,1.264 -0.064,1.504 h 1.92 c -0.048,-0.256 -0.064,-0.752 -0.064,-1.472 v -2.192 c 0,-0.944 -0.192,-1.744 -0.592,-2.384 -0.464,-0.752 -1.136,-1.136 -2,-1.12 z"
|
||||
id="path28" />
|
||||
<path
|
||||
d="m 123.877,-17.982 c 0.144,0.016 0.256,0.016 0.336,0 0,-0.192 -0.064,-0.768 -0.064,-1.36 v -1.856 c 0,-0.56 0.016,-1.056 0.064,-1.52 h -1.952 c 0.032,0.704 0.048,1.12 0.032,1.248 0,1.28 -0.512,1.92 -1.552,1.92 -1.264,0 -1.904,-1.264 -1.904,-3.776 0,-2.48 0.784,-3.728 2.352,-3.728 0.752,0 1.472,0.288 2.16,0.88 v -1.824 c -0.608,-0.528 -1.328,-0.8 -2.16,-0.8 -2.896,0 -4.416,1.904 -4.416,5.424 0,3.696 1.328,5.552 3.968,5.552 0.592,0 1.12,-0.128 1.584,-0.368 0.368,-0.208 0.624,-0.432 0.768,-0.688 z"
|
||||
id="path30" />
|
||||
<path
|
||||
d="m 126.49,-26.334 c 0.592,0 1.104,-0.544 1.104,-1.184 0,-0.656 -0.512,-1.2 -1.104,-1.2 -0.624,0 -1.12,0.544 -1.12,1.2 0,0.64 0.496,1.184 1.12,1.184 z m 0.896,8.352 c -0.016,-0.24 -0.032,-0.64 -0.032,-1.184 v -4.912 c 0,-0.464 0.016,-0.8 0.032,-1.008 h -1.808 c 0.016,0.192 0.032,0.544 0.032,1.04 v 4.976 c 0,0.512 -0.016,0.88 -0.032,1.088 z"
|
||||
id="path32" />
|
||||
<path
|
||||
d="m 130.783,-25.742 c 0,-0.256 0.016,-0.48 0.048,-0.688 h -1.856 c 0.032,0.176 0.048,0.416 0.048,0.72 v 0.624 h -0.784 v 1.552 c 0.224,-0.032 0.4,-0.048 0.544,-0.048 l 0.24,0.016 v 0.032 h -0.016 v 2.864 c 0,0.896 0.112,1.552 0.336,1.968 0.304,0.56 0.832,0.832 1.616,0.832 0.56,0 1.024,-0.112 1.424,-0.32 v -1.6 c -0.272,0.176 -0.56,0.272 -0.896,0.272 -0.464,0 -0.704,-0.352 -0.704,-1.072 v -2.976 h 0.688 c 0.256,0 0.592,0.032 0.704,0.032 v -1.552 h -1.392 z"
|
||||
id="path34" />
|
||||
<path
|
||||
d="m 140.259,-27.678 c 0,-0.416 0.016,-0.736 0.064,-0.976 h -2.096 c 0.048,0.24 0.064,0.688 0.064,1.344 v 2.8 h -2.912 v -3.024 c 0,-0.48 0.016,-0.848 0.064,-1.12 h -2.08 c 0.048,0.256 0.064,0.624 0.064,1.12 v 8.432 c 0,0.496 -0.016,0.864 -0.064,1.12 h 2.08 c -0.048,-0.24 -0.064,-0.656 -0.064,-1.232 v -3.552 h 2.912 v 3.568 c 0,0.528 -0.016,0.944 -0.064,1.216 h 2.096 c -0.048,-0.24 -0.064,-0.624 -0.064,-1.12 v -3.664 h 0.528 v -1.744 h -0.528 z"
|
||||
id="path36" />
|
||||
<path
|
||||
d="m 144.402,-17.918 c 0.56,0 1.072,-0.208 1.568,-0.64 v 0.576 h 1.744 c -0.016,-0.176 -0.032,-0.576 -0.032,-1.2 v -4.896 c 0,-0.496 0.016,-0.832 0.032,-1.008 h -1.856 c 0,0.048 0.064,0.64 0.064,1.008 v 3.936 c -0.368,0.352 -0.704,0.528 -1.008,0.528 -0.432,0 -0.72,-0.16 -0.88,-0.496 -0.144,-0.272 -0.208,-0.704 -0.208,-1.312 l 0.016,-2.16 c 0.016,-0.768 0.032,-1.264 0.064,-1.504 h -1.92 c 0.048,0.256 0.064,0.752 0.064,1.472 v 2.192 c 0,0.976 0.16,1.776 0.48,2.384 0.4,0.752 1.024,1.12 1.872,1.12 z"
|
||||
id="path38" />
|
||||
<path
|
||||
d="m 152.31,-17.934 c 0.848,0 1.536,-0.416 2.048,-1.232 0.432,-0.704 0.64,-1.536 0.64,-2.48 0,-0.928 -0.208,-1.712 -0.608,-2.368 -0.48,-0.752 -1.136,-1.12 -1.984,-1.12 -0.464,0 -0.944,0.16 -1.44,0.464 v -2.784 c 0,-0.608 0.016,-1.008 0.032,-1.2 h -1.824 c 0.032,0.176 0.048,0.576 0.048,1.2 v 8.464 c 0,0.496 -0.016,0.832 -0.048,1.008 h 1.696 v -0.576 c 0.384,0.416 0.864,0.624 1.44,0.624 z m -0.24,-5.52 c 0.736,0 1.104,0.608 1.104,1.808 0,0.496 -0.08,0.928 -0.256,1.296 -0.208,0.464 -0.528,0.688 -0.944,0.688 -0.336,0 -0.672,-0.16 -1.008,-0.496 v -2.768 c 0.384,-0.352 0.752,-0.528 1.104,-0.528 z"
|
||||
id="path40" />
|
||||
</g>
|
||||
<path
|
||||
d="m 52,0 94,94 M 14,0 146,132"
|
||||
style="fill:none;stroke:#ffffff;stroke-dasharray:2, 1;stroke-opacity:0.95"
|
||||
id="path44" />
|
||||
</svg>
|
||||
</div>
|
||||
</a>
|
||||
<div class="container">
|
||||
<div class="logo">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="250" height="250" viewBox="0 0 512 512">
|
||||
<path fill="#7AA7D1" d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"/>
|
||||
<path fill="#7AA7D1" d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.68 111.99 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"/>
|
||||
</svg>
|
||||
</div>
|
||||
<header>
|
||||
<h1 class="center-align logo-title">ladddddddder</h1>
|
||||
</header>
|
||||
<form id="inputForm" class="col s12" method="get">
|
||||
<div class="row">
|
||||
<div class="input-field col s12">
|
||||
<input type="text" id="inputField" name="inputField" class="validate" required>
|
||||
<label for="inputField">URL</label>
|
||||
</div>
|
||||
<!--
|
||||
<div class="input-field col s2">
|
||||
<button class="btn waves-effect waves-light" type="submit" name="action">Submit
|
||||
<i class="material-icons right">go</i>
|
||||
</button>
|
||||
</div>
|
||||
-->
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
M.AutoInit();
|
||||
});
|
||||
document.getElementById('inputForm').addEventListener('submit', function (e) {
|
||||
e.preventDefault();
|
||||
const inputValue = document.getElementById('inputField').value;
|
||||
window.location.href = '/' + inputValue;
|
||||
return false;
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
BIN
cmd/favicon.ico
Normal file
BIN
cmd/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 15 KiB |
140
cmd/main.go
140
cmd/main.go
@@ -1,32 +1,144 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"ladder/handlers"
|
||||
"embed"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"ladder/handlers"
|
||||
"ladder/internal/cli"
|
||||
|
||||
"github.com/akamensky/argparse"
|
||||
"github.com/gofiber/fiber/v2"
|
||||
"github.com/gofiber/fiber/v2/middleware/basicauth"
|
||||
"github.com/gofiber/fiber/v2/middleware/favicon"
|
||||
)
|
||||
|
||||
func main() {
|
||||
//go:embed favicon.ico
|
||||
var faviconData string
|
||||
|
||||
//go:embed styles.css
|
||||
var cssData embed.FS
|
||||
|
||||
func main() {
|
||||
parser := argparse.NewParser("ladder", "Every Wall needs a Ladder")
|
||||
|
||||
portEnv := os.Getenv("PORT")
|
||||
if os.Getenv("PORT") == "" {
|
||||
portEnv = "8080"
|
||||
}
|
||||
port := parser.String("p", "port", &argparse.Options{
|
||||
Required: false,
|
||||
Default: portEnv,
|
||||
Help: "Port the webserver will listen on",
|
||||
})
|
||||
|
||||
prefork := parser.Flag("P", "prefork", &argparse.Options{
|
||||
Required: false,
|
||||
Help: "This will spawn multiple processes listening",
|
||||
})
|
||||
|
||||
verbose := parser.Flag("v", "verbose", &argparse.Options{
|
||||
Required: false,
|
||||
Help: "Adds verbose logging",
|
||||
})
|
||||
|
||||
// TODO: add version flag that reads from handers/VERSION
|
||||
|
||||
ruleset := parser.String("r", "ruleset", &argparse.Options{
|
||||
Required: false,
|
||||
Help: "File, Directory or URL to a ruleset.yaml. Overrides RULESET environment variable.",
|
||||
})
|
||||
|
||||
mergeRulesets := parser.Flag("", "merge-rulesets", &argparse.Options{
|
||||
Required: false,
|
||||
Help: "Compiles a directory of yaml files into a single ruleset.yaml. Requires --ruleset arg.",
|
||||
})
|
||||
mergeRulesetsGzip := parser.Flag("", "merge-rulesets-gzip", &argparse.Options{
|
||||
Required: false,
|
||||
Help: "Compiles a directory of yaml files into a single ruleset.gz Requires --ruleset arg.",
|
||||
})
|
||||
mergeRulesetsOutput := parser.String("", "merge-rulesets-output", &argparse.Options{
|
||||
Required: false,
|
||||
Help: "Specify output file for --merge-rulesets and --merge-rulesets-gzip. Requires --ruleset and --merge-rulesets args.",
|
||||
})
|
||||
|
||||
err := parser.Parse(os.Args)
|
||||
if err != nil {
|
||||
fmt.Print(parser.Usage(err))
|
||||
}
|
||||
|
||||
// utility cli flag to compile ruleset directory into single ruleset.yaml
|
||||
if *mergeRulesets || *mergeRulesetsGzip {
|
||||
err = cli.HandleRulesetMerge(ruleset, mergeRulesets, mergeRulesetsGzip, mergeRulesetsOutput)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
if os.Getenv("PREFORK") == "true" {
|
||||
*prefork = true
|
||||
}
|
||||
|
||||
prefork, _ := strconv.ParseBool(os.Getenv("PREFORK"))
|
||||
app := fiber.New(
|
||||
fiber.Config{
|
||||
Prefork: prefork,
|
||||
Prefork: *prefork,
|
||||
GETOnly: false,
|
||||
ReadBufferSize: 4096 * 4, // increase max header size
|
||||
},
|
||||
)
|
||||
|
||||
app.Get("/", handlers.Form)
|
||||
app.Get("debug/*", handlers.Debug)
|
||||
app.Get("api/*", handlers.Api)
|
||||
app.Get("/*", handlers.ProxySite)
|
||||
|
||||
port := os.Getenv("PORT")
|
||||
if os.Getenv("PORT") == "" {
|
||||
port = "8080"
|
||||
// TODO: move to cmd/auth.go
|
||||
userpass := os.Getenv("USERPASS")
|
||||
if userpass != "" {
|
||||
userpass := strings.Split(userpass, ":")
|
||||
app.Use(basicauth.New(basicauth.Config{
|
||||
Users: map[string]string{
|
||||
userpass[0]: userpass[1],
|
||||
},
|
||||
}))
|
||||
}
|
||||
log.Fatal(app.Listen(":" + port))
|
||||
|
||||
// TODO: move to handlers/favicon.go
|
||||
app.Use(favicon.New(favicon.Config{
|
||||
Data: []byte(faviconData),
|
||||
URL: "/favicon.ico",
|
||||
}))
|
||||
|
||||
if os.Getenv("NOLOGS") != "true" {
|
||||
app.Use(func(c *fiber.Ctx) error {
|
||||
log.Println(c.Method(), c.Path())
|
||||
return c.Next()
|
||||
})
|
||||
}
|
||||
|
||||
app.Get("/", handlers.Form)
|
||||
|
||||
// TODO: move this logic to handers/styles.go
|
||||
app.Get("/styles.css", func(c *fiber.Ctx) error {
|
||||
cssData, err := cssData.ReadFile("styles.css")
|
||||
if err != nil {
|
||||
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
|
||||
}
|
||||
c.Set("Content-Type", "text/css")
|
||||
return c.Send(cssData)
|
||||
})
|
||||
|
||||
app.Get("ruleset", handlers.Ruleset)
|
||||
|
||||
app.Get("raw/*", handlers.Raw)
|
||||
app.Get("api/*", handlers.Api)
|
||||
|
||||
proxyOpts := &handlers.ProxyOptions{
|
||||
Verbose: *verbose,
|
||||
RulesetPath: *ruleset,
|
||||
}
|
||||
|
||||
app.Get("/*", handlers.NewProxySiteHandler(proxyOpts))
|
||||
app.Post("/*", handlers.NewProxySiteHandler(proxyOpts))
|
||||
log.Fatal(app.Listen(":" + *port))
|
||||
}
|
||||
|
||||
1
cmd/styles.css
Normal file
1
cmd/styles.css
Normal file
File diff suppressed because one or more lines are too long
@@ -1,23 +1,27 @@
|
||||
version: '3'
|
||||
services:
|
||||
ladder:
|
||||
image: ghcr.io/kubero-dev/ladder:latest
|
||||
image: ghcr.io/everywall/ladder:latest
|
||||
container_name: ladder
|
||||
build: .
|
||||
#build: .
|
||||
#restart: always
|
||||
#command: tail -f /dev/null
|
||||
#command: sh -c ./ladder
|
||||
environment:
|
||||
- PORT=8080
|
||||
- PREFORK=true
|
||||
#- GODEBUG=netdns=go+4
|
||||
- RULESET=/app/ruleset.yaml
|
||||
#- ALLOWED_DOMAINS=example.com,example.org
|
||||
#- ALLOWED_DOMAINS_RULESET=false
|
||||
#- EXPOSE_RULESET=true
|
||||
#- PREFORK=false
|
||||
#- DISABLE_FORM=false
|
||||
#- FORM_PATH=/app/form.html
|
||||
#- X_FORWARDED_FOR=66.249.66.1
|
||||
#- USER_AGENT=Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
|
||||
#- USERPASS=foo:bar
|
||||
#- LOG_URLS=true
|
||||
#- GODEBUG=netdns=go
|
||||
ports:
|
||||
- "8080:8080"
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: "0.50"
|
||||
memory: 512M
|
||||
reservations:
|
||||
cpus: "0.25"
|
||||
memory: 128M
|
||||
volumes:
|
||||
- ./ruleset.yaml:/app/ruleset.yaml
|
||||
- ./handlers/form.html:/app/form.html
|
||||
16
go.mod
16
go.mod
@@ -2,10 +2,17 @@ module ladder
|
||||
|
||||
go 1.21.1
|
||||
|
||||
require github.com/gofiber/fiber/v2 v2.50.0
|
||||
require (
|
||||
github.com/PuerkitoBio/goquery v1.8.1
|
||||
github.com/akamensky/argparse v1.4.0
|
||||
github.com/gofiber/fiber/v2 v2.50.0
|
||||
github.com/stretchr/testify v1.8.4
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/andybalholm/brotli v1.0.6 // indirect
|
||||
github.com/andybalholm/cascadia v1.3.2 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/google/uuid v1.4.0 // indirect
|
||||
github.com/klauspost/compress v1.17.2 // indirect
|
||||
@@ -14,11 +21,10 @@ require (
|
||||
github.com/mattn/go-runewidth v0.0.15 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.4.4 // indirect
|
||||
github.com/stretchr/objx v0.5.0 // indirect
|
||||
github.com/stretchr/testify v1.8.4 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasthttp v1.50.0 // indirect
|
||||
github.com/valyala/tcplisten v1.0.0 // indirect
|
||||
golang.org/x/sys v0.13.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
golang.org/x/net v0.18.0
|
||||
golang.org/x/sys v0.14.0 // indirect
|
||||
golang.org/x/term v0.14.0
|
||||
)
|
||||
|
||||
62
go.sum
62
go.sum
@@ -1,6 +1,12 @@
|
||||
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
|
||||
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
|
||||
github.com/akamensky/argparse v1.4.0 h1:YGzvsTqCvbEZhL8zZu2AiA5nq805NZh75JNj4ajn1xc=
|
||||
github.com/akamensky/argparse v1.4.0/go.mod h1:S5kwC7IuDcEr5VeXtGPRVZ5o/FdhcMlQz4IZQuw64xA=
|
||||
github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
|
||||
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
|
||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/gofiber/fiber/v2 v2.50.0 h1:ia0JaB+uw3GpNSCR5nvC5dsaxXjRU5OEu36aytx+zGw=
|
||||
@@ -21,12 +27,6 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
|
||||
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
@@ -35,11 +35,53 @@ github.com/valyala/fasthttp v1.50.0 h1:H7fweIlBm0rXLs2q0XbalvJ6r0CUPFWK3/bB4N13e
|
||||
github.com/valyala/fasthttp v1.50.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
|
||||
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
|
||||
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
|
||||
golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
|
||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q=
|
||||
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||
golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8=
|
||||
golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
@@ -1,49 +1,34 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"io"
|
||||
_ "embed"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
//nolint:all
|
||||
//go:embed VERSION
|
||||
var version string
|
||||
|
||||
func Api(c *fiber.Ctx) error {
|
||||
// Get the url from the URL
|
||||
urlQuery := c.Params("*")
|
||||
|
||||
u, err := url.Parse(urlQuery)
|
||||
queries := c.Queries()
|
||||
body, req, resp, err := fetchSite(urlQuery, queries)
|
||||
if err != nil {
|
||||
log.Println("ERROR:", err)
|
||||
c.SendStatus(500)
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
|
||||
log.Println(u.String())
|
||||
|
||||
// Fetch the site
|
||||
client := &http.Client{}
|
||||
req, _ := http.NewRequest("GET", u.String(), nil)
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
|
||||
req.Header.Set("X-Forwarded-For", "66.249.66.1")
|
||||
req.Header.Set("Referer", u.String())
|
||||
req.Header.Set("Host", u.Host)
|
||||
resp, err := client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
bodyB, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Println("ERROR", err)
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
body := rewriteHtml(bodyB, u)
|
||||
response := Response{
|
||||
Body: body,
|
||||
Version: version,
|
||||
Body: body,
|
||||
}
|
||||
response.Request.Headers = make([]interface{}, 0)
|
||||
|
||||
response.Request.Headers = make([]any, 0, len(req.Header))
|
||||
for k, v := range req.Header {
|
||||
response.Request.Headers = append(response.Request.Headers, map[string]string{
|
||||
"key": k,
|
||||
@@ -51,7 +36,7 @@ func Api(c *fiber.Ctx) error {
|
||||
})
|
||||
}
|
||||
|
||||
response.Response.Headers = make([]interface{}, 0)
|
||||
response.Response.Headers = make([]any, 0, len(resp.Header))
|
||||
for k, v := range resp.Header {
|
||||
response.Response.Headers = append(response.Response.Headers, map[string]string{
|
||||
"key": k,
|
||||
@@ -63,6 +48,7 @@ func Api(c *fiber.Ctx) error {
|
||||
}
|
||||
|
||||
type Response struct {
|
||||
Version string `json:"version"`
|
||||
Body string `json:"body"`
|
||||
Request struct {
|
||||
Headers []interface{} `json:"headers"`
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
func Debug(c *fiber.Ctx) error {
|
||||
// Get the url from the URL
|
||||
urlQuery := c.Params("*")
|
||||
|
||||
u, err := url.Parse(urlQuery)
|
||||
if err != nil {
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
|
||||
log.Println(u.String())
|
||||
|
||||
// Fetch the site
|
||||
client := &http.Client{}
|
||||
req, _ := http.NewRequest("GET", u.String(), nil)
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
|
||||
req.Header.Set("X-Forwarded-For", "66.249.66.1")
|
||||
req.Header.Set("Referer", u.String())
|
||||
req.Header.Set("Host", u.Host)
|
||||
resp, err := client.Do(req)
|
||||
|
||||
if err != nil {
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
bodyB, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Println("ERROR", err)
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
body := rewriteHtml(bodyB, u)
|
||||
return c.SendString(body)
|
||||
}
|
||||
200
handlers/form.go
200
handlers/form.go
@@ -1,179 +1,31 @@
|
||||
package handlers
|
||||
|
||||
import "github.com/gofiber/fiber/v2"
|
||||
import (
|
||||
_ "embed"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
//go:embed form.html
|
||||
var formHtml string
|
||||
|
||||
func Form(c *fiber.Ctx) error {
|
||||
c.Set("Content-Type", "text/html")
|
||||
return c.SendString(html)
|
||||
if os.Getenv("DISABLE_FORM") == "true" {
|
||||
c.Set("Content-Type", "text/html")
|
||||
c.SendStatus(fiber.StatusNotFound)
|
||||
return c.SendString("Form Disabled")
|
||||
} else {
|
||||
if os.Getenv("FORM_PATH") != "" {
|
||||
dat, err := os.ReadFile(os.Getenv("FORM_PATH"))
|
||||
if err != nil {
|
||||
log.Println("ERROR: unable to load custom form", err)
|
||||
} else {
|
||||
formHtml = string(dat)
|
||||
}
|
||||
}
|
||||
c.Set("Content-Type", "text/html")
|
||||
return c.SendString(formHtml)
|
||||
}
|
||||
}
|
||||
|
||||
const html = `
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Ladder</title>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css">
|
||||
</head>
|
||||
<style>
|
||||
body {
|
||||
background-color: #ffffff;
|
||||
}
|
||||
|
||||
header h1 {
|
||||
text-transform: uppercase;
|
||||
font-size: 70px;
|
||||
font-weight: 600;
|
||||
color: #fdfdfe;
|
||||
text-shadow: 0px 0px 5px #7AA7D1, 0px 0px 10px #7AA7D1, 0px 0px 10px #7AA7D1,
|
||||
0px 0px 20px #7AA7D1;
|
||||
}
|
||||
.logo-title {
|
||||
font-family: 'Arial', sans-serif;
|
||||
font-size: 2rem;
|
||||
color: #fff;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
.logo {
|
||||
text-align: center;
|
||||
}
|
||||
.github-corner {
|
||||
animation: octocat-wave 560ms ease-in-out;
|
||||
position:absolute;
|
||||
top:0;
|
||||
right:0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<a href="https://github.com/kubero-dev/ladder">
|
||||
<div class="github-corner" aria-label="View source on GitHub">
|
||||
<svg
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
version="1.1"
|
||||
width="146"
|
||||
height="146"
|
||||
id="svg2">
|
||||
<defs
|
||||
id="defs8">
|
||||
<filter
|
||||
height="1.096"
|
||||
y="-0.048"
|
||||
width="1.096"
|
||||
x="-0.048"
|
||||
style="color-interpolation-filters:sRGB"
|
||||
id="filter6">
|
||||
<feGaussianBlur
|
||||
stdDeviation="3"
|
||||
id="feGaussianBlur4" />
|
||||
</filter>
|
||||
</defs>
|
||||
<path
|
||||
d="M 152,140 6,-6 H 48 L 152,98 Z"
|
||||
style="opacity:0.8;filter:url(#filter6)"
|
||||
id="path10" />
|
||||
<path
|
||||
d="M 146,134 12,0 h 42 l 92,92 z"
|
||||
style="fill:#007200"
|
||||
id="path12" />
|
||||
<g
|
||||
aria-label="Fork me on GitHub"
|
||||
transform="rotate(45)"
|
||||
style="font-family:Collegiate;fill:#ffffff"
|
||||
id="g42">
|
||||
<path
|
||||
d="m 53.643,-19.486 c 0,0.688 -0.016,1.2 -0.064,1.504 h 2.08 c -0.048,-0.32 -0.064,-0.8 -0.064,-1.424 v -3.344 h 1.76 c 0.416,0 0.736,0.016 0.944,0.048 v -1.76 c -0.24,0.032 -0.592,0.048 -1.088,0.048 h -1.616 v -2.496 h 1.936 c 0.56,0 0.944,0.016 1.184,0.048 v -1.792 h -5.136 c 0.048,0.272 0.064,0.784 0.064,1.504 z"
|
||||
id="path14" />
|
||||
<path
|
||||
d="m 62.424,-17.87 c 1.008,0 1.776,-0.368 2.272,-1.088 0.432,-0.624 0.656,-1.472 0.656,-2.544 0,-2.416 -0.976,-3.616 -2.928,-3.616 -1.968,0 -2.96,1.2 -2.96,3.616 0,1.072 0.224,1.936 0.656,2.56 0.512,0.72 1.28,1.072 2.304,1.072 z m -0.016,-5.68 c 0.496,0 0.816,0.24 0.976,0.704 0.096,0.272 0.144,0.72 0.144,1.344 0,0.64 -0.048,1.088 -0.144,1.36 -0.16,0.464 -0.48,0.688 -0.976,0.688 -0.496,0 -0.816,-0.24 -0.976,-0.704 -0.096,-0.272 -0.144,-0.72 -0.144,-1.344 0,-0.624 0.048,-1.072 0.144,-1.344 0.16,-0.464 0.48,-0.704 0.976,-0.704 z"
|
||||
id="path16" />
|
||||
<path
|
||||
d="m 68.293,-17.982 c -0.032,-0.24 -0.048,-0.64 -0.048,-1.184 v -3.888 c 0.352,-0.304 0.672,-0.464 0.976,-0.464 0.224,0 0.48,0.096 0.752,0.288 v -1.808 c -0.224,-0.08 -0.432,-0.128 -0.624,-0.128 -0.448,0 -0.832,0.192 -1.152,0.56 v -0.48 h -1.744 c 0.032,0.192 0.048,0.544 0.048,1.04 v 4.976 c 0,0.512 -0.016,0.88 -0.048,1.088 z"
|
||||
id="path18" />
|
||||
<path
|
||||
d="m 72.857,-17.982 c -0.032,-0.24 -0.048,-0.64 -0.048,-1.184 v -2.448 l 1.472,2.816 c 0.208,0.384 0.32,0.656 0.368,0.816 h 1.872 l -2.352,-4.416 2.144,-2.752 h -2.064 c -0.08,0.176 -0.192,0.352 -0.352,0.56 l -1.088,1.44 v -4.496 c 0,-0.464 0.016,-0.8 0.048,-1.008 h -1.824 c 0.032,0.192 0.048,0.544 0.048,1.04 v 8.544 c 0,0.512 -0.016,0.88 -0.048,1.088 z"
|
||||
id="path20" />
|
||||
<path
|
||||
d="m 85.08,-24.478 c -0.384,-0.432 -0.896,-0.656 -1.52,-0.656 -0.416,0 -0.864,0.192 -1.328,0.56 v -0.512 l -1.76,-0.016 c 0.032,0.176 0.048,0.544 0.048,1.12 v 4.992 c 0,0.496 -0.016,0.832 -0.048,1.008 h 1.856 c 0,-0.064 -0.048,-0.64 -0.048,-1.008 v -3.984 c 0.304,-0.288 0.608,-0.432 0.928,-0.432 0.656,0 0.864,0.416 0.864,1.76 l -0.016,2.16 c 0,0.656 -0.032,1.168 -0.08,1.504 h 1.92 c -0.048,-0.256 -0.064,-0.752 -0.064,-1.472 v -2.192 c 0,-0.56 -0.048,-1.056 -0.144,-1.504 0.208,-0.176 0.544,-0.256 0.976,-0.256 0.64,0 0.96,0.592 0.96,1.76 v 2.16 c 0,0.656 -0.032,1.168 -0.08,1.504 h 1.904 c -0.048,-0.256 -0.064,-0.752 -0.064,-1.472 v -2.192 c 0,-0.96 -0.176,-1.744 -0.512,-2.368 -0.432,-0.752 -1.056,-1.12 -1.888,-1.12 -0.736,0 -1.376,0.224 -1.904,0.656 z"
|
||||
id="path22" />
|
||||
<path
|
||||
d="m 95.905,-20.99 c 0.032,-0.304 0.048,-0.624 0.048,-0.992 0,-0.944 -0.224,-1.696 -0.656,-2.256 -0.464,-0.592 -1.136,-0.896 -2.016,-0.896 -0.896,0 -1.6,0.368 -2.112,1.088 -0.464,0.656 -0.688,1.456 -0.688,2.432 0,1.136 0.272,2.048 0.832,2.704 0.576,0.72 1.392,1.072 2.448,1.072 0.496,0 1.056,-0.128 1.712,-0.368 v -1.712 c -0.464,0.304 -1.008,0.464 -1.6,0.464 -0.944,0 -1.44,-0.512 -1.52,-1.536 z m -2.576,-2.672 c 0.64,0 0.96,0.4 0.976,1.216 h -1.968 c 0.048,-0.816 0.368,-1.216 0.992,-1.216 z"
|
||||
id="path24" />
|
||||
<use
|
||||
xlink:href="#path16"
|
||||
transform="translate(40.438)"
|
||||
id="use26" />
|
||||
<path
|
||||
d="m 110.187,-25.15 c -0.496,0 -0.992,0.208 -1.472,0.64 v -0.576 h -1.76 c 0.032,0.176 0.048,0.56 0.048,1.184 v 4.912 c 0,0.496 -0.016,0.832 -0.048,1.008 h 1.856 c 0,-0.064 -0.048,-0.64 -0.048,-1.008 v -3.936 c 0.368,-0.352 0.736,-0.528 1.088,-0.528 0.784,0 1.168,0.608 1.152,1.808 l -0.016,2.16 c -0.016,0.752 -0.032,1.264 -0.064,1.504 h 1.92 c -0.048,-0.256 -0.064,-0.752 -0.064,-1.472 v -2.192 c 0,-0.944 -0.192,-1.744 -0.592,-2.384 -0.464,-0.752 -1.136,-1.136 -2,-1.12 z"
|
||||
id="path28" />
|
||||
<path
|
||||
d="m 123.877,-17.982 c 0.144,0.016 0.256,0.016 0.336,0 0,-0.192 -0.064,-0.768 -0.064,-1.36 v -1.856 c 0,-0.56 0.016,-1.056 0.064,-1.52 h -1.952 c 0.032,0.704 0.048,1.12 0.032,1.248 0,1.28 -0.512,1.92 -1.552,1.92 -1.264,0 -1.904,-1.264 -1.904,-3.776 0,-2.48 0.784,-3.728 2.352,-3.728 0.752,0 1.472,0.288 2.16,0.88 v -1.824 c -0.608,-0.528 -1.328,-0.8 -2.16,-0.8 -2.896,0 -4.416,1.904 -4.416,5.424 0,3.696 1.328,5.552 3.968,5.552 0.592,0 1.12,-0.128 1.584,-0.368 0.368,-0.208 0.624,-0.432 0.768,-0.688 z"
|
||||
id="path30" />
|
||||
<path
|
||||
d="m 126.49,-26.334 c 0.592,0 1.104,-0.544 1.104,-1.184 0,-0.656 -0.512,-1.2 -1.104,-1.2 -0.624,0 -1.12,0.544 -1.12,1.2 0,0.64 0.496,1.184 1.12,1.184 z m 0.896,8.352 c -0.016,-0.24 -0.032,-0.64 -0.032,-1.184 v -4.912 c 0,-0.464 0.016,-0.8 0.032,-1.008 h -1.808 c 0.016,0.192 0.032,0.544 0.032,1.04 v 4.976 c 0,0.512 -0.016,0.88 -0.032,1.088 z"
|
||||
id="path32" />
|
||||
<path
|
||||
d="m 130.783,-25.742 c 0,-0.256 0.016,-0.48 0.048,-0.688 h -1.856 c 0.032,0.176 0.048,0.416 0.048,0.72 v 0.624 h -0.784 v 1.552 c 0.224,-0.032 0.4,-0.048 0.544,-0.048 l 0.24,0.016 v 0.032 h -0.016 v 2.864 c 0,0.896 0.112,1.552 0.336,1.968 0.304,0.56 0.832,0.832 1.616,0.832 0.56,0 1.024,-0.112 1.424,-0.32 v -1.6 c -0.272,0.176 -0.56,0.272 -0.896,0.272 -0.464,0 -0.704,-0.352 -0.704,-1.072 v -2.976 h 0.688 c 0.256,0 0.592,0.032 0.704,0.032 v -1.552 h -1.392 z"
|
||||
id="path34" />
|
||||
<path
|
||||
d="m 140.259,-27.678 c 0,-0.416 0.016,-0.736 0.064,-0.976 h -2.096 c 0.048,0.24 0.064,0.688 0.064,1.344 v 2.8 h -2.912 v -3.024 c 0,-0.48 0.016,-0.848 0.064,-1.12 h -2.08 c 0.048,0.256 0.064,0.624 0.064,1.12 v 8.432 c 0,0.496 -0.016,0.864 -0.064,1.12 h 2.08 c -0.048,-0.24 -0.064,-0.656 -0.064,-1.232 v -3.552 h 2.912 v 3.568 c 0,0.528 -0.016,0.944 -0.064,1.216 h 2.096 c -0.048,-0.24 -0.064,-0.624 -0.064,-1.12 v -3.664 h 0.528 v -1.744 h -0.528 z"
|
||||
id="path36" />
|
||||
<path
|
||||
d="m 144.402,-17.918 c 0.56,0 1.072,-0.208 1.568,-0.64 v 0.576 h 1.744 c -0.016,-0.176 -0.032,-0.576 -0.032,-1.2 v -4.896 c 0,-0.496 0.016,-0.832 0.032,-1.008 h -1.856 c 0,0.048 0.064,0.64 0.064,1.008 v 3.936 c -0.368,0.352 -0.704,0.528 -1.008,0.528 -0.432,0 -0.72,-0.16 -0.88,-0.496 -0.144,-0.272 -0.208,-0.704 -0.208,-1.312 l 0.016,-2.16 c 0.016,-0.768 0.032,-1.264 0.064,-1.504 h -1.92 c 0.048,0.256 0.064,0.752 0.064,1.472 v 2.192 c 0,0.976 0.16,1.776 0.48,2.384 0.4,0.752 1.024,1.12 1.872,1.12 z"
|
||||
id="path38" />
|
||||
<path
|
||||
d="m 152.31,-17.934 c 0.848,0 1.536,-0.416 2.048,-1.232 0.432,-0.704 0.64,-1.536 0.64,-2.48 0,-0.928 -0.208,-1.712 -0.608,-2.368 -0.48,-0.752 -1.136,-1.12 -1.984,-1.12 -0.464,0 -0.944,0.16 -1.44,0.464 v -2.784 c 0,-0.608 0.016,-1.008 0.032,-1.2 h -1.824 c 0.032,0.176 0.048,0.576 0.048,1.2 v 8.464 c 0,0.496 -0.016,0.832 -0.048,1.008 h 1.696 v -0.576 c 0.384,0.416 0.864,0.624 1.44,0.624 z m -0.24,-5.52 c 0.736,0 1.104,0.608 1.104,1.808 0,0.496 -0.08,0.928 -0.256,1.296 -0.208,0.464 -0.528,0.688 -0.944,0.688 -0.336,0 -0.672,-0.16 -1.008,-0.496 v -2.768 c 0.384,-0.352 0.752,-0.528 1.104,-0.528 z"
|
||||
id="path40" />
|
||||
</g>
|
||||
<path
|
||||
d="m 52,0 94,94 M 14,0 146,132"
|
||||
style="fill:none;stroke:#ffffff;stroke-dasharray:2, 1;stroke-opacity:0.95"
|
||||
id="path44" />
|
||||
</svg>
|
||||
</div>
|
||||
</a>
|
||||
<div class="container">
|
||||
<div class="logo">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="250" height="250" viewBox="0 0 512 512">
|
||||
<path fill="#7AA7D1" d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"/>
|
||||
<path fill="#7AA7D1" d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.68 111.99 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"/>
|
||||
</svg>
|
||||
</div>
|
||||
<header>
|
||||
<h1 class="center-align logo-title">ladddddddder</h1>
|
||||
</header>
|
||||
<form id="inputForm" class="col s12" method="get">
|
||||
<div class="row">
|
||||
<div class="input-field col s12">
|
||||
<input type="text" id="inputField" name="inputField" class="validate" required>
|
||||
<label for="inputField">URL</label>
|
||||
</div>
|
||||
<!--
|
||||
<div class="input-field col s2">
|
||||
<button class="btn waves-effect waves-light" type="submit" name="action">Submit
|
||||
<i class="material-icons right">go</i>
|
||||
</button>
|
||||
</div>
|
||||
-->
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
M.AutoInit();
|
||||
});
|
||||
document.getElementById('inputForm').addEventListener('submit', function (e) {
|
||||
e.preventDefault();
|
||||
const inputValue = document.getElementById('inputField').value;
|
||||
window.location.href = '/' + inputValue;
|
||||
return false;
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
`
|
||||
|
||||
79
handlers/form.html
Normal file
79
handlers/form.html
Normal file
@@ -0,0 +1,79 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>ladder</title>
|
||||
<link rel="stylesheet" href="/styles.css">
|
||||
</head>
|
||||
|
||||
<body class="antialiased text-slate-500 dark:text-slate-400 bg-white dark:bg-slate-900">
|
||||
<div class="grid grid-cols-1 gap-4 max-w-3xl mx-auto pt-10">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="100%" height="250" viewBox="0 0 512 512">
|
||||
<path fill="#7AA7D1" d="M262.074 485.246C254.809 485.265 247.407 485.534 240.165 484.99L226.178 483.306C119.737 468.826 34.1354 383.43 25.3176 274.714C24.3655 262.975 23.5876 253.161 24.3295 241.148C31.4284 126.212 123.985 31.919 238.633 24.1259L250.022 23.8366C258.02 23.8001 266.212 23.491 274.183 24.1306C320.519 27.8489 366.348 45.9743 402.232 75.4548L416.996 88.2751C444.342 114.373 464.257 146.819 475.911 182.72L480.415 197.211C486.174 219.054 488.67 242.773 487.436 265.259L486.416 275.75C478.783 352.041 436.405 418.1 369.36 455.394L355.463 462.875C326.247 477.031 294.517 484.631 262.074 485.246ZM253.547 72.4475C161.905 73.0454 83.5901 144.289 73.0095 234.5C69.9101 260.926 74.7763 292.594 83.9003 317.156C104.53 372.691 153.9 416.616 211.281 430.903C226.663 434.733 242.223 436.307 258.044 436.227C353.394 435.507 430.296 361.835 438.445 267.978C439.794 252.442 438.591 236.759 435.59 221.5C419.554 139.955 353.067 79.4187 269.856 72.7052C264.479 72.2714 258.981 72.423 253.586 72.4127L253.547 72.4475Z"/>
|
||||
<path fill="#7AA7D1" d="M153.196 310.121L133.153 285.021C140.83 283.798 148.978 285.092 156.741 284.353L156.637 277.725L124.406 278.002C123.298 277.325 122.856 276.187 122.058 275.193L116.089 267.862C110.469 260.975 103.827 254.843 98.6026 247.669C103.918 246.839 105.248 246.537 111.14 246.523L129.093 246.327C130.152 238.785 128.62 240.843 122.138 240.758C111.929 240.623 110.659 242.014 105.004 234.661L97.9953 225.654C94.8172 221.729 91.2219 218.104 88.2631 214.005C84.1351 208.286 90.1658 209.504 94.601 209.489L236.752 209.545C257.761 209.569 268.184 211.009 285.766 221.678L285.835 206.051C285.837 197.542 286.201 189.141 284.549 180.748C280.22 158.757 260.541 143.877 240.897 135.739C238.055 134.561 232.259 133.654 235.575 129.851C244.784 119.288 263.680 111.990 277.085 111.105C288.697 109.828 301.096 113.537 311.75 117.703C360.649 136.827 393.225 183.042 398.561 234.866C402.204 270.253 391.733 308.356 367.999 335.1C332.832 374.727 269.877 384.883 223.294 360.397C206.156 351.388 183.673 333.299 175.08 316.6C173.511 313.551 174.005 313.555 170.443 313.52L160.641 313.449C158.957 313.435 156.263 314.031 155.122 312.487L153.196 310.121Z"/>
|
||||
</svg>
|
||||
<header>
|
||||
<h1 class="text-center text-3xl sm:text-4xl font-extrabold text-slate-900 tracking-tight dark:text-slate-200">ladddddddder</h1>
|
||||
</header>
|
||||
<form id="inputForm" method="get" class="mx-4 relative">
|
||||
<div>
|
||||
<input type="text" id="inputField" placeholder="Proxy Search" name="inputField" class="w-full text-sm leading-6 text-slate-400 rounded-md ring-1 ring-slate-900/10 shadow-sm py-1.5 pl-2 pr-3 hover:ring-slate-300 dark:bg-slate-800 dark:highlight-white/5 dark:hover:bg-slate-700" required autofocus>
|
||||
<button id="clearButton" type="button" aria-label="Clear Search" title="Clear Search" class="hidden absolute inset-y-0 right-0 items-center pr-2 hover:text-slate-400 hover:dark:text-slate-300" tabindex="-1">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round""><path d="M18 6 6 18"/><path d="m6 6 12 12"/></svg>
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
<footer class="mt-10 mx-4 text-center text-slate-600 dark:text-slate-400">
|
||||
<p>
|
||||
Code Licensed Under GPL v3.0 |
|
||||
<a href="https://github.com/everywall/ladder" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">View Source</a> |
|
||||
<a href="https://github.com/everywall" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">Everywall</a>
|
||||
</p>
|
||||
</footer>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
document.getElementById('inputForm').addEventListener('submit', function (e) {
|
||||
e.preventDefault();
|
||||
let url = document.getElementById('inputField').value;
|
||||
if (url.indexOf('http') === -1) {
|
||||
url = 'https://' + url;
|
||||
}
|
||||
window.location.href = '/' + url;
|
||||
return false;
|
||||
});
|
||||
document.getElementById('inputField').addEventListener('input', function() {
|
||||
const clearButton = document.getElementById('clearButton');
|
||||
if (this.value.trim().length > 0) {
|
||||
clearButton.style.display = 'block';
|
||||
} else {
|
||||
clearButton.style.display = 'none';
|
||||
}
|
||||
});
|
||||
document.getElementById('clearButton').addEventListener('click', function() {
|
||||
document.getElementById('inputField').value = '';
|
||||
this.style.display = 'none';
|
||||
document.getElementById('inputField').focus();
|
||||
});
|
||||
</script>
|
||||
|
||||
<style>
|
||||
@media (prefers-color-scheme: light) {
|
||||
body {
|
||||
background-color: #ffffff;
|
||||
color: #333333;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
body {
|
||||
background-color: #1a202c;
|
||||
color: #ffffff;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -6,52 +6,192 @@ import (
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"ladder/pkg/ruleset"
|
||||
"ladder/proxychain"
|
||||
rx "ladder/proxychain/requestmodifers"
|
||||
tx "ladder/proxychain/responsemodifers"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
func ProxySite(c *fiber.Ctx) error {
|
||||
// Get the url from the URL
|
||||
urlQuery := c.Params("*")
|
||||
var (
|
||||
UserAgent = getenv("USER_AGENT", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
|
||||
ForwardedFor = getenv("X_FORWARDED_FOR", "66.249.66.1")
|
||||
rulesSet = ruleset.NewRulesetFromEnv()
|
||||
allowedDomains = []string{}
|
||||
)
|
||||
|
||||
u, err := url.Parse(urlQuery)
|
||||
if err != nil {
|
||||
log.Println("ERROR", err)
|
||||
c.SendStatus(500)
|
||||
return c.SendString(err.Error())
|
||||
func init() {
|
||||
allowedDomains = strings.Split(os.Getenv("ALLOWED_DOMAINS"), ",")
|
||||
if os.Getenv("ALLOWED_DOMAINS_RULESET") == "true" {
|
||||
allowedDomains = append(allowedDomains, rulesSet.Domains()...)
|
||||
}
|
||||
}
|
||||
|
||||
type ProxyOptions struct {
|
||||
RulesetPath string
|
||||
Verbose bool
|
||||
}
|
||||
|
||||
func NewProxySiteHandler(opts *ProxyOptions) fiber.Handler {
|
||||
/*
|
||||
var rs ruleset.RuleSet
|
||||
if opts.RulesetPath != "" {
|
||||
r, err := ruleset.NewRuleset(opts.RulesetPath)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
rs = r
|
||||
}
|
||||
*/
|
||||
|
||||
return func(c *fiber.Ctx) error {
|
||||
proxychain := proxychain.
|
||||
NewProxyChain().
|
||||
SetFiberCtx(c).
|
||||
SetDebugLogging(opts.Verbose).
|
||||
SetRequestModifications(
|
||||
rx.DeleteOutgoingCookies(),
|
||||
//rx.RequestArchiveIs(),
|
||||
rx.MasqueradeAsGoogleBot(),
|
||||
).
|
||||
AddResponseModifications(
|
||||
tx.BypassCORS(),
|
||||
tx.BypassContentSecurityPolicy(),
|
||||
tx.DeleteIncomingCookies(),
|
||||
tx.RewriteHTMLResourceURLs(),
|
||||
tx.PatchDynamicResourceURLs(),
|
||||
).
|
||||
Execute()
|
||||
|
||||
return proxychain
|
||||
}
|
||||
|
||||
log.Println(u.String())
|
||||
}
|
||||
|
||||
func modifyURL(uri string, rule ruleset.Rule) (string, error) {
|
||||
newUrl, err := url.Parse(uri)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, urlMod := range rule.UrlMods.Domain {
|
||||
re := regexp.MustCompile(urlMod.Match)
|
||||
newUrl.Host = re.ReplaceAllString(newUrl.Host, urlMod.Replace)
|
||||
}
|
||||
|
||||
for _, urlMod := range rule.UrlMods.Path {
|
||||
re := regexp.MustCompile(urlMod.Match)
|
||||
newUrl.Path = re.ReplaceAllString(newUrl.Path, urlMod.Replace)
|
||||
}
|
||||
|
||||
v := newUrl.Query()
|
||||
for _, query := range rule.UrlMods.Query {
|
||||
if query.Value == "" {
|
||||
v.Del(query.Key)
|
||||
continue
|
||||
}
|
||||
v.Set(query.Key, query.Value)
|
||||
}
|
||||
newUrl.RawQuery = v.Encode()
|
||||
|
||||
if rule.GoogleCache {
|
||||
newUrl, err = url.Parse("https://webcache.googleusercontent.com/search?q=cache:" + newUrl.String())
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
return newUrl.String(), nil
|
||||
}
|
||||
|
||||
func fetchSite(urlpath string, queries map[string]string) (string, *http.Request, *http.Response, error) {
|
||||
urlQuery := "?"
|
||||
if len(queries) > 0 {
|
||||
for k, v := range queries {
|
||||
urlQuery += k + "=" + v + "&"
|
||||
}
|
||||
}
|
||||
urlQuery = strings.TrimSuffix(urlQuery, "&")
|
||||
urlQuery = strings.TrimSuffix(urlQuery, "?")
|
||||
|
||||
u, err := url.Parse(urlpath)
|
||||
if err != nil {
|
||||
return "", nil, nil, err
|
||||
}
|
||||
|
||||
if len(allowedDomains) > 0 && !StringInSlice(u.Host, allowedDomains) {
|
||||
return "", nil, nil, fmt.Errorf("domain not allowed. %s not in %s", u.Host, allowedDomains)
|
||||
}
|
||||
|
||||
if os.Getenv("LOG_URLS") == "true" {
|
||||
log.Println(u.String() + urlQuery)
|
||||
}
|
||||
|
||||
// Modify the URI according to ruleset
|
||||
rule := fetchRule(u.Host, u.Path)
|
||||
url, err := modifyURL(u.String()+urlQuery, rule)
|
||||
if err != nil {
|
||||
return "", nil, nil, err
|
||||
}
|
||||
|
||||
// Fetch the site
|
||||
client := &http.Client{}
|
||||
req, _ := http.NewRequest("GET", u.String(), nil)
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
|
||||
req.Header.Set("X-Forwarded-For", "66.249.66.1")
|
||||
req.Header.Set("Referer", u.String())
|
||||
req.Header.Set("Host", u.Host)
|
||||
resp, err := client.Do(req)
|
||||
req, _ := http.NewRequest("GET", url, nil)
|
||||
|
||||
if rule.Headers.UserAgent != "" {
|
||||
req.Header.Set("User-Agent", rule.Headers.UserAgent)
|
||||
} else {
|
||||
req.Header.Set("User-Agent", UserAgent)
|
||||
}
|
||||
|
||||
if rule.Headers.XForwardedFor != "" {
|
||||
if rule.Headers.XForwardedFor != "none" {
|
||||
req.Header.Set("X-Forwarded-For", rule.Headers.XForwardedFor)
|
||||
}
|
||||
} else {
|
||||
req.Header.Set("X-Forwarded-For", ForwardedFor)
|
||||
}
|
||||
|
||||
if rule.Headers.Referer != "" {
|
||||
if rule.Headers.Referer != "none" {
|
||||
req.Header.Set("Referer", rule.Headers.Referer)
|
||||
}
|
||||
} else {
|
||||
req.Header.Set("Referer", u.String())
|
||||
}
|
||||
|
||||
if rule.Headers.Cookie != "" {
|
||||
req.Header.Set("Cookie", rule.Headers.Cookie)
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return c.SendString(err.Error())
|
||||
return "", nil, nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
bodyB, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Println("ERROR", err)
|
||||
c.SendStatus(500)
|
||||
return c.SendString(err.Error())
|
||||
return "", nil, nil, err
|
||||
}
|
||||
|
||||
body := rewriteHtml(bodyB, u)
|
||||
c.Set("Content-Type", resp.Header.Get("Content-Type"))
|
||||
return c.SendString(body)
|
||||
if rule.Headers.CSP != "" {
|
||||
//log.Println(rule.Headers.CSP)
|
||||
resp.Header.Set("Content-Security-Policy", rule.Headers.CSP)
|
||||
}
|
||||
|
||||
//log.Print("rule", rule) TODO: Add a debug mode to print the rule
|
||||
body := rewriteHtml(bodyB, u, rule)
|
||||
return body, req, resp, nil
|
||||
}
|
||||
|
||||
func rewriteHtml(bodyB []byte, u *url.URL) string {
|
||||
func rewriteHtml(bodyB []byte, u *url.URL, rule ruleset.Rule) string {
|
||||
// Rewrite the HTML
|
||||
body := string(bodyB)
|
||||
|
||||
@@ -65,11 +205,86 @@ func rewriteHtml(bodyB []byte, u *url.URL) string {
|
||||
reScript := regexp.MustCompile(scriptPattern)
|
||||
body = reScript.ReplaceAllString(body, fmt.Sprintf(`<script $1 script="%s$3"`, "/https://"+u.Host+"/"))
|
||||
|
||||
//body = strings.ReplaceAll(body, "srcset=\"/", "srcset=\"/https://"+u.Host+"/") // TODO: Needs a regex to rewrite the URL's
|
||||
// body = strings.ReplaceAll(body, "srcset=\"/", "srcset=\"/https://"+u.Host+"/") // TODO: Needs a regex to rewrite the URL's
|
||||
body = strings.ReplaceAll(body, "href=\"/", "href=\"/https://"+u.Host+"/")
|
||||
body = strings.ReplaceAll(body, "url('/", "url('/https://"+u.Host+"/")
|
||||
body = strings.ReplaceAll(body, "url(/", "url(/https://"+u.Host+"/")
|
||||
body = strings.ReplaceAll(body, "href=\"https://"+u.Host, "href=\"/https://"+u.Host+"/")
|
||||
|
||||
if os.Getenv("RULESET") != "" {
|
||||
body = applyRules(body, rule)
|
||||
}
|
||||
return body
|
||||
}
|
||||
|
||||
func getenv(key, fallback string) string {
|
||||
value := os.Getenv(key)
|
||||
if len(value) == 0 {
|
||||
return fallback
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func fetchRule(domain string, path string) ruleset.Rule {
|
||||
if len(rulesSet) == 0 {
|
||||
return ruleset.Rule{}
|
||||
}
|
||||
rule := ruleset.Rule{}
|
||||
for _, rule := range rulesSet {
|
||||
domains := rule.Domains
|
||||
if rule.Domain != "" {
|
||||
domains = append(domains, rule.Domain)
|
||||
}
|
||||
for _, ruleDomain := range domains {
|
||||
if ruleDomain == domain || strings.HasSuffix(domain, ruleDomain) {
|
||||
if len(rule.Paths) > 0 && !StringInSlice(path, rule.Paths) {
|
||||
continue
|
||||
}
|
||||
// return first match
|
||||
return rule
|
||||
}
|
||||
}
|
||||
}
|
||||
return rule
|
||||
}
|
||||
|
||||
func applyRules(body string, rule ruleset.Rule) string {
|
||||
if len(rulesSet) == 0 {
|
||||
return body
|
||||
}
|
||||
|
||||
for _, regexRule := range rule.RegexRules {
|
||||
re := regexp.MustCompile(regexRule.Match)
|
||||
body = re.ReplaceAllString(body, regexRule.Replace)
|
||||
}
|
||||
for _, injection := range rule.Injections {
|
||||
doc, err := goquery.NewDocumentFromReader(strings.NewReader(body))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if injection.Replace != "" {
|
||||
doc.Find(injection.Position).ReplaceWithHtml(injection.Replace)
|
||||
}
|
||||
if injection.Append != "" {
|
||||
doc.Find(injection.Position).AppendHtml(injection.Append)
|
||||
}
|
||||
if injection.Prepend != "" {
|
||||
doc.Find(injection.Position).PrependHtml(injection.Prepend)
|
||||
}
|
||||
body, err = doc.Html()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
func StringInSlice(s string, list []string) bool {
|
||||
for _, x := range list {
|
||||
if strings.HasPrefix(s, x) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"ladder/pkg/ruleset"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
@@ -13,7 +14,7 @@ import (
|
||||
|
||||
func TestProxySite(t *testing.T) {
|
||||
app := fiber.New()
|
||||
app.Get("/:url", ProxySite)
|
||||
app.Get("/:url", NewProxySiteHandler(nil))
|
||||
|
||||
req := httptest.NewRequest("GET", "/https://example.com", nil)
|
||||
resp, err := app.Test(req)
|
||||
@@ -51,7 +52,7 @@ func TestRewriteHtml(t *testing.T) {
|
||||
</html>
|
||||
`
|
||||
|
||||
actual := rewriteHtml(bodyB, u)
|
||||
actual := rewriteHtml(bodyB, u, ruleset.Rule{})
|
||||
assert.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
|
||||
21
handlers/raw.go
Normal file
21
handlers/raw.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
func Raw(c *fiber.Ctx) error {
|
||||
// Get the url from the URL
|
||||
urlQuery := c.Params("*")
|
||||
|
||||
queries := c.Queries()
|
||||
body, _, _, err := fetchSite(urlQuery, queries)
|
||||
if err != nil {
|
||||
log.Println("ERROR:", err)
|
||||
c.SendStatus(500)
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
return c.SendString(body)
|
||||
}
|
||||
@@ -11,9 +11,9 @@ import (
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
func TestDebug(t *testing.T) {
|
||||
func TestRaw(t *testing.T) {
|
||||
app := fiber.New()
|
||||
app.Get("/debug/*", Debug)
|
||||
app.Get("/raw/*", Raw)
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
@@ -34,7 +34,7 @@ func TestDebug(t *testing.T) {
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
req := httptest.NewRequest(http.MethodGet, "/debug/"+tc.url, nil)
|
||||
req := httptest.NewRequest(http.MethodGet, "/raw/"+tc.url, nil)
|
||||
resp, err := app.Test(req)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
23
handlers/ruleset.go
Normal file
23
handlers/ruleset.go
Normal file
@@ -0,0 +1,23 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func Ruleset(c *fiber.Ctx) error {
|
||||
if os.Getenv("EXPOSE_RULESET") == "false" {
|
||||
c.SendStatus(fiber.StatusForbidden)
|
||||
return c.SendString("Rules Disabled")
|
||||
}
|
||||
|
||||
body, err := yaml.Marshal(rulesSet)
|
||||
if err != nil {
|
||||
c.SendStatus(fiber.StatusInternalServerError)
|
||||
return c.SendString(err.Error())
|
||||
}
|
||||
|
||||
return c.SendString(string(body))
|
||||
}
|
||||
6
helm-chart/Chart.yaml
Normal file
6
helm-chart/Chart.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
apiVersion: v2
|
||||
name: ladder
|
||||
description: A helm chart to deploy everywall/ladder
|
||||
type: application
|
||||
version: "1.0"
|
||||
appVersion: "v0.0.11"
|
||||
27
helm-chart/README.md
Normal file
27
helm-chart/README.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Helm Chart for deployment of Ladder
|
||||
This folder contains a basic helm chart deployment for the ladder app.
|
||||
|
||||
# Deployment pre-reqs
|
||||
## Values
|
||||
Edit the values to your own preferences, with the only minimum requirement being `ingress.HOST` (line 19) being updated to your intended domain name.
|
||||
|
||||
Other variables in `values.yaml` can be updated as to your preferences, with details on each variable being listed in the main [README.md](/README.md) in the root of this repo.
|
||||
|
||||
## Defaults in K8s
|
||||
No ingress default has been specified.
|
||||
You can set this manually by adding an annotation to the ingress.yaml - if needed.
|
||||
For example, to use Traefik -
|
||||
```yaml
|
||||
metadata:
|
||||
name: ladder-ingress
|
||||
annotations:
|
||||
kubernetes.io/ingress.class: traefik
|
||||
```
|
||||
|
||||
## Helm Install
|
||||
`helm install <name> <location> -n <namespace-name> --create-namespace`
|
||||
`helm install ladder .\ladder\ -n ladder --create-namespace`
|
||||
|
||||
## Helm Upgrade
|
||||
`helm upgrade <name> <location> -n <namespace-name>`
|
||||
`helm upgrade ladder .\ladder\ -n ladder`
|
||||
55
helm-chart/templates/deployment.yaml
Normal file
55
helm-chart/templates/deployment.yaml
Normal file
@@ -0,0 +1,55 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app: ladder
|
||||
name: ladder
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: ladder
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: ladder
|
||||
spec:
|
||||
containers:
|
||||
- image: "{{ .Values.image.RELEASE }}"
|
||||
imagePullPolicy: Always
|
||||
name: ladder
|
||||
resources:
|
||||
limits:
|
||||
cpu: 250m
|
||||
memory: 128Mi
|
||||
requests:
|
||||
cpu: 250m
|
||||
memory: 128Mi
|
||||
env:
|
||||
- name: PORT
|
||||
value: "{{ .Values.env.PORT }}"
|
||||
- name: PREFORK
|
||||
value: "{{ .Values.env.PREFORK }}"
|
||||
- name: USER_AGENT
|
||||
value: "{{ .Values.env.USER_AGENT }}"
|
||||
- name: X_FORWARDED_FOR
|
||||
value: "{{ .Values.env.X_FORWARDED_FOR }}"
|
||||
- name: USERPASS
|
||||
value: "{{ .Values.env.USERPASS }}"
|
||||
- name: LOG_URLS
|
||||
value: "{{ .Values.env.LOG_URLS }}"
|
||||
- name: DISABLE_FORM
|
||||
value: "{{ .Values.env.DISABLE_FORM }}"
|
||||
- name: FORM_PATH
|
||||
value: "{{ .Values.env.FORM_PATH }}"
|
||||
- name: RULESET
|
||||
value: "{{ .Values.env.RULESET }}"
|
||||
- name: EXPOSE_RULESET
|
||||
value: "{{ .Values.env.EXPOSE_RULESET }}"
|
||||
- name: ALLOWED_DOMAINS
|
||||
value: "{{ .Values.env.ALLOWED_DOMAINS }}"
|
||||
- name: ALLOWED_DOMAINS_RULESET
|
||||
value: "{{ .Values.env.ALLOWED_DOMAINS_RULESET }}"
|
||||
restartPolicy: Always
|
||||
terminationGracePeriodSeconds: 30
|
||||
17
helm-chart/templates/ingress.yaml
Normal file
17
helm-chart/templates/ingress.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: ladder-ingress
|
||||
spec:
|
||||
rules:
|
||||
- host: "{{ .Values.ingress.HOST }}"
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: ladder-service
|
||||
port:
|
||||
number: {{ .Values.ingress.PORT }}
|
||||
14
helm-chart/templates/service.yaml
Normal file
14
helm-chart/templates/service.yaml
Normal file
@@ -0,0 +1,14 @@
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: ladder-service
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: ladder
|
||||
ports:
|
||||
- name: http
|
||||
port: {{ .Values.ingress.PORT }}
|
||||
protocol: TCP
|
||||
targetPort: {{ .Values.env.PORT }}
|
||||
20
helm-chart/values.yaml
Normal file
20
helm-chart/values.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
image:
|
||||
RELEASE: ghcr.io/everywall/ladder:latest
|
||||
|
||||
env:
|
||||
PORT: 8080
|
||||
PREFORK: "false"
|
||||
USER_AGENT: "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
|
||||
X_FORWARDED_FOR:
|
||||
USERPASS: ""
|
||||
LOG_URLS: "true"
|
||||
DISABLE_FORM: "false"
|
||||
FORM_PATH: ""
|
||||
RULESET: "https://raw.githubusercontent.com/everywall/ladder/main/ruleset.yaml"
|
||||
EXPOSE_RULESET: "true"
|
||||
ALLOWED_DOMAINS: ""
|
||||
ALLOWED_DOMAINS_RULESET: "false"
|
||||
|
||||
ingress:
|
||||
HOST: "ladder.domain.com"
|
||||
PORT: 80
|
||||
105
internal/cli/ruleset_merge.go
Normal file
105
internal/cli/ruleset_merge.go
Normal file
@@ -0,0 +1,105 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"ladder/pkg/ruleset"
|
||||
"os"
|
||||
|
||||
"golang.org/x/term"
|
||||
)
|
||||
|
||||
// HandleRulesetMerge merges a set of ruleset files, specified by the rulesetPath or RULESET env variable, into either YAML or Gzip format.
|
||||
// Exits the program with an error message if the ruleset path is not provided or if loading the ruleset fails.
|
||||
//
|
||||
// Parameters:
|
||||
// - rulesetPath: A pointer to a string specifying the path to the ruleset file.
|
||||
// - mergeRulesets: A pointer to a boolean indicating if a merge operation should be performed.
|
||||
// - mergeRulesetsGzip: A pointer to a boolean indicating if the merge should be in Gzip format.
|
||||
// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is printed to stdout.
|
||||
//
|
||||
// Returns:
|
||||
// - An error if the ruleset loading or merging process fails, otherwise nil.
|
||||
func HandleRulesetMerge(rulesetPath *string, mergeRulesets *bool, mergeRulesetsGzip *bool, mergeRulesetsOutput *string) error {
|
||||
if *rulesetPath == "" {
|
||||
*rulesetPath = os.Getenv("RULESET")
|
||||
}
|
||||
if *rulesetPath == "" {
|
||||
fmt.Println("ERROR: no ruleset provided. Try again with --ruleset <ruleset.yaml>")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
rs, err := ruleset.NewRuleset(*rulesetPath)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if *mergeRulesetsGzip {
|
||||
return gzipMerge(rs, mergeRulesetsOutput)
|
||||
}
|
||||
return yamlMerge(rs, mergeRulesetsOutput)
|
||||
}
|
||||
|
||||
// gzipMerge takes a RuleSet and an optional output file path pointer. It compresses the RuleSet into Gzip format.
|
||||
// If the output file path is provided, the compressed data is written to this file. Otherwise, it prints a warning
|
||||
// and outputs the binary data to stdout
|
||||
//
|
||||
// Parameters:
|
||||
// - rs: The ruleset.RuleSet to be compressed.
|
||||
// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is directed to stdout.
|
||||
//
|
||||
// Returns:
|
||||
// - An error if compression or file writing fails, otherwise nil.
|
||||
func gzipMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
|
||||
gzip, err := rs.GzipYaml()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if *mergeRulesetsOutput != "" {
|
||||
out, err := os.Create(*mergeRulesetsOutput)
|
||||
defer out.Close()
|
||||
_, err = io.Copy(out, gzip)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if term.IsTerminal(int(os.Stdout.Fd())) {
|
||||
println("WARNING: binary output can mess up your terminal. Use '--merge-rulesets-output <ruleset.gz>' or pipe it to a file.")
|
||||
os.Exit(1)
|
||||
}
|
||||
_, err = io.Copy(os.Stdout, gzip)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// yamlMerge takes a RuleSet and an optional output file path pointer. It converts the RuleSet into YAML format.
|
||||
// If the output file path is provided, the YAML data is written to this file. If not, the YAML data is printed to stdout.
|
||||
//
|
||||
// Parameters:
|
||||
// - rs: The ruleset.RuleSet to be converted to YAML.
|
||||
// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is printed to stdout.
|
||||
//
|
||||
// Returns:
|
||||
// - An error if YAML conversion or file writing fails, otherwise nil.
|
||||
func yamlMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
|
||||
yaml, err := rs.Yaml()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if *mergeRulesetsOutput == "" {
|
||||
fmt.Printf(yaml)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
err = os.WriteFile(*mergeRulesetsOutput, []byte(yaml), fs.FileMode(os.O_RDWR))
|
||||
if err != nil {
|
||||
return fmt.Errorf("ERROR: failed to write merged YAML ruleset to '%s'\n", *mergeRulesetsOutput)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
9
package.json
Normal file
9
package.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"scripts": {
|
||||
"build": "pnpx tailwindcss -i ./styles/input.css -o ./styles/output.css --build && pnpx minify ./styles/output.css > ./cmd/styles.css"
|
||||
},
|
||||
"devDependencies": {
|
||||
"minify": "^10.5.2",
|
||||
"tailwindcss": "^3.3.5"
|
||||
}
|
||||
}
|
||||
286
pkg/ruleset/ruleset.go
Normal file
286
pkg/ruleset/ruleset.go
Normal file
@@ -0,0 +1,286 @@
|
||||
package ruleset
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"compress/gzip"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type Regex struct {
|
||||
Match string `yaml:"match"`
|
||||
Replace string `yaml:"replace"`
|
||||
}
|
||||
type KV struct {
|
||||
Key string `yaml:"key"`
|
||||
Value string `yaml:"value"`
|
||||
}
|
||||
|
||||
type RuleSet []Rule
|
||||
|
||||
type Rule struct {
|
||||
Domain string `yaml:"domain,omitempty"`
|
||||
Domains []string `yaml:"domains,omitempty"`
|
||||
Paths []string `yaml:"paths,omitempty"`
|
||||
Headers struct {
|
||||
UserAgent string `yaml:"user-agent,omitempty"`
|
||||
XForwardedFor string `yaml:"x-forwarded-for,omitempty"`
|
||||
Referer string `yaml:"referer,omitempty"`
|
||||
Cookie string `yaml:"cookie,omitempty"`
|
||||
CSP string `yaml:"content-security-policy,omitempty"`
|
||||
} `yaml:"headers,omitempty"`
|
||||
GoogleCache bool `yaml:"googleCache,omitempty"`
|
||||
RegexRules []Regex `yaml:"regexRules,omitempty"`
|
||||
|
||||
UrlMods struct {
|
||||
Domain []Regex `yaml:"domain,omitempty"`
|
||||
Path []Regex `yaml:"path,omitempty"`
|
||||
Query []KV `yaml:"query,omitempty"`
|
||||
} `yaml:"urlMods,omitempty"`
|
||||
|
||||
Injections []struct {
|
||||
Position string `yaml:"position,omitempty"`
|
||||
Append string `yaml:"append,omitempty"`
|
||||
Prepend string `yaml:"prepend,omitempty"`
|
||||
Replace string `yaml:"replace,omitempty"`
|
||||
} `yaml:"injections,omitempty"`
|
||||
}
|
||||
|
||||
// NewRulesetFromEnv creates a new RuleSet based on the RULESET environment variable.
|
||||
// It logs a warning and returns an empty RuleSet if the RULESET environment variable is not set.
|
||||
// If the RULESET is set but the rules cannot be loaded, it panics.
|
||||
func NewRulesetFromEnv() RuleSet {
|
||||
rulesPath, ok := os.LookupEnv("RULESET")
|
||||
if !ok {
|
||||
log.Printf("WARN: No ruleset specified. Set the `RULESET` environment variable to load one for a better success rate.")
|
||||
return RuleSet{}
|
||||
}
|
||||
ruleSet, err := NewRuleset(rulesPath)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
return ruleSet
|
||||
}
|
||||
|
||||
// NewRuleset loads a RuleSet from a given string of rule paths, separated by semicolons.
|
||||
// It supports loading rules from both local file paths and remote URLs.
|
||||
// Returns a RuleSet and an error if any issues occur during loading.
|
||||
func NewRuleset(rulePaths string) (RuleSet, error) {
|
||||
ruleSet := RuleSet{}
|
||||
errs := []error{}
|
||||
|
||||
rp := strings.Split(rulePaths, ";")
|
||||
var remoteRegex = regexp.MustCompile(`^https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)`)
|
||||
for _, rule := range rp {
|
||||
rulePath := strings.Trim(rule, " ")
|
||||
var err error
|
||||
|
||||
isRemote := remoteRegex.MatchString(rulePath)
|
||||
if isRemote {
|
||||
err = ruleSet.loadRulesFromRemoteFile(rulePath)
|
||||
} else {
|
||||
err = ruleSet.loadRulesFromLocalDir(rulePath)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
e := fmt.Errorf("WARN: failed to load ruleset from '%s'", rulePath)
|
||||
errs = append(errs, errors.Join(e, err))
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if len(errs) != 0 {
|
||||
e := fmt.Errorf("WARN: failed to load %d rulesets", len(rp))
|
||||
errs = append(errs, e)
|
||||
// panic if the user specified a local ruleset, but it wasn't found on disk
|
||||
// don't fail silently
|
||||
for _, err := range errs {
|
||||
if errors.Is(os.ErrNotExist, err) {
|
||||
e := fmt.Errorf("PANIC: ruleset '%s' not found", err)
|
||||
panic(errors.Join(e, err))
|
||||
}
|
||||
}
|
||||
// else, bubble up any errors, such as syntax or remote host issues
|
||||
return ruleSet, errors.Join(errs...)
|
||||
}
|
||||
ruleSet.PrintStats()
|
||||
return ruleSet, nil
|
||||
}
|
||||
|
||||
// ================== RULESET loading logic ===================================
|
||||
|
||||
// loadRulesFromLocalDir loads rules from a local directory specified by the path.
|
||||
// It walks through the directory, loading rules from YAML files.
|
||||
// Returns an error if the directory cannot be accessed
|
||||
// If there is an issue loading any file, it will be skipped
|
||||
func (rs *RuleSet) loadRulesFromLocalDir(path string) error {
|
||||
_, err := os.Stat(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
yamlRegex := regexp.MustCompile(`.*\.ya?ml`)
|
||||
|
||||
err = filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
if isYaml := yamlRegex.MatchString(path); !isYaml {
|
||||
return nil
|
||||
}
|
||||
|
||||
err = rs.loadRulesFromLocalFile(path)
|
||||
if err != nil {
|
||||
log.Printf("WARN: failed to load directory ruleset '%s': %s, skipping", path, err)
|
||||
return nil
|
||||
}
|
||||
log.Printf("INFO: loaded ruleset %s\n", path)
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadRulesFromLocalFile loads rules from a local YAML file specified by the path.
|
||||
// Returns an error if the file cannot be read or if there's a syntax error in the YAML.
|
||||
func (rs *RuleSet) loadRulesFromLocalFile(path string) error {
|
||||
yamlFile, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
e := fmt.Errorf("failed to read rules from local file: '%s'", path)
|
||||
return errors.Join(e, err)
|
||||
}
|
||||
|
||||
var r RuleSet
|
||||
err = yaml.Unmarshal(yamlFile, &r)
|
||||
if err != nil {
|
||||
e := fmt.Errorf("failed to load rules from local file, possible syntax error in '%s'", path)
|
||||
ee := errors.Join(e, err)
|
||||
if _, ok := os.LookupEnv("DEBUG"); ok {
|
||||
debugPrintRule(string(yamlFile), ee)
|
||||
}
|
||||
return ee
|
||||
}
|
||||
*rs = append(*rs, r...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadRulesFromRemoteFile loads rules from a remote URL.
|
||||
// It supports plain and gzip compressed content.
|
||||
// Returns an error if there's an issue accessing the URL or if there's a syntax error in the YAML.
|
||||
func (rs *RuleSet) loadRulesFromRemoteFile(rulesUrl string) error {
|
||||
var r RuleSet
|
||||
resp, err := http.Get(rulesUrl)
|
||||
if err != nil {
|
||||
e := fmt.Errorf("failed to load rules from remote url '%s'", rulesUrl)
|
||||
return errors.Join(e, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode >= 400 {
|
||||
e := fmt.Errorf("failed to load rules from remote url (%s) on '%s'", resp.Status, rulesUrl)
|
||||
return errors.Join(e, err)
|
||||
}
|
||||
|
||||
var reader io.Reader
|
||||
isGzip := strings.HasSuffix(rulesUrl, ".gz") || strings.HasSuffix(rulesUrl, ".gzip") || resp.Header.Get("content-encoding") == "gzip"
|
||||
|
||||
if isGzip {
|
||||
reader, err = gzip.NewReader(resp.Body)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create gzip reader for URL '%s' with status code '%s': %w", rulesUrl, resp.Status, err)
|
||||
}
|
||||
} else {
|
||||
reader = resp.Body
|
||||
}
|
||||
|
||||
err = yaml.NewDecoder(reader).Decode(&r)
|
||||
|
||||
if err != nil {
|
||||
e := fmt.Errorf("failed to load rules from remote url '%s' with status code '%s' and possible syntax error", rulesUrl, resp.Status)
|
||||
ee := errors.Join(e, err)
|
||||
return ee
|
||||
}
|
||||
|
||||
*rs = append(*rs, r...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ================= utility methods ==========================
|
||||
|
||||
// Yaml returns the ruleset as a Yaml string
|
||||
func (rs *RuleSet) Yaml() (string, error) {
|
||||
y, err := yaml.Marshal(rs)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(y), nil
|
||||
}
|
||||
|
||||
// GzipYaml returns an io.Reader that streams the Gzip-compressed YAML representation of the RuleSet.
|
||||
func (rs *RuleSet) GzipYaml() (io.Reader, error) {
|
||||
pr, pw := io.Pipe()
|
||||
|
||||
go func() {
|
||||
defer pw.Close()
|
||||
|
||||
gw := gzip.NewWriter(pw)
|
||||
defer gw.Close()
|
||||
|
||||
if err := yaml.NewEncoder(gw).Encode(rs); err != nil {
|
||||
gw.Close() // Ensure to close the gzip writer
|
||||
pw.CloseWithError(err)
|
||||
return
|
||||
}
|
||||
}()
|
||||
|
||||
return pr, nil
|
||||
}
|
||||
|
||||
// Domains extracts and returns a slice of all domains present in the RuleSet.
|
||||
func (rs *RuleSet) Domains() []string {
|
||||
var domains []string
|
||||
for _, rule := range *rs {
|
||||
domains = append(domains, rule.Domain)
|
||||
domains = append(domains, rule.Domains...)
|
||||
}
|
||||
return domains
|
||||
}
|
||||
|
||||
// DomainCount returns the count of unique domains present in the RuleSet.
|
||||
func (rs *RuleSet) DomainCount() int {
|
||||
return len(rs.Domains())
|
||||
}
|
||||
|
||||
// Count returns the total number of rules in the RuleSet.
|
||||
func (rs *RuleSet) Count() int {
|
||||
return len(*rs)
|
||||
}
|
||||
|
||||
// PrintStats logs the number of rules and domains loaded in the RuleSet.
|
||||
func (rs *RuleSet) PrintStats() {
|
||||
log.Printf("INFO: Loaded %d rules for %d domains\n", rs.Count(), rs.DomainCount())
|
||||
}
|
||||
|
||||
// debugPrintRule dumps a rule and its associated error to stdout, framed by
// banner lines, for interactive debugging of ruleset parse failures.
func debugPrintRule(rule string, err error) {
	const header = "------------------------------ BEGIN DEBUG RULESET -----------------------------"
	const divider = "--------------------------------------------------------------------------------"
	const footer = "------------------------------ END DEBUG RULESET -------------------------------"
	fmt.Println(header)
	fmt.Printf("%s\n", err.Error())
	fmt.Println(divider)
	fmt.Println(rule)
	fmt.Println(footer)
}
|
||||
153
pkg/ruleset/ruleset_test.go
Normal file
153
pkg/ruleset/ruleset_test.go
Normal file
@@ -0,0 +1,153 @@
|
||||
package ruleset
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
var (
	// validYAML is a minimal well-formed ruleset fixture: a single domain
	// with one regex rewrite rule (http -> https).
	validYAML = `
- domain: example.com
  regexRules:
    - match: "^http:"
      replace: "https:"`

	// invalidYAML is intentionally malformed: the domain is a YAML sequence
	// where a scalar is expected, and the final match pattern is an
	// unterminated character class. Used to exercise error paths.
	invalidYAML = `
- domain: [thisIsATestYamlThatIsMeantToFail.example]
  regexRules:
    - match: "^http:"
      replace: "https:"
    - match: "[incomplete"`
)
|
||||
|
||||
// TestLoadRulesFromRemoteFile spins up a local fiber HTTP server and checks
// that rulesets load correctly over HTTP in plaintext and gzip form, and via
// the RULESET environment variable.
// NOTE(review): the handlers call t.Errorf from server goroutines, and the
// fixed 1s sleep is a startup race — consider app.Test() or a readiness
// probe instead; confirm before relying on this in CI.
func TestLoadRulesFromRemoteFile(t *testing.T) {
	app := fiber.New()
	defer app.Shutdown()

	// Serves the well-formed fixture as plain YAML.
	app.Get("/valid-config.yml", func(c *fiber.Ctx) error {
		c.SendString(validYAML)
		return nil
	})
	// Serves the malformed fixture (registered but not asserted on below).
	app.Get("/invalid-config.yml", func(c *fiber.Ctx) error {
		c.SendString(invalidYAML)
		return nil
	})

	// Serves the valid fixture gzip-compressed; the loader detects gzip
	// from the ".gz" URL suffix.
	app.Get("/valid-config.gz", func(c *fiber.Ctx) error {
		c.Set("Content-Type", "application/octet-stream")
		rs, err := loadRuleFromString(validYAML)
		if err != nil {
			t.Errorf("failed to load valid yaml from string: %s", err.Error())
		}
		s, err := rs.GzipYaml()
		if err != nil {
			t.Errorf("failed to load gzip serialize yaml: %s", err.Error())
		}

		err = c.SendStream(s)
		if err != nil {
			t.Errorf("failed to stream gzip serialized yaml: %s", err.Error())
		}
		return nil
	})

	// Start the server in a goroutine
	go func() {
		if err := app.Listen("127.0.0.1:9999"); err != nil {
			t.Errorf("Server failed to start: %s", err.Error())
		}
	}()

	// Wait for the server to start
	time.Sleep(time.Second * 1)

	// Plaintext remote load.
	rs, err := NewRuleset("http://127.0.0.1:9999/valid-config.yml")
	if err != nil {
		t.Errorf("failed to load plaintext ruleset from http server: %s", err.Error())
	}
	assert.Equal(t, rs[0].Domain, "example.com")

	// Gzip remote load.
	rs, err = NewRuleset("http://127.0.0.1:9999/valid-config.gz")
	if err != nil {
		t.Errorf("failed to load gzipped ruleset from http server: %s", err.Error())
	}
	assert.Equal(t, rs[0].Domain, "example.com")

	// Environment-variable driven load (NewRulesetFromEnv is defined
	// elsewhere in the package; presumably it reads RULESET).
	os.Setenv("RULESET", "http://127.0.0.1:9999/valid-config.gz")
	rs = NewRulesetFromEnv()
	if !assert.Equal(t, rs[0].Domain, "example.com") {
		t.Error("expected no errors loading ruleset from gzip url using environment variable, but got one")
	}
}
|
||||
|
||||
func loadRuleFromString(yaml string) (RuleSet, error) {
|
||||
// Create a temporary file and load it
|
||||
tmpFile, _ := os.CreateTemp("", "ruleset*.yaml")
|
||||
defer os.Remove(tmpFile.Name())
|
||||
tmpFile.WriteString(yaml)
|
||||
rs := RuleSet{}
|
||||
err := rs.loadRulesFromLocalFile(tmpFile.Name())
|
||||
return rs, err
|
||||
}
|
||||
|
||||
// TestLoadRulesFromLocalFile tests the loading of rules from a local YAML file.
|
||||
func TestLoadRulesFromLocalFile(t *testing.T) {
|
||||
rs, err := loadRuleFromString(validYAML)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to load rules from valid YAML: %s", err)
|
||||
}
|
||||
assert.Equal(t, rs[0].Domain, "example.com")
|
||||
assert.Equal(t, rs[0].RegexRules[0].Match, "^http:")
|
||||
assert.Equal(t, rs[0].RegexRules[0].Replace, "https:")
|
||||
|
||||
_, err = loadRuleFromString(invalidYAML)
|
||||
if err == nil {
|
||||
t.Errorf("Expected an error when loading invalid YAML, but got none")
|
||||
}
|
||||
}
|
||||
|
||||
// TestLoadRulesFromLocalDir tests the loading of rules from a local nested directory full of yaml rulesets
|
||||
func TestLoadRulesFromLocalDir(t *testing.T) {
|
||||
// Create a temporary directory
|
||||
baseDir, err := os.MkdirTemp("", "ruleset_test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temporary directory: %s", err)
|
||||
}
|
||||
defer os.RemoveAll(baseDir)
|
||||
|
||||
// Create a nested subdirectory
|
||||
nestedDir := filepath.Join(baseDir, "nested")
|
||||
err = os.Mkdir(nestedDir, 0755)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create nested directory: %s", err)
|
||||
}
|
||||
|
||||
// Create a nested subdirectory
|
||||
nestedTwiceDir := filepath.Join(nestedDir, "nestedTwice")
|
||||
err = os.Mkdir(nestedTwiceDir, 0755)
|
||||
|
||||
testCases := []string{"test.yaml", "test2.yaml", "test-3.yaml", "test 4.yaml", "1987.test.yaml.yml", "foobar.example.com.yaml", "foobar.com.yml"}
|
||||
for _, fileName := range testCases {
|
||||
filePath := filepath.Join(nestedDir, "2x-"+fileName)
|
||||
os.WriteFile(filePath, []byte(validYAML), 0644)
|
||||
filePath = filepath.Join(nestedDir, fileName)
|
||||
os.WriteFile(filePath, []byte(validYAML), 0644)
|
||||
filePath = filepath.Join(baseDir, "base-"+fileName)
|
||||
os.WriteFile(filePath, []byte(validYAML), 0644)
|
||||
}
|
||||
rs := RuleSet{}
|
||||
err = rs.loadRulesFromLocalDir(baseDir)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, rs.Count(), len(testCases)*3)
|
||||
|
||||
for _, rule := range rs {
|
||||
assert.Equal(t, rule.Domain, "example.com")
|
||||
assert.Equal(t, rule.RegexRules[0].Match, "^http:")
|
||||
assert.Equal(t, rule.RegexRules[0].Replace, "https:")
|
||||
}
|
||||
}
|
||||
427
proxychain/proxychain.go
Normal file
427
proxychain/proxychain.go
Normal file
@@ -0,0 +1,427 @@
|
||||
package proxychain
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"ladder/pkg/ruleset"
|
||||
rr "ladder/proxychain/responsemodifers/rewriters"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
)
|
||||
|
||||
/*
|
||||
ProxyChain manages the process of forwarding an HTTP request to an upstream server,
|
||||
applying request and response modifications along the way.
|
||||
|
||||
- It accepts incoming HTTP requests (as a Fiber *ctx), and applies
|
||||
request modifiers (ReqMods) and response modifiers (ResMods) before passing the
|
||||
upstream response back to the client.
|
||||
|
||||
- ProxyChains can be reused to avoid memory allocations. However, they are not concurrent-safe
|
||||
so a ProxyChainPool should be used with mutexes to avoid memory errors.
|
||||
|
||||
---
|
||||
|
||||
# EXAMPLE
|
||||
|
||||
```
|
||||
|
||||
import (
|
||||
|
||||
rx "ladder/pkg/proxychain/requestmodifers"
|
||||
tx "ladder/pkg/proxychain/responsemodifers"
|
||||
"ladder/pkg/proxychain/responsemodifers/rewriters"
|
||||
"ladder/internal/proxychain"
|
||||
|
||||
)
|
||||
|
||||
proxychain.NewProxyChain().
|
||||
|
||||
SetFiberCtx(c).
|
||||
SetRequestModifications(
|
||||
rx.BlockOutgoingCookies(),
|
||||
rx.SpoofOrigin(),
|
||||
rx.SpoofReferrer(),
|
||||
).
|
||||
SetResultModifications(
|
||||
tx.BlockIncomingCookies(),
|
||||
tx.RewriteHTMLResourceURLs()
|
||||
).
|
||||
Execute()
|
||||
|
||||
```
|
||||
|
||||
client ladder service upstream
|
||||
|
||||
┌─────────┐ ┌────────────────────────┐ ┌─────────┐
|
||||
│ │GET │ │ │ │
|
||||
│ req────┼───► ProxyChain │ │ │
|
||||
│ │ │ │ │ │ │
|
||||
│ │ │ ▼ │ │ │
|
||||
│ │ │ apply │ │ │
|
||||
│ │ │ RequestModifications │ │ │
|
||||
│ │ │ │ │ │ │
|
||||
│ │ │ ▼ │ │ │
|
||||
│ │ │ send GET │ │ │
|
||||
│ │ │ Request req────────┼─► │ │
|
||||
│ │ │ │ │ │
|
||||
│ │ │ 200 OK │ │ │
|
||||
│ │ │ ┌────────────────┼─response │
|
||||
│ │ │ ▼ │ │ │
|
||||
│ │ │ apply │ │ │
|
||||
│ │ │ ResultModifications │ │ │
|
||||
│ │ │ │ │ │ │
|
||||
│ │◄───┼───────┘ │ │ │
|
||||
│ │ │ 200 OK │ │ │
|
||||
│ │ │ │ │ │
|
||||
└─────────┘ └────────────────────────┘ └─────────┘
|
||||
*/
|
||||
type ProxyChain struct {
	// Context is the fiber request context for the client request being proxied.
	Context *fiber.Ctx
	// Client is the HTTP client used for the upstream request.
	Client *http.Client
	// Request is the outgoing upstream request being built up by modifiers.
	Request *http.Request
	// Response is the upstream response, populated after the request is sent.
	Response *http.Response
	// requestModifications run against Request before it is sent upstream.
	requestModifications []RequestModification
	// resultModifications run against Response before it is returned to the client.
	resultModifications []ResponseModification
	// htmlTokenRewriters stream-rewrite HTML response bodies.
	htmlTokenRewriters []rr.IHTMLTokenRewriter
	// Ruleset holds per-domain modification rules (application TODO elsewhere).
	Ruleset *ruleset.RuleSet
	// debugMode enables verbose logging of modification steps.
	debugMode bool
	// abortErr is set when the chain has been aborted; it short-circuits Execute.
	abortErr error
}
|
||||
|
||||
// ProxyStrategy is a pre-built proxychain with purpose-built defaults.
type ProxyStrategy ProxyChain

// A RequestModification is a function that should operate on the
// ProxyChain Req or Client field, using the fiber ctx as needed.
// It is applied before the upstream request is sent.
type RequestModification func(*ProxyChain) error

// A ResponseModification is a function that should operate on the
// ProxyChain Res (http result) & Body (buffered http response body) field.
// It is applied after the upstream response is received.
type ResponseModification func(*ProxyChain) error
|
||||
|
||||
// SetRequestModifications sets the ProxyChain's request modifers
|
||||
// the modifier will not fire until ProxyChain.Execute() is run.
|
||||
func (chain *ProxyChain) SetRequestModifications(mods ...RequestModification) *ProxyChain {
|
||||
chain.requestModifications = mods
|
||||
return chain
|
||||
}
|
||||
|
||||
// AddRequestModifications sets the ProxyChain's request modifers
|
||||
// the modifier will not fire until ProxyChain.Execute() is run.
|
||||
func (chain *ProxyChain) AddRequestModifications(mods ...RequestModification) *ProxyChain {
|
||||
chain.requestModifications = append(chain.requestModifications, mods...)
|
||||
return chain
|
||||
}
|
||||
|
||||
// AddResponseModifications sets the ProxyChain's response modifers
|
||||
// the modifier will not fire until ProxyChain.Execute() is run.
|
||||
func (chain *ProxyChain) AddResponseModifications(mods ...ResponseModification) *ProxyChain {
|
||||
chain.resultModifications = mods
|
||||
return chain
|
||||
}
|
||||
|
||||
// Adds a ruleset to ProxyChain
|
||||
func (chain *ProxyChain) AddRuleset(rs *ruleset.RuleSet) *ProxyChain {
|
||||
chain.Ruleset = rs
|
||||
// TODO: add _applyRuleset method
|
||||
return chain
|
||||
}
|
||||
|
||||
func (chain *ProxyChain) _initialize_request() (*http.Request, error) {
|
||||
if chain.Context == nil {
|
||||
chain.abortErr = chain.abort(errors.New("no context set"))
|
||||
return nil, chain.abortErr
|
||||
}
|
||||
// initialize a request (without url)
|
||||
req, err := http.NewRequest(chain.Context.Method(), "", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
chain.Request = req
|
||||
switch chain.Context.Method() {
|
||||
case "GET":
|
||||
case "DELETE":
|
||||
case "HEAD":
|
||||
case "OPTIONS":
|
||||
break
|
||||
case "POST":
|
||||
case "PUT":
|
||||
case "PATCH":
|
||||
// stream content of body from client request to upstream request
|
||||
chain.Request.Body = io.NopCloser(chain.Context.Request().BodyStream())
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported request method from client: '%s'", chain.Context.Method())
|
||||
}
|
||||
|
||||
/*
|
||||
// copy client request headers to upstream request headers
|
||||
forwardHeaders := func(key []byte, val []byte) {
|
||||
req.Header.Set(string(key), string(val))
|
||||
}
|
||||
clientHeaders := &chain.Context.Request().Header
|
||||
clientHeaders.VisitAll(forwardHeaders)
|
||||
*/
|
||||
|
||||
return req, nil
|
||||
}
|
||||
|
||||
// reconstructUrlFromReferer reconstructs the URL using the referer's scheme, host, and the relative path / queries
|
||||
func reconstructUrlFromReferer(referer *url.URL, relativeUrl *url.URL) (*url.URL, error) {
|
||||
|
||||
// Extract the real url from referer path
|
||||
realUrl, err := url.Parse(strings.TrimPrefix(referer.Path, "/"))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing real URL from referer '%s': %v", referer.Path, err)
|
||||
}
|
||||
|
||||
if realUrl.Scheme == "" || realUrl.Host == "" {
|
||||
return nil, fmt.Errorf("invalid referer URL: '%s' on request '%s", referer.String(), relativeUrl.String())
|
||||
}
|
||||
|
||||
log.Printf("rewrite relative URL using referer: '%s' -> '%s'\n", relativeUrl.String(), realUrl.String())
|
||||
|
||||
return &url.URL{
|
||||
Scheme: referer.Scheme,
|
||||
Host: referer.Host,
|
||||
Path: realUrl.Path,
|
||||
RawQuery: realUrl.RawQuery,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// prevents calls like: http://localhost:8080/http://localhost:8080
|
||||
func preventRecursiveProxyRequest(urlQuery *url.URL, baseProxyURL string) *url.URL {
|
||||
u := urlQuery.String()
|
||||
isRecursive := strings.HasPrefix(u, baseProxyURL) || u == baseProxyURL
|
||||
if !isRecursive {
|
||||
return urlQuery
|
||||
}
|
||||
|
||||
fixedURL, err := url.Parse(strings.TrimPrefix(strings.TrimPrefix(urlQuery.String(), baseProxyURL), "/"))
|
||||
if err != nil {
|
||||
log.Printf("proxychain: failed to fix recursive request: '%s' -> '%s\n'", baseProxyURL, u)
|
||||
return urlQuery
|
||||
}
|
||||
return preventRecursiveProxyRequest(fixedURL, baseProxyURL)
|
||||
}
|
||||
|
||||
// extractUrl extracts a URL from the request ctx. If the URL in the request
|
||||
// is a relative path, it reconstructs the full URL using the referer header.
|
||||
func (chain *ProxyChain) extractUrl() (*url.URL, error) {
|
||||
reqUrl := chain.Context.Params("*")
|
||||
|
||||
// sometimes client requests doubleroot '//'
|
||||
// there is a bug somewhere else, but this is a workaround until we find it
|
||||
if strings.HasPrefix(reqUrl, "/") || strings.HasPrefix(reqUrl, `%2F`) {
|
||||
reqUrl = strings.TrimPrefix(reqUrl, "/")
|
||||
reqUrl = strings.TrimPrefix(reqUrl, `%2F`)
|
||||
}
|
||||
|
||||
// unescape url query
|
||||
uReqUrl, err := url.QueryUnescape(reqUrl)
|
||||
if err == nil {
|
||||
reqUrl = uReqUrl
|
||||
}
|
||||
|
||||
urlQuery, err := url.Parse(reqUrl)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing request URL '%s': %v", reqUrl, err)
|
||||
}
|
||||
|
||||
// prevent recursive proxy requests
|
||||
fullURL := chain.Context.Request().URI()
|
||||
proxyURL := fmt.Sprintf("%s://%s", fullURL.Scheme(), fullURL.Host())
|
||||
urlQuery = preventRecursiveProxyRequest(urlQuery, proxyURL)
|
||||
|
||||
// Handle standard paths
|
||||
// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
|
||||
isRelativePath := urlQuery.Scheme == ""
|
||||
if !isRelativePath {
|
||||
return urlQuery, nil
|
||||
}
|
||||
|
||||
// Handle relative URLs
|
||||
// eg: https://localhost:8080/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
|
||||
referer, err := url.Parse(chain.Context.Get("referer"))
|
||||
relativePath := urlQuery
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing referer URL from req: '%s': %v", relativePath, err)
|
||||
}
|
||||
return reconstructUrlFromReferer(referer, relativePath)
|
||||
}
|
||||
|
||||
// AddBodyRewriter adds a HTMLTokenRewriter to the chain.
|
||||
// - HTMLTokenRewriters modify the body response by parsing the HTML
|
||||
// and making changes to the DOM as it streams to the client
|
||||
// - In most cases, you don't need to use this method. It's usually called by
|
||||
// a ResponseModifier to batch queue changes for performance reasons.
|
||||
func (chain *ProxyChain) AddHTMLTokenRewriter(rr rr.IHTMLTokenRewriter) *ProxyChain {
|
||||
chain.htmlTokenRewriters = append(chain.htmlTokenRewriters, rr)
|
||||
return chain
|
||||
}
|
||||
|
||||
// SetFiberCtx takes the request ctx from the client
|
||||
// for the modifiers and execute function to use.
|
||||
// it must be set everytime a new request comes through
|
||||
// if the upstream request url cannot be extracted from the ctx,
|
||||
// a 500 error will be sent back to the client
|
||||
func (chain *ProxyChain) SetFiberCtx(ctx *fiber.Ctx) *ProxyChain {
|
||||
chain.Context = ctx
|
||||
|
||||
// initialize the request and prepare it for modification
|
||||
req, err := chain._initialize_request()
|
||||
if err != nil {
|
||||
chain.abortErr = chain.abort(err)
|
||||
}
|
||||
chain.Request = req
|
||||
|
||||
// extract the URL for the request and add it to the new request
|
||||
url, err := chain.extractUrl()
|
||||
if err != nil {
|
||||
chain.abortErr = chain.abort(err)
|
||||
}
|
||||
chain.Request.URL = url
|
||||
fmt.Printf("extracted URL: %s\n", chain.Request.URL)
|
||||
|
||||
return chain
|
||||
}
|
||||
|
||||
func (chain *ProxyChain) validateCtxIsSet() error {
|
||||
if chain.Context != nil {
|
||||
return nil
|
||||
}
|
||||
err := errors.New("proxyChain was called without setting a fiber Ctx. Use ProxyChain.SetCtx()")
|
||||
chain.abortErr = chain.abort(err)
|
||||
return chain.abortErr
|
||||
}
|
||||
|
||||
// SetHttpClient sets a new upstream http client transport
|
||||
// useful for modifying TLS
|
||||
func (chain *ProxyChain) SetHttpClient(httpClient *http.Client) *ProxyChain {
|
||||
chain.Client = httpClient
|
||||
return chain
|
||||
}
|
||||
|
||||
// SetVerbose changes the logging behavior to print
|
||||
// the modification steps and applied rulesets for debugging
|
||||
func (chain *ProxyChain) SetDebugLogging(isDebugMode bool) *ProxyChain {
|
||||
chain.debugMode = isDebugMode
|
||||
return chain
|
||||
}
|
||||
|
||||
// abort proxychain and return 500 error to client
|
||||
// this will prevent Execute from firing and reset the state
|
||||
// returns the initial error enriched with context
|
||||
func (chain *ProxyChain) abort(err error) error {
|
||||
//defer chain._reset()
|
||||
chain.abortErr = err
|
||||
chain.Context.Response().SetStatusCode(500)
|
||||
e := fmt.Errorf("ProxyChain error for '%s': %s", chain.Request.URL.String(), err.Error())
|
||||
chain.Context.SendString(e.Error())
|
||||
log.Println(e.Error())
|
||||
return e
|
||||
}
|
||||
|
||||
// _reset clears per-request state so the ProxyChain can be reused for the
// next request. Response is intentionally left untouched (see commented line)
// — presumably because Execute still streams from it after _reset is
// deferred; confirm before re-enabling.
func (chain *ProxyChain) _reset() {
	chain.abortErr = nil
	chain.Request = nil
	//chain.Response = nil
	chain.Context = nil
}
|
||||
|
||||
// NewProxyChain initializes a new ProxyChain
|
||||
func NewProxyChain() *ProxyChain {
|
||||
chain := new(ProxyChain)
|
||||
chain.Client = http.DefaultClient
|
||||
return chain
|
||||
}
|
||||
|
||||
/// ========================================================================================================
|
||||
|
||||
// _execute sends the request for the ProxyChain and returns the raw body only
|
||||
// the caller is responsible for returning a response back to the requestor
|
||||
// the caller is also responsible for calling chain._reset() when they are done with the body
|
||||
func (chain *ProxyChain) _execute() (io.Reader, error) {
|
||||
if chain.validateCtxIsSet() != nil || chain.abortErr != nil {
|
||||
return nil, chain.abortErr
|
||||
}
|
||||
if chain.Request == nil {
|
||||
return nil, errors.New("proxychain request not yet initialized")
|
||||
}
|
||||
if chain.Request.URL.Scheme == "" {
|
||||
return nil, errors.New("request url not set or invalid. Check ProxyChain ReqMods for issues")
|
||||
}
|
||||
|
||||
// Apply requestModifications to proxychain
|
||||
for _, applyRequestModificationsTo := range chain.requestModifications {
|
||||
err := applyRequestModificationsTo(chain)
|
||||
if err != nil {
|
||||
return nil, chain.abort(err)
|
||||
}
|
||||
}
|
||||
|
||||
// Send Request Upstream
|
||||
resp, err := chain.Client.Do(chain.Request)
|
||||
if err != nil {
|
||||
return nil, chain.abort(err)
|
||||
}
|
||||
chain.Response = resp
|
||||
|
||||
/* todo: move to rsm
|
||||
for k, v := range resp.Header {
|
||||
chain.Context.Set(k, resp.Header.Get(k))
|
||||
}
|
||||
*/
|
||||
|
||||
// Apply ResponseModifiers to proxychain
|
||||
for _, applyResultModificationsTo := range chain.resultModifications {
|
||||
err := applyResultModificationsTo(chain)
|
||||
if err != nil {
|
||||
return nil, chain.abort(err)
|
||||
}
|
||||
}
|
||||
|
||||
// stream request back to client, possibly rewriting the body
|
||||
if len(chain.htmlTokenRewriters) == 0 {
|
||||
return chain.Response.Body, nil
|
||||
}
|
||||
|
||||
ct := chain.Response.Header.Get("content-type")
|
||||
switch {
|
||||
case strings.HasPrefix(ct, "text/html"):
|
||||
fmt.Println("fooox")
|
||||
return rr.NewHTMLRewriter(chain.Response.Body, chain.htmlTokenRewriters), nil
|
||||
default:
|
||||
return chain.Response.Body, nil
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Execute sends the request for the ProxyChain and returns the request to the sender
|
||||
// and resets the fields so that the ProxyChain can be reused.
|
||||
// if any step in the ProxyChain fails, the request will abort and a 500 error will
|
||||
// be returned to the client
|
||||
func (chain *ProxyChain) Execute() error {
|
||||
defer chain._reset()
|
||||
body, err := chain._execute()
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return err
|
||||
}
|
||||
if chain.Context == nil {
|
||||
return errors.New("no context set")
|
||||
}
|
||||
|
||||
// Return request back to client
|
||||
chain.Context.Set("content-type", chain.Response.Header.Get("content-type"))
|
||||
return chain.Context.SendStream(body)
|
||||
|
||||
//return chain.Context.SendStream(body)
|
||||
}
|
||||
11
proxychain/proxychain_pool.go
Normal file
11
proxychain/proxychain_pool.go
Normal file
@@ -0,0 +1,11 @@
|
||||
package proxychain
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
)
|
||||
|
||||
type ProxyChainPool map[url.URL]ProxyChain
|
||||
|
||||
func NewProxyChainPool() ProxyChainPool {
|
||||
return map[url.URL]ProxyChain{}
|
||||
}
|
||||
33
proxychain/requestmodifers/masquerade_as_trusted_bot.go
Normal file
33
proxychain/requestmodifers/masquerade_as_trusted_bot.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// MasqueradeAsGoogleBot modifies user agent and x-forwarded for
|
||||
// to appear to be a Google Bot
|
||||
func MasqueradeAsGoogleBot() proxychain.RequestModification {
|
||||
const botUA string = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; Googlebot/2.1; http://www.google.com/bot.html) Chrome/79.0.3945.120 Safari/537.36"
|
||||
const botIP string = "66.249.78.8" // TODO: create a random ip pool from https://developers.google.com/static/search/apis/ipranges/googlebot.json
|
||||
return masqueradeAsTrustedBot(botUA, botIP)
|
||||
}
|
||||
|
||||
// MasqueradeAsBingBot modifies user agent and x-forwarded for
|
||||
// to appear to be a Bing Bot
|
||||
func MasqueradeAsBingBot() proxychain.RequestModification {
|
||||
const botUA string = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm) Chrome/79.0.3945.120 Safari/537.36"
|
||||
const botIP string = "13.66.144.9" // https://www.bing.com/toolbox/bingbot.json
|
||||
return masqueradeAsTrustedBot(botUA, botIP)
|
||||
}
|
||||
|
||||
func masqueradeAsTrustedBot(botUA string, botIP string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.AddRequestModifications(
|
||||
SpoofUserAgent(botUA),
|
||||
SpoofXForwardedFor(botIP),
|
||||
SpoofReferrer(""),
|
||||
SpoofOrigin(""),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
13
proxychain/requestmodifers/modify_domain_with_regex.go
Normal file
13
proxychain/requestmodifers/modify_domain_with_regex.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
// ModifyDomainWithRegex rewrites the upstream request's host using the given
// regex and replacement string (ReplaceAllString semantics, so $1-style
// references work in the replacement).
// NOTE(review): match is a regexp.Regexp passed by value; *regexp.Regexp is
// the conventional form — changing it would break callers, so flagged only.
func ModifyDomainWithRegex(match regexp.Regexp, replacement string) proxychain.RequestModification {
	return func(px *proxychain.ProxyChain) error {
		px.Request.URL.Host = match.ReplaceAllString(px.Request.URL.Host, replacement)
		return nil
	}
}
|
||||
97
proxychain/requestmodifers/modify_outgoing_cookies.go
Normal file
97
proxychain/requestmodifers/modify_outgoing_cookies.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// SetOutgoingCookie modifes a specific cookie name
|
||||
// by modifying the request cookie headers going to the upstream server.
|
||||
// If the cookie name does not already exist, it is created.
|
||||
func SetOutgoingCookie(name string, val string) proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
cookies := chain.Request.Cookies()
|
||||
hasCookie := false
|
||||
for _, cookie := range cookies {
|
||||
if cookie.Name != name {
|
||||
continue
|
||||
}
|
||||
hasCookie = true
|
||||
cookie.Value = val
|
||||
}
|
||||
|
||||
if hasCookie {
|
||||
return nil
|
||||
}
|
||||
|
||||
chain.Request.AddCookie(&http.Cookie{
|
||||
Domain: chain.Request.URL.Host,
|
||||
Name: name,
|
||||
Value: val,
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// SetOutgoingCookies modifies a client request's cookie header
|
||||
// to a raw Cookie string, overwriting existing cookies
|
||||
func SetOutgoingCookies(cookies string) proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.Request.Header.Set("Cookies", cookies)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteOutgoingCookie modifies the http request's cookies header to
|
||||
// delete a specific request cookie going to the upstream server.
|
||||
// If the cookie does not exist, it does not do anything.
|
||||
func DeleteOutgoingCookie(name string) proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
cookies := chain.Request.Cookies()
|
||||
chain.Request.Header.Del("Cookies")
|
||||
|
||||
for _, cookie := range cookies {
|
||||
if cookie.Name == name {
|
||||
chain.Request.AddCookie(cookie)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteOutgoingCookies removes the cookie header entirely,
|
||||
// preventing any cookies from reaching the upstream server.
|
||||
func DeleteOutgoingCookies() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Del("Cookie")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteOutGoingCookiesExcept prevents non-whitelisted cookies from being sent from the client
|
||||
// to the upstream proxy server. Cookies whose names are in the whitelist are not removed.
|
||||
func DeleteOutgoingCookiesExcept(whitelist ...string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
// Convert whitelist slice to a map for efficient lookups
|
||||
whitelistMap := make(map[string]struct{})
|
||||
for _, cookieName := range whitelist {
|
||||
whitelistMap[cookieName] = struct{}{}
|
||||
}
|
||||
|
||||
// Get all cookies from the request header
|
||||
cookies := px.Request.Cookies()
|
||||
|
||||
// Clear the original Cookie header
|
||||
px.Request.Header.Del("Cookie")
|
||||
|
||||
// Re-add cookies that are in the whitelist
|
||||
for _, cookie := range cookies {
|
||||
if _, found := whitelistMap[cookie.Name]; found {
|
||||
px.Request.AddCookie(cookie)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
13
proxychain/requestmodifers/modify_path_with_regex.go
Normal file
13
proxychain/requestmodifers/modify_path_with_regex.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
func ModifyPathWithRegex(match regexp.Regexp, replacement string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.URL.Path = match.ReplaceAllString(px.Request.URL.Path, replacement)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
20
proxychain/requestmodifers/modify_query_params.go
Normal file
20
proxychain/requestmodifers/modify_query_params.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// ModifyQueryParams replaces query parameter values in URL's query params in a ProxyChain's URL.
|
||||
// If the query param key doesn't exist, it is created.
|
||||
func ModifyQueryParams(key string, value string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
q := px.Request.URL.Query()
|
||||
if value == "" {
|
||||
q.Del(key)
|
||||
return nil
|
||||
}
|
||||
q.Set(key, value)
|
||||
px.Request.URL.RawQuery = q.Encode()
|
||||
return nil
|
||||
}
|
||||
}
|
||||
23
proxychain/requestmodifers/modify_request_headers.go
Normal file
23
proxychain/requestmodifers/modify_request_headers.go
Normal file
@@ -0,0 +1,23 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SetRequestHeader modifies a specific outgoing header
|
||||
// This is the header that the upstream server will see.
|
||||
func SetRequestHeader(name string, val string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Set(name, val)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteRequestHeader modifies a specific outgoing header
|
||||
// This is the header that the upstream server will see.
|
||||
func DeleteRequestHeader(name string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Del(name)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
27
proxychain/requestmodifers/request_archive_is.go
Normal file
27
proxychain/requestmodifers/request_archive_is.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
const archivistUrl string = "https://archive.is/latest/"
|
||||
|
||||
// RequestArchiveIs modifies a ProxyChain's URL to request an archived version from archive.is
|
||||
func RequestArchiveIs() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.URL.RawQuery = ""
|
||||
newURLString := archivistUrl + px.Request.URL.String()
|
||||
newURL, err := url.Parse(newURLString)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// archivist seems to sabotage requests from cloudflare's DNS
|
||||
// bypass this just in case
|
||||
px.AddRequestModifications(ResolveWithGoogleDoH())
|
||||
|
||||
px.Request.URL = newURL
|
||||
return nil
|
||||
}
|
||||
}
|
||||
21
proxychain/requestmodifers/request_google_cache.go
Normal file
21
proxychain/requestmodifers/request_google_cache.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
const googleCacheUrl string = "https://webcache.googleusercontent.com/search?q=cache:"
|
||||
|
||||
// RequestGoogleCache modifies a ProxyChain's URL to request its Google Cache version.
|
||||
func RequestGoogleCache() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
encodedURL := url.QueryEscape(px.Request.URL.String())
|
||||
newURL, err := url.Parse(googleCacheUrl + encodedURL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
px.Request.URL = newURL
|
||||
return nil
|
||||
}
|
||||
}
|
||||
22
proxychain/requestmodifers/request_wayback_machine.go
Normal file
22
proxychain/requestmodifers/request_wayback_machine.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
const waybackUrl string = "https://web.archive.org/web/"
|
||||
|
||||
// RequestWaybackMachine modifies a ProxyChain's URL to request the wayback machine (archive.org) version.
|
||||
func RequestWaybackMachine() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.URL.RawQuery = ""
|
||||
newURLString := waybackUrl + px.Request.URL.String()
|
||||
newURL, err := url.Parse(newURLString)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
px.Request.URL = newURL
|
||||
return nil
|
||||
}
|
||||
}
|
||||
80
proxychain/requestmodifers/resolve_with_google_doh.go
Normal file
80
proxychain/requestmodifers/resolve_with_google_doh.go
Normal file
@@ -0,0 +1,80 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
"net"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// resolveWithGoogleDoH resolves a hostname to an IPv4 address using
// Google's DNS-over-HTTPS JSON API (https://dns.google/resolve).
// It returns the first A record found, or an error if the lookup fails
// or yields no usable record.
func resolveWithGoogleDoH(host string) (string, error) {
	url := "https://dns.google/resolve?name=" + host + "&type=A"
	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	// Fail loudly on non-200 responses instead of decoding an error body.
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("DoH query for %s returned status %d", host, resp.StatusCode)
	}

	var result struct {
		Answer []struct {
			Type int    `json:"type"` // DNS RR type; 1 == A record
			Data string `json:"data"`
		} `json:"Answer"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}

	// The Answer section may contain CNAME records (type 5) whose Data is
	// a hostname, not an IP address. Return only an actual A record; the
	// original code blindly returned the first answer of any type.
	for _, ans := range result.Answer {
		if ans.Type == 1 {
			return ans.Data, nil
		}
	}
	return "", fmt.Errorf("no DoH DNS record found for %s", host)
}
|
||||
|
||||
// ResolveWithGoogleDoH modifies a ProxyChain's client to make the request by resolving the URL
|
||||
// using Google's DNS over HTTPs service
|
||||
func ResolveWithGoogleDoH() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
client := &http.Client{
|
||||
Timeout: px.Client.Timeout,
|
||||
}
|
||||
|
||||
dialer := &net.Dialer{
|
||||
Timeout: 5 * time.Second,
|
||||
KeepAlive: 5 * time.Second,
|
||||
}
|
||||
|
||||
customDialContext := func(ctx context.Context, network, addr string) (net.Conn, error) {
|
||||
host, port, err := net.SplitHostPort(addr)
|
||||
if err != nil {
|
||||
// If the addr doesn't include a port, determine it based on the URL scheme
|
||||
if px.Request.URL.Scheme == "https" {
|
||||
port = "443"
|
||||
} else {
|
||||
port = "80"
|
||||
}
|
||||
host = addr // assume the entire addr is the host
|
||||
}
|
||||
|
||||
resolvedHost, err := resolveWithGoogleDoH(host)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return dialer.DialContext(ctx, network, net.JoinHostPort(resolvedHost, port))
|
||||
}
|
||||
|
||||
patchedTransportWithDoH := &http.Transport{
|
||||
DialContext: customDialContext,
|
||||
}
|
||||
|
||||
client.Transport = patchedTransportWithDoH
|
||||
px.Client = client // Assign the modified client to the ProxyChain
|
||||
return nil
|
||||
}
|
||||
}
|
||||
24
proxychain/requestmodifers/spoof_origin.go
Normal file
24
proxychain/requestmodifers/spoof_origin.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofOrigin modifies the origin header
|
||||
// if the upstream server returns a Vary header
|
||||
// it means you might get a different response if you change this
|
||||
func SpoofOrigin(url string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Set("origin", url)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// HideOrigin modifies the origin header
|
||||
// so that it is the original origin, not the proxy
|
||||
func HideOrigin() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Set("origin", px.Request.URL.String())
|
||||
return nil
|
||||
}
|
||||
}
|
||||
29
proxychain/requestmodifers/spoof_referrer.go
Normal file
29
proxychain/requestmodifers/spoof_referrer.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrer modifies the referrer header
|
||||
// useful if the page can be accessed from a search engine
|
||||
// or social media site, but not by browsing the website itself
|
||||
// if url is "", then the referrer header is removed
|
||||
func SpoofReferrer(url string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
if url == "" {
|
||||
px.Request.Header.Del("referrer")
|
||||
return nil
|
||||
}
|
||||
px.Request.Header.Set("referrer", url)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// HideReferrer modifies the referrer header
|
||||
// so that it is the original referrer, not the proxy
|
||||
func HideReferrer() proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Set("referrer", px.Request.URL.String())
|
||||
return nil
|
||||
}
|
||||
}
|
||||
44
proxychain/requestmodifers/spoof_referrer_from_baidu_post.go
Normal file
44
proxychain/requestmodifers/spoof_referrer_from_baidu_post.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
"math/rand"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromBaiduSearch modifies the referrer header
|
||||
// pretending to be from a BaiduSearch
|
||||
func SpoofReferrerFromBaiduSearch() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
// https://www.baidu.com/link?url=5biIeDvUIihawf3Zbbysach2Xn4H3w3FzO6LZKgSs-B5Yt4M4RUFikokOk5zetf2&wd=&eqid=9da80d8208009b8480000706655d5ed6
|
||||
referrer := fmt.Sprintf("https://baidu.com/link?url=%s", generateRandomBaiduURL())
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer(referrer),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// utility functions ==================

// generateRandomString returns a string of `length` characters drawn
// pseudo-randomly from charset.
func generateRandomString(charset string, length int) string {
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	buf := make([]byte, length)
	for i := range buf {
		buf[i] = charset[rng.Intn(len(charset))]
	}
	return string(buf)
}

// generateRandomBaiduURL fabricates the token-plus-tracking tail of a
// Baidu search click-through link (see SpoofReferrerFromBaiduSearch).
func generateRandomBaiduURL() string {
	const alphanumericCharset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	const hexCharset = "0123456789abcdef"
	token := generateRandomString(alphanumericCharset, 30) // link token before "-"
	eqid := generateRandomString(hexCharset, 16)           // fake eqid value
	return token + "-" + "&wd=&eqid=" + eqid
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromBingSearch modifies the referrer header
|
||||
// pretending to be from a bing search site
|
||||
func SpoofReferrerFromBingSearch() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://www.bing.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
ModifyQueryParams("utm_source", "bing"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromGoogleSearch modifies the referrer header
|
||||
// pretending to be from a google search site
|
||||
func SpoofReferrerFromGoogleSearch() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://www.google.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
ModifyQueryParams("utm_source", "google"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromLinkedInPost modifies the referrer header
|
||||
// pretending to be from a linkedin post
|
||||
func SpoofReferrerFromLinkedInPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://www.linkedin.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
ModifyQueryParams("utm_campaign", "post"),
|
||||
ModifyQueryParams("utm_medium", "web"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
24
proxychain/requestmodifers/spoof_referrer_from_naver_post.go
Normal file
24
proxychain/requestmodifers/spoof_referrer_from_naver_post.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromNaverSearch modifies the referrer header
|
||||
// pretending to be from a Naver search (popular in South Korea)
|
||||
func SpoofReferrerFromNaverSearch() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
referrer := fmt.Sprintf(
|
||||
"https://search.naver.com/search.naver?where=nexearch&sm=top_hty&fbm=0&ie=utf8&query=%s",
|
||||
chain.Request.URL.Host,
|
||||
)
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer(referrer),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromPinterestPost modifies the referrer header
|
||||
// pretending to be from a pinterest post
|
||||
func SpoofReferrerFromPinterestPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://www.pinterest.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
19
proxychain/requestmodifers/spoof_referrer_from_qq_post.go
Normal file
19
proxychain/requestmodifers/spoof_referrer_from_qq_post.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromQQPost modifies the referrer header
|
||||
// pretending to be from a QQ post (popular social media in China)
|
||||
func SpoofReferrerFromQQPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://new.qq.com/'"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromRedditPost modifies the referrer header
|
||||
// pretending to be from a reddit post
|
||||
func SpoofReferrerFromRedditPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://www.reddit.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromTumblrPost modifies the referrer header
|
||||
// pretending to be from a tumblr post
|
||||
func SpoofReferrerFromTumblrPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://www.tumblr.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromTwitterPost modifies the referrer header
|
||||
// pretending to be from a twitter post
|
||||
func SpoofReferrerFromTwitterPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://t.co/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromVkontaktePost modifies the referrer header
|
||||
// pretending to be from a vkontakte post (popular in Russia)
|
||||
func SpoofReferrerFromVkontaktePost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer("https://away.vk.com/"),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
22
proxychain/requestmodifers/spoof_referrer_from_weibo_post.go
Normal file
22
proxychain/requestmodifers/spoof_referrer_from_weibo_post.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
"math/rand"
|
||||
)
|
||||
|
||||
// SpoofReferrerFromWeiboPost modifies the referrer header
|
||||
// pretending to be from a Weibo post (popular in China)
|
||||
func SpoofReferrerFromWeiboPost() proxychain.RequestModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
referrer := fmt.Sprintf("http://weibo.com/u/%d", rand.Intn(90001))
|
||||
chain.AddRequestModifications(
|
||||
SpoofReferrer(referrer),
|
||||
SetRequestHeader("sec-fetch-site", "cross-site"),
|
||||
SetRequestHeader("sec-fetch-dest", "document"),
|
||||
SetRequestHeader("sec-fetch-mode", "navigate"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
13
proxychain/requestmodifers/spoof_user_agent.go
Normal file
13
proxychain/requestmodifers/spoof_user_agent.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofUserAgent modifies the user agent
|
||||
func SpoofUserAgent(ua string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Set("user-agent", ua)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
14
proxychain/requestmodifers/spoof_x_forwarded_for.go
Normal file
14
proxychain/requestmodifers/spoof_x_forwarded_for.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package requestmodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SpoofXForwardedFor modifies the X-Forwarded-For header
|
||||
// in some cases, a forward proxy may interpret this as the source IP
|
||||
func SpoofXForwardedFor(ip string) proxychain.RequestModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Request.Header.Set("X-FORWARDED-FOR", ip)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
21
proxychain/responsemodifers/bypass_cors.go
Normal file
21
proxychain/responsemodifers/bypass_cors.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// BypassCORS modifies response headers to prevent the browser
|
||||
// from enforcing any CORS restrictions. This should run at the end of the chain.
|
||||
func BypassCORS() proxychain.ResponseModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddResponseModifications(
|
||||
SetResponseHeader("Access-Control-Allow-Origin", "*"),
|
||||
SetResponseHeader("Access-Control-Expose-Headers", "*"),
|
||||
SetResponseHeader("Access-Control-Allow-Credentials", "true"),
|
||||
SetResponseHeader("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, HEAD, OPTIONS, PATCH"),
|
||||
SetResponseHeader("Access-Control-Allow-Headers", "*"),
|
||||
DeleteResponseHeader("X-Frame-Options"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
30
proxychain/responsemodifers/bypass_csp.go
Normal file
30
proxychain/responsemodifers/bypass_csp.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// TODO: handle edge case where CSP is specified in meta tag:
|
||||
// <meta http-equiv="Content-Security-Policy" content="default-src 'self'">
|
||||
|
||||
// BypassContentSecurityPolicy modifies response headers to prevent the browser
|
||||
// from enforcing any CSP restrictions. This should run at the end of the chain.
|
||||
func BypassContentSecurityPolicy() proxychain.ResponseModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.AddResponseModifications(
|
||||
DeleteResponseHeader("Content-Security-Policy"),
|
||||
DeleteResponseHeader("Content-Security-Policy-Report-Only"),
|
||||
DeleteResponseHeader("X-Content-Security-Policy"),
|
||||
DeleteResponseHeader("X-WebKit-CSP"),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// SetContentSecurityPolicy modifies response headers to a specific CSP
|
||||
func SetContentSecurityPolicy(csp string) proxychain.ResponseModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
chain.Response.Header.Set("Content-Security-Policy", csp)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
27
proxychain/responsemodifers/inject_script.go
Normal file
27
proxychain/responsemodifers/inject_script.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"ladder/proxychain"
|
||||
"ladder/proxychain/responsemodifers/rewriters"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// InjectScript modifies HTTP responses
|
||||
// to execute javascript at a particular time.
|
||||
func InjectScript(js string, execTime rewriters.ScriptExecTime) proxychain.ResponseModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
// don't add rewriter if it's not even html
|
||||
ct := chain.Response.Header.Get("content-type")
|
||||
if !strings.HasPrefix(ct, "text/html") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// the rewriting actually happens in chain.Execute() as the client is streaming the response body back
|
||||
rr := rewriters.NewScriptInjectorRewriter(js, execTime)
|
||||
// we just queue it up here
|
||||
chain.AddHTMLTokenRewriter(rr)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
102
proxychain/responsemodifers/modify_incoming_cookies.go
Normal file
102
proxychain/responsemodifers/modify_incoming_cookies.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// DeleteIncomingCookies prevents ALL cookies from being sent from the proxy server
|
||||
// back down to the client.
|
||||
func DeleteIncomingCookies(whitelist ...string) proxychain.ResponseModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Response.Header.Del("Set-Cookie")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteIncomingCookiesExcept prevents non-whitelisted cookies from being sent from the proxy server
|
||||
// to the client. Cookies whose names are in the whitelist are not removed.
|
||||
func DeleteIncomingCookiesExcept(whitelist ...string) proxychain.ResponseModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
// Convert whitelist slice to a map for efficient lookups
|
||||
whitelistMap := make(map[string]struct{})
|
||||
for _, cookieName := range whitelist {
|
||||
whitelistMap[cookieName] = struct{}{}
|
||||
}
|
||||
|
||||
// If the response has no cookies, return early
|
||||
if px.Response.Header == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Filter the cookies in the response
|
||||
filteredCookies := []string{}
|
||||
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
|
||||
cookie := parseCookie(cookieStr)
|
||||
if _, found := whitelistMap[cookie.Name]; found {
|
||||
filteredCookies = append(filteredCookies, cookieStr)
|
||||
}
|
||||
}
|
||||
|
||||
// Update the Set-Cookie header with the filtered cookies
|
||||
if len(filteredCookies) > 0 {
|
||||
px.Response.Header["Set-Cookie"] = filteredCookies
|
||||
} else {
|
||||
px.Response.Header.Del("Set-Cookie")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// parseCookie parses a single raw Set-Cookie header value and returns
// the resulting http.Cookie. If the string cannot be parsed, a zero
// cookie is returned (never nil, so callers may safely read .Name).
func parseCookie(cookieStr string) *http.Cookie {
	header := http.Header{}
	header.Add("Set-Cookie", cookieStr)
	// Set-Cookie must be parsed via Response.Cookies(). The original code
	// used Request.Cookies(), which reads only the "Cookie" header and so
	// always returned an empty slice — indexing [0] then panicked on
	// every call.
	response := http.Response{Header: header}
	cookies := response.Cookies()
	if len(cookies) == 0 {
		return &http.Cookie{}
	}
	return cookies[0]
}
|
||||
|
||||
// SetIncomingCookies adds a raw cookie string being sent from the proxy server down to the client
|
||||
func SetIncomingCookies(cookies string) proxychain.ResponseModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Response.Header.Set("Set-Cookie", cookies)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// SetIncomingCookie modifies a specific cookie in the response from the proxy server to the client.
|
||||
func SetIncomingCookie(name string, val string) proxychain.ResponseModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
if px.Response.Header == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
updatedCookies := []string{}
|
||||
found := false
|
||||
|
||||
// Iterate over existing cookies and modify the one that matches the cookieName
|
||||
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
|
||||
cookie := parseCookie(cookieStr)
|
||||
if cookie.Name == name {
|
||||
// Replace the cookie with the new value
|
||||
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
|
||||
found = true
|
||||
} else {
|
||||
// Keep the cookie as is
|
||||
updatedCookies = append(updatedCookies, cookieStr)
|
||||
}
|
||||
}
|
||||
|
||||
// If the specified cookie wasn't found, add it
|
||||
if !found {
|
||||
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
|
||||
}
|
||||
|
||||
// Update the Set-Cookie header
|
||||
px.Response.Header["Set-Cookie"] = updatedCookies
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
21
proxychain/responsemodifers/modify_response_header.go
Normal file
21
proxychain/responsemodifers/modify_response_header.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
"ladder/proxychain"
|
||||
)
|
||||
|
||||
// SetResponseHeader modifies response headers from the upstream server
|
||||
func SetResponseHeader(key string, value string) proxychain.ResponseModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Context.Response().Header.Set(key, value)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteResponseHeader removes response headers from the upstream server
|
||||
func DeleteResponseHeader(key string) proxychain.ResponseModification {
|
||||
return func(px *proxychain.ProxyChain) error {
|
||||
px.Context.Response().Header.Del(key)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
55
proxychain/responsemodifers/patch_dynamic_resource_urls.go
Normal file
55
proxychain/responsemodifers/patch_dynamic_resource_urls.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
"ladder/proxychain/responsemodifers/rewriters"
|
||||
"strings"
|
||||
)
|
||||
|
||||
//go:embed patch_dynamic_resource_urls.js
|
||||
var patchDynamicResourceURLsScript string
|
||||
|
||||
// PatchDynamicResourceURLs patches the javascript runtime to rewrite URLs client-side.
|
||||
// - This function is designed to allow the proxified page
|
||||
// to still be browsible by routing all resource URLs through the proxy.
|
||||
// - Native APIs capable of network requests will be hooked
|
||||
// and the URLs arguments modified to point to the proxy instead.
|
||||
// - fetch('/relative_path') -> fetch('/https://proxiedsite.com/relative_path')
|
||||
// - Element.setAttribute('src', "/assets/img.jpg") -> Element.setAttribute('src', "/https://proxiedsite.com/assets/img.jpg") -> fetch('/https://proxiedsite.com/relative_path')
|
||||
func PatchDynamicResourceURLs() proxychain.ResponseModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
// don't add rewriter if it's not even html
|
||||
ct := chain.Response.Header.Get("content-type")
|
||||
if !strings.HasPrefix(ct, "text/html") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// this is the original URL sent by client:
|
||||
// http://localhost:8080/http://proxiedsite.com/foo/bar
|
||||
originalURI := chain.Context.Request().URI()
|
||||
|
||||
// this is the extracted URL that the client requests to proxy
|
||||
// http://proxiedsite.com/foo/bar
|
||||
reqURL := chain.Request.URL
|
||||
|
||||
params := map[string]string{
|
||||
// ie: http://localhost:8080
|
||||
"{{PROXY_ORIGIN}}": fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host()),
|
||||
// ie: http://proxiedsite.com
|
||||
"{{ORIGIN}}": fmt.Sprintf("%s://%s", reqURL.Scheme, reqURL.Host),
|
||||
}
|
||||
|
||||
// the rewriting actually happens in chain.Execute() as the client is streaming the response body back
|
||||
rr := rewriters.NewScriptInjectorRewriterWithParams(
|
||||
patchDynamicResourceURLsScript,
|
||||
rewriters.BeforeDOMContentLoaded,
|
||||
params,
|
||||
)
|
||||
// we just queue it up here
|
||||
chain.AddHTMLTokenRewriter(rr)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
325
proxychain/responsemodifers/patch_dynamic_resource_urls.js
Normal file
325
proxychain/responsemodifers/patch_dynamic_resource_urls.js
Normal file
@@ -0,0 +1,325 @@
|
||||
// Overrides the global fetch and XMLHttpRequest open methods to modify the request URLs.
|
||||
// Also overrides the attribute setter prototype to modify the request URLs
|
||||
// fetch("/relative_script.js") -> fetch("http://localhost:8080/relative_script.js")
|
||||
(() => {
|
||||
|
||||
// ============== PARAMS ===========================
|
||||
// if the original request was: http://localhost:8080/http://proxiedsite.com/foo/bar
|
||||
// proxyOrigin is http://localhost:8080
|
||||
const proxyOrigin = "{{PROXY_ORIGIN}}";
|
||||
//const proxyOrigin = globalThis.window.location.origin;
|
||||
|
||||
// if the original request was: http://localhost:8080/http://proxiedsite.com/foo/bar
|
||||
// origin is http://proxiedsite.com
|
||||
const origin = "{{ORIGIN}}";
|
||||
//const origin = (new URL(decodeURIComponent(globalThis.window.location.pathname.substring(1)))).origin
|
||||
// ============== END PARAMS ======================
|
||||
|
||||
const blacklistedSchemes = [
|
||||
"ftp:",
|
||||
"mailto:",
|
||||
"tel:",
|
||||
"file:",
|
||||
"blob:",
|
||||
"javascript:",
|
||||
"about:",
|
||||
"magnet:",
|
||||
"ws:",
|
||||
"wss:",
|
||||
];
|
||||
|
||||
// Rewrites a single URL so the request is routed back through the proxy.
// Non-string values, special-scheme URIs, unparsable URLs, and URLs that
// were already rewritten are returned unchanged. Relies on the module-scope
// constants `origin`, `proxyOrigin`, and `blacklistedSchemes`.
function rewriteURL(url) {
  const oldUrl = url
  if (!url) return url
  // duck-type check: only string-likes (things with startsWith) can be rewritten
  let isStr = (typeof url.startsWith === 'function')
  if (!isStr) return url

  // don't rewrite special URIs
  // BUG FIX: match the scheme as a prefix; the previous exact-equality
  // check (blacklistedSchemes.includes(url)) only caught a bare scheme
  // string like "mailto:" and let "mailto:user@host" through.
  if (blacklistedSchemes.some((scheme) => url.startsWith(scheme))) return url;

  // don't rewrite invalid URIs
  try { new URL(url, origin) } catch { return url }

  // don't double rewrite
  if (url.startsWith(proxyOrigin)) return url;
  if (url.startsWith(`/${proxyOrigin}`)) return url;
  if (url.startsWith(`/${origin}`)) return url;
  if (url.startsWith(`/http://`)) return url;
  if (url.startsWith(`/https://`)) return url;
  if (url.startsWith(`/http%3A%2F%2F`)) return url;
  if (url.startsWith(`/https%3A%2F%2F`)) return url;
  if (url.startsWith(`/%2Fhttp`)) return url;

  if (url.startsWith("//")) {
    // protocol-relative URL -> treat as a resource on the proxied origin
    url = `/${origin}/${encodeURIComponent(url.substring(2))}`;
  } else if (url.startsWith("/")) {
    // root-relative path on the proxied site
    url = `/${origin}/${encodeURIComponent(url.substring(1))}`;
  } else if (url.startsWith(origin)) {
    // absolute URL on the proxied origin
    url = `/${encodeURIComponent(url)}`
  } else if (url.startsWith("http://") || url.startsWith("https://")) {
    // absolute URL on a third-party origin
    // NOTE(review): this prefixes the proxy origin INSIDE the path
    // ("/http://localhost:8080/…"), unlike the branches above — confirm
    // the server route actually expects this shape.
    url = `/${proxyOrigin}/${encodeURIComponent(url)}`;
  }
  console.log(`proxychain: rewrite JS URL: ${oldUrl} -> ${url}`)
  return url;
};
|
||||
|
||||
// Anti-bot systems (Cloudflare, Akamai bot manager, ...) sometimes detect
// hooked natives by inspecting Function.prototype.toString output.
// hideMonkeyPatch wraps the already-patched global function or object
// method and pins its toString() to the supplied "native-looking" source.
function hideMonkeyPatch(objectOrName, method, originalToString) {
  const target = (typeof objectOrName === 'string') ? globalThis[objectOrName] : objectOrName;
  // Global-function mode: a name was passed, it resolves to a function,
  // and the method name matches the global name.
  const patchGlobal =
    (typeof objectOrName === 'string') &&
    (typeof target === 'function') &&
    (method === objectOrName);

  if (patchGlobal) {
    const inner = target;
    globalThis[objectOrName] = function(...args) {
      return inner.apply(this, args);
    };
    globalThis[objectOrName].toString = () => originalToString;
    return;
  }

  if (target && typeof target[method] === 'function') {
    const inner = target[method];
    target[method] = function(...args) {
      return inner.apply(this, args);
    };
    target[method].toString = () => originalToString;
    return;
  }

  console.warn(`proxychain: cannot hide monkey patch: ${method} is not a function on the provided object.`);
}
|
||||
|
||||
// monkey patch fetch so request URLs are routed through the proxy
const oldFetch = fetch;
fetch = async (url, init) => {
  return oldFetch(rewriteURL(url), init)
}
hideMonkeyPatch('fetch', 'fetch', 'function fetch() { [native code] }')

// monkey patch xmlhttprequest
const oldOpen = XMLHttpRequest.prototype.open;
XMLHttpRequest.prototype.open = function(method, url, async = true, user = null, password = null) {
  return oldOpen.call(this, method, rewriteURL(url), async, user, password);
};
hideMonkeyPatch(XMLHttpRequest.prototype, 'open', 'function(){if("function"==typeof eo)return eo.apply(this,arguments)}');

// BUG FIX: XMLHttpRequest.send(body) takes a request body, not a URL.
// The previous patch declared send(method, url) and forwarded
// (method, rewriteURL(url)) — passing the body through as "method" and a
// bogus second argument. The URL is already rewritten in open(); send
// only needs to be wrapped (and hidden) as a passthrough.
const oldSend = XMLHttpRequest.prototype.send;
XMLHttpRequest.prototype.send = function(body) {
  return oldSend.call(this, body);
};
hideMonkeyPatch(XMLHttpRequest.prototype, 'send', 'function(){if("function"==typeof eo)return eo.apply(this,arguments)}');


// monkey patch service worker registration
const oldRegister = ServiceWorkerContainer.prototype.register;
ServiceWorkerContainer.prototype.register = function(scriptURL, options) {
  return oldRegister.call(this, rewriteURL(scriptURL), options)
}
hideMonkeyPatch(ServiceWorkerContainer.prototype, 'register', 'function register() { [native code] }')

// monkey patch URL.toString() method
const oldToString = URL.prototype.toString
URL.prototype.toString = function() {
  let originalURL = oldToString.call(this)
  return rewriteURL(originalURL)
}
hideMonkeyPatch(URL.prototype, 'toString', 'function toString() { [native code] }')

// monkey patch URL.toJSON() method
// BUG FIX: this block previously re-patched URL.prototype.toString a
// second time (copy/paste error), double-wrapping toString and leaving
// toJSON untouched. Patch toJSON itself.
const oldToJson = URL.prototype.toJSON
URL.prototype.toJSON = function() {
  let originalURL = oldToJson.call(this)
  return rewriteURL(originalURL)
}
hideMonkeyPatch(URL.prototype, 'toJSON', 'function toJSON() { [native code] }')

// Monkey patch URL.href getter and setter
const originalHrefDescriptor = Object.getOwnPropertyDescriptor(URL.prototype, 'href');
Object.defineProperty(URL.prototype, 'href', {
  get: function() {
    let originalHref = originalHrefDescriptor.get.call(this);
    return rewriteURL(originalHref)
  },
  set: function(newValue) {
    originalHrefDescriptor.set.call(this, rewriteURL(newValue));
  }
});
|
||||
|
||||
// TODO: do one more pass of this by manually traversing the DOM
// AFTER all the JS and page has loaded just in case

// Monkey patch setter
// Table of tag/attribute pairs whose IDL property setters and getters are
// hooked below so URL-valued properties are rewritten through the proxy.
// Also consumed by rewriteInnerHTML() later in this file.
// NOTE(review): 'icon' is not a standard attribute of <link> (icons use
// rel="icon" with href) — confirm whether that entry is intentional.
const elements = [
  { tag: 'a', attribute: 'href' },
  { tag: 'img', attribute: 'src' },
  // { tag: 'img', attribute: 'srcset' }, // TODO: handle srcset
  { tag: 'script', attribute: 'src' },
  { tag: 'link', attribute: 'href' },
  { tag: 'link', attribute: 'icon' },
  { tag: 'iframe', attribute: 'src' },
  { tag: 'audio', attribute: 'src' },
  { tag: 'video', attribute: 'src' },
  { tag: 'source', attribute: 'src' },
  // { tag: 'source', attribute: 'srcset' }, // TODO: handle srcset
  { tag: 'embed', attribute: 'src' },
  { tag: 'embed', attribute: 'pluginspage' },
  { tag: 'html', attribute: 'manifest' },
  { tag: 'object', attribute: 'src' },
  { tag: 'input', attribute: 'src' },
  { tag: 'track', attribute: 'src' },
  { tag: 'form', attribute: 'action' },
  { tag: 'area', attribute: 'href' },
  { tag: 'base', attribute: 'href' },
  { tag: 'blockquote', attribute: 'cite' },
  { tag: 'del', attribute: 'cite' },
  { tag: 'ins', attribute: 'cite' },
  { tag: 'q', attribute: 'cite' },
  { tag: 'button', attribute: 'formaction' },
  { tag: 'input', attribute: 'formaction' },
  { tag: 'meta', attribute: 'content' },
  { tag: 'object', attribute: 'data' },
];

// Hook the property (e.g. HTMLImageElement.prototype.src) for every
// tag/attribute pair above, rewriting values both on set and on get.
elements.forEach(({ tag, attribute }) => {
  // Resolve the element interface prototype via a throwaway element.
  const proto = document.createElement(tag).constructor.prototype;
  const descriptor = Object.getOwnPropertyDescriptor(proto, attribute);
  if (descriptor && descriptor.set) {
    Object.defineProperty(proto, attribute, {
      ...descriptor,
      set(value) {
        // calling rewriteURL will end up calling a setter for href,
        // leading to a recusive loop and a Maximum call stack size exceeded
        // error, so we guard against this with a local semaphore flag
        const isRewritingSetKey = Symbol.for('isRewritingSet');
        if (!this[isRewritingSetKey]) {
          this[isRewritingSetKey] = true;
          descriptor.set.call(this, rewriteURL(value));
          //descriptor.set.call(this, value);
          this[isRewritingSetKey] = false;
        } else {
          // Directly set the value without rewriting
          descriptor.set.call(this, value);
        }
      },
      get() {
        // Same re-entrancy guard as the setter, keyed separately so a
        // get during a set (or vice versa) is not suppressed.
        const isRewritingGetKey = Symbol.for('isRewritingGet');
        if (!this[isRewritingGetKey]) {
          this[isRewritingGetKey] = true;
          let oldURL = descriptor.get.call(this);
          let newURL = rewriteURL(oldURL);
          this[isRewritingGetKey] = false;
          return newURL
        } else {
          return descriptor.get.call(this);
        }
      }
    });
  }
});
|
||||
|
||||
|
||||
// sometimes, libraries will set the Element.innerHTML or Element.outerHTML directly with a string instead of setters.
// in this case, we intercept it, create a fake DOM, parse it and then rewrite all attributes that could
// contain a URL. Then we return the replacement innerHTML/outerHTML with redirected links.
//
// html:     the raw HTML string being assigned.
// elements: the [{tag, attribute}] table defined earlier in this file.
// Returns the HTML with URL-valued attributes rewritten, or the input
// unchanged when a rewrite is already in progress (re-entrancy guard).
function rewriteInnerHTML(html, elements) {
  const isRewritingHTMLKey = Symbol.for('isRewritingHTML');

  // Check if already processing
  // (parsing into tempContainer below itself assigns innerHTML, which
  // would otherwise recurse through the hooked innerHTML setter)
  if (document[isRewritingHTMLKey]) {
    return html;
  }

  const tempContainer = document.createElement('div');
  document[isRewritingHTMLKey] = true;

  try {
    tempContainer.innerHTML = html;

    // Create a map for quick lookup
    // NOTE(review): a Map keyed by tag keeps only the LAST attribute for
    // tags listed twice in `elements` (e.g. object/src vs object/data) —
    // confirm that losing the earlier entries is acceptable.
    const elementsMap = new Map(elements.map(e => [e.tag, e.attribute]));

    // Loop-based DOM traversal
    const nodes = [...tempContainer.querySelectorAll('*')];
    for (const node of nodes) {
      const attribute = elementsMap.get(node.tagName.toLowerCase());
      if (attribute && node.hasAttribute(attribute)) {
        const originalUrl = node.getAttribute(attribute);
        const rewrittenUrl = rewriteURL(originalUrl);
        node.setAttribute(attribute, rewrittenUrl);
      }
    }

    return tempContainer.innerHTML;
  } finally {
    // Clear the flag
    document[isRewritingHTMLKey] = false;
  }
}
|
||||
|
||||
|
||||
// Store original setters
const originalSetters = {};

// Hook Element.prototype.innerHTML / outerHTML so that string-assigned
// markup is parsed and its URL attributes rewritten before insertion.
['innerHTML', 'outerHTML'].forEach(property => {
  const descriptor = Object.getOwnPropertyDescriptor(Element.prototype, property);
  if (descriptor && descriptor.set) {
    originalSetters[property] = descriptor.set;

    Object.defineProperty(Element.prototype, property, {
      ...descriptor,
      set(value) {
        // Re-entrancy guard: rewriteInnerHTML assigns innerHTML on a
        // temporary element, which would re-enter this setter.
        const isRewritingHTMLKey = Symbol.for('isRewritingHTML');
        if (!this[isRewritingHTMLKey]) {
          this[isRewritingHTMLKey] = true;
          try {
            // Use custom logic
            descriptor.set.call(this, rewriteInnerHTML(value, elements));
          } finally {
            this[isRewritingHTMLKey] = false;
          }
        } else {
          // Use original setter in recursive call
          originalSetters[property].call(this, value);
        }
      }
    });
  }
});
|
||||
|
||||
|
||||
})();
|
||||
|
||||
|
||||
|
||||
// Waits for the DOM to go "idle" (no childList mutations for debounceDelay
// milliseconds after DOMContentLoaded) and then runs execute() exactly once.
(() => {
  const debounceDelay = 500; // adjust the delay as needed

  function execute() {
    console.log('DOM is now idle. Executing...');
  }

  function initIdleMutationObserver() {
    let debounceTimer;
    const watcher = new MutationObserver(() => {
      // Any mutation pushes the idle deadline back out.
      clearTimeout(debounceTimer);
      debounceTimer = setTimeout(() => {
        execute();
        watcher.disconnect(); // one-shot: stop observing after first idle
      }, debounceDelay);
    });
    watcher.observe(document.body, { attributes: false, childList: true, subtree: true });
  }

  document.addEventListener('DOMContentLoaded', () => initIdleMutationObserver());
})();
|
||||
35
proxychain/responsemodifers/rewrite_http_resource_urls.go
Normal file
35
proxychain/responsemodifers/rewrite_http_resource_urls.go
Normal file
@@ -0,0 +1,35 @@
|
||||
package responsemodifers
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"ladder/proxychain"
|
||||
"ladder/proxychain/responsemodifers/rewriters"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// RewriteHTMLResourceURLs modifies HTTP responses
|
||||
// to rewrite URLs attributes in HTML content (such as src, href)
|
||||
// - `<img src='/relative_path'>` -> `<img src='/https://proxiedsite.com/relative_path'>`
|
||||
// - This function is designed to allow the proxified page
|
||||
// to still be browsible by routing all resource URLs through the proxy.
|
||||
func RewriteHTMLResourceURLs() proxychain.ResponseModification {
|
||||
return func(chain *proxychain.ProxyChain) error {
|
||||
// don't add rewriter if it's not even html
|
||||
ct := chain.Response.Header.Get("content-type")
|
||||
if !strings.HasPrefix(ct, "text/html") {
|
||||
return nil
|
||||
}
|
||||
|
||||
// proxyURL is the URL of the ladder: http://localhost:8080 (ladder)
|
||||
originalURI := chain.Context.Request().URI()
|
||||
proxyURL := fmt.Sprintf("%s://%s", originalURI.Scheme(), originalURI.Host())
|
||||
|
||||
// the rewriting actually happens in chain.Execute() as the client is streaming the response body back
|
||||
rr := rewriters.NewHTMLTokenURLRewriter(chain.Request.URL, proxyURL)
|
||||
// we just queue it up here
|
||||
chain.AddHTMLTokenRewriter(rr)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
// Template injected by ScriptInjectorRewriter (AfterDOMIdle mode): waits for
// the DOM to stop mutating after DOMContentLoaded, then runs the embedded
// script body exactly once. The literal 'SCRIPT_CONTENT_PARAM' below is
// string-replaced with the real script source by the Go side — do not rename it.
(() => {
  document.addEventListener('DOMContentLoaded', (event) => {
    initIdleMutationObserver();
  });

  // Observes the whole body; each mutation resets a debounce timer, so
  // execute() fires only once the DOM has been quiet for debounceDelay ms.
  function initIdleMutationObserver() {
    let debounceTimer;
    const debounceDelay = 500; // adjust the delay as needed

    const observer = new MutationObserver((mutations) => {
      // Clear the previous timer and set a new one
      clearTimeout(debounceTimer);
      debounceTimer = setTimeout(() => {
        execute();
        observer.disconnect(); // Disconnect after first execution
      }, debounceDelay);
    });

    const config = { attributes: false, childList: true, subtree: true };
    observer.observe(document.body, config);
  }

  function execute() {
    'SCRIPT_CONTENT_PARAM'
    //console.log('DOM is now idle. Executing...');
  }
})();
|
||||
3
proxychain/responsemodifers/rewriters/css_rewriter.go
Normal file
3
proxychain/responsemodifers/rewriters/css_rewriter.go
Normal file
@@ -0,0 +1,3 @@
|
||||
package rewriters
|
||||
|
||||
// todo: implement
|
||||
131
proxychain/responsemodifers/rewriters/html_rewriter.go
Normal file
131
proxychain/responsemodifers/rewriters/html_rewriter.go
Normal file
@@ -0,0 +1,131 @@
|
||||
package rewriters
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// IHTMLTokenRewriter defines an interface for modifying HTML tokens.
type IHTMLTokenRewriter interface {
	// ShouldModify determines whether a given HTML token requires modification.
	// It is consulted once per token; returning false skips ModifyToken entirely.
	ShouldModify(*html.Token) bool

	// ModifyToken applies modifications to a given HTML token.
	// It returns strings representing content to be prepended and
	// appended to the token. If no modifications are required or if an error occurs,
	// it returns empty strings for both 'prepend' and 'append'.
	// Note: The original token is not modified if an error occurs.
	ModifyToken(*html.Token) (prepend, append string)
}
|
||||
|
||||
// HTMLRewriter is a struct that can take multiple TokenHandlers and process all
// HTML tokens from http.Response.Body in a single pass, making changes and returning a new io.ReadCloser
//
// - HTMLRewriter reads the http.Response.Body stream,
// parsing each HTML token one at a time and making modifications (defined by implementations of IHTMLTokenRewriter)
// in a single pass of the tokenizer.
//
// - When ProxyChain.Execute() is called, the response body will be read from the server
// and pulled through each ResponseModification which wraps the ProxyChain.Response.Body
// without ever buffering the entire HTTP response in memory.
type HTMLRewriter struct {
	tokenizer    *html.Tokenizer // streams tokens from the source body
	currentToken *html.Token     // token currently being emitted; nil before first Read
	tokenBuffer  *bytes.Buffer   // serialized current token plus its prepends/appends
	// currentTokenProcessed marks that tokenBuffer has been fully drained
	// and the next Read must advance the tokenizer.
	currentTokenProcessed bool
	rewriters             []IHTMLTokenRewriter // applied to every token, in order
}
|
||||
|
||||
// NewHTMLRewriter creates a new HTMLRewriter instance.
|
||||
// It processes HTML tokens from an io.ReadCloser source (typically http.Response.Body)
|
||||
// using a series of HTMLTokenRewriters. Each HTMLTokenRewriter in the 'rewriters' slice
|
||||
// applies its specific modifications to the HTML tokens.
|
||||
// The HTMLRewriter reads from the provided 'src', applies the modifications,
|
||||
// and returns the processed content as a new io.ReadCloser.
|
||||
// This new io.ReadCloser can be used to stream the modified content back to the client.
|
||||
//
|
||||
// Parameters:
|
||||
// - src: An io.ReadCloser representing the source of the HTML content, such as http.Response.Body.
|
||||
// - rewriters: A slice of HTMLTokenRewriters that define the modifications to be applied to the HTML tokens.
|
||||
//
|
||||
// Returns:
|
||||
// - A pointer to an HTMLRewriter, which implements io.ReadCloser, containing the modified HTML content.
|
||||
func NewHTMLRewriter(src io.ReadCloser, rewriters []IHTMLTokenRewriter) *HTMLRewriter {
|
||||
return &HTMLRewriter{
|
||||
tokenizer: html.NewTokenizer(src),
|
||||
currentToken: nil,
|
||||
tokenBuffer: new(bytes.Buffer),
|
||||
currentTokenProcessed: false,
|
||||
rewriters: rewriters,
|
||||
}
|
||||
}
|
||||
|
||||
// Close resets the internal state of HTMLRewriter, clearing buffers and token data.
|
||||
func (r *HTMLRewriter) Close() error {
|
||||
r.tokenBuffer.Reset()
|
||||
r.currentToken = nil
|
||||
r.currentTokenProcessed = false
|
||||
return nil
|
||||
}
|
||||
|
||||
// Read processes the HTML content, rewriting URLs and managing the state of tokens.
//
// Each call either (a) drains more of the already-serialized current token
// from tokenBuffer into p, or (b) advances the tokenizer to the next token,
// runs it through every registered rewriter, serializes
// "<prepends><token><appends>" into tokenBuffer, and drains from there.
// Tokenizer errors other than io.EOF are returned to the caller unchanged.
func (r *HTMLRewriter) Read(p []byte) (int, error) {

	if r.currentToken == nil || r.currentToken.Data == "" || r.currentTokenProcessed {
		tokenType := r.tokenizer.Next()

		// done reading html, close out reader
		if tokenType == html.ErrorToken {
			if r.tokenizer.Err() == io.EOF {
				return 0, io.EOF
			}
			return 0, r.tokenizer.Err()
		}

		// get the next token; reset buffer
		t := r.tokenizer.Token()
		r.currentToken = &t
		r.tokenBuffer.Reset()

		// buffer += "<prepends> <token> <appends>"
		// process token through all registered rewriters
		// rewriters will modify the token, and optionally
		// return a <prepend> or <append> string token
		appends := make([]string, 0, len(r.rewriters))
		for _, rewriter := range r.rewriters {
			if !rewriter.ShouldModify(r.currentToken) {
				continue
			}
			prepend, a := rewriter.ModifyToken(r.currentToken)
			appends = append(appends, a)
			// add <prepends> to buffer
			r.tokenBuffer.WriteString(prepend)
		}

		// add <token> to buffer
		if tokenType == html.TextToken {
			// don't unescape textTokens (such as inline scripts).
			// Token.String() by default will escape the inputs, but
			// we don't want to modify the original source
			r.tokenBuffer.WriteString(r.currentToken.Data)
		} else {
			r.tokenBuffer.WriteString(r.currentToken.String())
		}

		// add <appends> to buffer
		for _, a := range appends {
			r.tokenBuffer.WriteString(a)
		}

		r.currentTokenProcessed = false
	}

	n, err := r.tokenBuffer.Read(p)
	if err == io.EOF || r.tokenBuffer.Len() == 0 {
		// Buffer drained: the next Read must fetch a fresh token.
		r.currentTokenProcessed = true
		err = nil // EOF in this context is expected and not an actual error
	}
	return n, err
}
|
||||
263
proxychain/responsemodifers/rewriters/html_token_url_rewriter.go
Normal file
263
proxychain/responsemodifers/rewriters/html_token_url_rewriter.go
Normal file
@@ -0,0 +1,263 @@
|
||||
package rewriters
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// rewriteAttrs maps tag name -> attribute name -> true for every
// tag/attribute pair that may carry a URL worth routing through the proxy.
var rewriteAttrs map[string]map[string]bool

// specialRewriteAttrs marks pairs that need custom parsing (srcset lists,
// meta refresh directives) instead of a plain single-URL rewrite.
var specialRewriteAttrs map[string]map[string]bool

// schemeBlacklist lists URI schemes that must never be rewritten.
var schemeBlacklist map[string]bool

func init() {
	// define all tag/attributes which might contain URLs
	// to attempt to rewrite to point to proxy instead
	rewriteAttrs = map[string]map[string]bool{
		"img":        {"src": true, "srcset": true, "longdesc": true, "usemap": true},
		"a":          {"href": true},
		"form":       {"action": true},
		"link":       {"href": true, "manifest": true, "icon": true},
		"script":     {"src": true},
		"video":      {"src": true, "poster": true},
		"audio":      {"src": true},
		"iframe":     {"src": true, "longdesc": true},
		"embed":      {"src": true},
		"object":     {"data": true, "codebase": true},
		"source":     {"src": true, "srcset": true},
		"track":      {"src": true},
		"area":       {"href": true},
		"base":       {"href": true},
		"blockquote": {"cite": true},
		"del":        {"cite": true},
		"ins":        {"cite": true},
		"q":          {"cite": true},
		"body":       {"background": true},
		"button":     {"formaction": true},
		"input":      {"src": true, "formaction": true},
		"meta":       {"content": true},
	}

	// might contain URL but requires special handling
	specialRewriteAttrs = map[string]map[string]bool{
		"img":    {"srcset": true},
		"source": {"srcset": true},
		"meta":   {"content": true},
	}

	// define URIs to NOT rewrite
	// for example: don't overwrite <img src="data:image/png;base64;iVBORw...">"
	schemeBlacklist = map[string]bool{
		"data":       true,
		"tel":        true,
		"mailto":     true,
		"file":       true,
		"blob":       true,
		"javascript": true,
		"about":      true,
		"magnet":     true,
		"ws":         true,
		"wss":        true,
		"ftp":        true,
	}

}
|
||||
|
||||
// HTMLTokenURLRewriter implements HTMLTokenRewriter
// it rewrites URLs within HTML resources to use a specified proxy URL.
// <img src='/relative_path'> -> <img src='/https://proxiedsite.com/relative_path'>
type HTMLTokenURLRewriter struct {
	baseURL  *url.URL // origin of the proxied site; relative URLs resolve against it
	proxyURL string   // ladder URL, not proxied site URL
}
|
||||
|
||||
// NewHTMLTokenURLRewriter creates a new instance of HTMLResourceURLRewriter.
|
||||
// It initializes the tokenizer with the provided source and sets the proxy URL.
|
||||
func NewHTMLTokenURLRewriter(baseURL *url.URL, proxyURL string) *HTMLTokenURLRewriter {
|
||||
return &HTMLTokenURLRewriter{
|
||||
baseURL: baseURL,
|
||||
proxyURL: proxyURL,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *HTMLTokenURLRewriter) ShouldModify(token *html.Token) bool {
|
||||
attrLen := len(token.Attr)
|
||||
if attrLen == 0 {
|
||||
return false
|
||||
}
|
||||
if !(token.Type == html.StartTagToken || token.Type == html.SelfClosingTagToken) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (r *HTMLTokenURLRewriter) ModifyToken(token *html.Token) (string, string) {
|
||||
for i := range token.Attr {
|
||||
attr := &token.Attr[i]
|
||||
switch {
|
||||
// don't touch tag/attributes that don't contain URIs
|
||||
case !rewriteAttrs[token.Data][attr.Key]:
|
||||
continue
|
||||
// don't touch attributes with special URIs (like data:)
|
||||
case schemeBlacklist[strings.Split(attr.Key, ":")[0]]:
|
||||
continue
|
||||
// don't double-overwrite the url
|
||||
case strings.HasPrefix(attr.Val, r.proxyURL):
|
||||
continue
|
||||
case strings.HasPrefix(attr.Val, "/http://"):
|
||||
continue
|
||||
case strings.HasPrefix(attr.Val, "/https://"):
|
||||
continue
|
||||
// handle special rewrites
|
||||
case specialRewriteAttrs[token.Data][attr.Key]:
|
||||
r.handleSpecialAttr(token, attr, r.baseURL)
|
||||
continue
|
||||
default:
|
||||
// rewrite url
|
||||
handleURLPart(attr, r.baseURL)
|
||||
}
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// dispatcher for ModifyURL based on URI type
|
||||
func handleURLPart(attr *html.Attribute, baseURL *url.URL) {
|
||||
switch {
|
||||
case strings.HasPrefix(attr.Key, "//"):
|
||||
handleProtocolRelativePath(attr, baseURL)
|
||||
case strings.HasPrefix(attr.Key, "/"):
|
||||
handleRootRelativePath(attr, baseURL)
|
||||
case strings.HasPrefix(attr.Key, "https://"):
|
||||
handleAbsolutePath(attr, baseURL)
|
||||
case strings.HasPrefix(attr.Key, "http://"):
|
||||
handleAbsolutePath(attr, baseURL)
|
||||
default:
|
||||
handleDocumentRelativePath(attr, baseURL)
|
||||
}
|
||||
}
|
||||
|
||||
// Protocol-relative URLs: These start with "//" and will use the same protocol (http or https) as the current page.
|
||||
func handleProtocolRelativePath(attr *html.Attribute, baseURL *url.URL) {
|
||||
attr.Val = strings.TrimPrefix(attr.Val, "/")
|
||||
handleRootRelativePath(attr, baseURL)
|
||||
log.Printf("proto rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
||||
}
|
||||
|
||||
// Root-relative URLs: These are relative to the root path and start with a "/".
|
||||
func handleRootRelativePath(attr *html.Attribute, baseURL *url.URL) {
|
||||
// doublecheck this is a valid relative URL
|
||||
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
|
||||
_, err := url.Parse(fmt.Sprintf("http://localhost.com%s", attr.Val))
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
//log.Printf("BASEURL patch: %s\n", baseURL)
|
||||
|
||||
attr.Val = fmt.Sprintf(
|
||||
"/%s://%s/%s",
|
||||
baseURL.Scheme,
|
||||
baseURL.Host,
|
||||
strings.TrimPrefix(attr.Val, "/"),
|
||||
)
|
||||
attr.Val = escape(attr.Val)
|
||||
attr.Val = fmt.Sprintf("/%s", attr.Val)
|
||||
|
||||
log.Printf("root rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
||||
}
|
||||
|
||||
// Document-relative URLs: These are relative to the current document's path and don't start with a "/".
|
||||
func handleDocumentRelativePath(attr *html.Attribute, baseURL *url.URL) {
|
||||
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
|
||||
attr.Val = fmt.Sprintf(
|
||||
"%s://%s/%s%s",
|
||||
baseURL.Scheme,
|
||||
strings.Trim(baseURL.Host, "/"),
|
||||
strings.Trim(baseURL.RawPath, "/"),
|
||||
strings.Trim(attr.Val, "/"),
|
||||
)
|
||||
attr.Val = escape(attr.Val)
|
||||
attr.Val = fmt.Sprintf("/%s", attr.Val)
|
||||
log.Printf("doc rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
||||
}
|
||||
|
||||
// full URIs beginning with https?://proxiedsite.com
|
||||
func handleAbsolutePath(attr *html.Attribute, baseURL *url.URL) {
|
||||
// check if valid URL
|
||||
log.Printf("PROCESSING: key: %s val: %s\n", attr.Key, attr.Val)
|
||||
u, err := url.Parse(attr.Val)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if !(u.Scheme == "http" || u.Scheme == "https") {
|
||||
return
|
||||
}
|
||||
attr.Val = fmt.Sprintf("/%s", escape(strings.TrimPrefix(attr.Val, "/")))
|
||||
log.Printf("abs url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
||||
}
|
||||
|
||||
// handle edge cases for special attributes
|
||||
func (r *HTMLTokenURLRewriter) handleSpecialAttr(token *html.Token, attr *html.Attribute, baseURL *url.URL) {
|
||||
switch {
|
||||
// srcset attribute doesn't contain a single URL but a comma-separated list of URLs, each potentially followed by a space and a descriptor (like a width, pixel density, or other conditions).
|
||||
case token.Data == "img" && attr.Key == "srcset":
|
||||
handleSrcSet(attr, baseURL)
|
||||
case token.Data == "source" && attr.Key == "srcset":
|
||||
handleSrcSet(attr, baseURL)
|
||||
// meta with http-equiv="refresh": The content attribute of a meta tag, when used for a refresh directive, contains a time interval followed by a URL, like content="5;url=http://example.com/".
|
||||
case token.Data == "meta" && attr.Key == "content" && regexp.MustCompile(`^\d+;url=`).MatchString(attr.Val):
|
||||
handleMetaRefresh(attr, baseURL)
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
func handleMetaRefresh(attr *html.Attribute, baseURL *url.URL) {
|
||||
sec := strings.Split(attr.Val, ";url=")[0]
|
||||
url := strings.Split(attr.Val, ";url=")[1]
|
||||
f := &html.Attribute{Val: url, Key: "src"}
|
||||
handleURLPart(f, baseURL)
|
||||
attr.Val = fmt.Sprintf("%s;url=%s", sec, url)
|
||||
}
|
||||
|
||||
func handleSrcSet(attr *html.Attribute, baseURL *url.URL) {
|
||||
var srcSetBuilder strings.Builder
|
||||
srcSetItems := strings.Split(attr.Val, ",")
|
||||
|
||||
for i, srcItem := range srcSetItems {
|
||||
srcParts := strings.Fields(srcItem) // Fields splits around whitespace, trimming them
|
||||
|
||||
if len(srcParts) == 0 {
|
||||
continue // skip empty items
|
||||
}
|
||||
|
||||
// rewrite each URL part by passing in fake attribute
|
||||
f := &html.Attribute{Val: srcParts[0], Key: "src"}
|
||||
handleURLPart(f, baseURL)
|
||||
urlPart := f.Key
|
||||
|
||||
// First srcset item without a descriptor
|
||||
if i == 0 && (len(srcParts) == 1 || !strings.HasSuffix(srcParts[1], "x")) {
|
||||
srcSetBuilder.WriteString(urlPart)
|
||||
} else {
|
||||
srcSetBuilder.WriteString(fmt.Sprintf("%s %s", urlPart, srcParts[1]))
|
||||
}
|
||||
|
||||
if i < len(srcSetItems)-1 {
|
||||
srcSetBuilder.WriteString(",") // Add comma for all but last item
|
||||
}
|
||||
}
|
||||
|
||||
attr.Val = srcSetBuilder.String()
|
||||
log.Printf("srcset url rewritten-> '%s'='%s'", attr.Key, attr.Val)
|
||||
}
|
||||
|
||||
// escape percent-encodes str as a URL path segment, but restores "/" to a
// literal slash so nested proxy paths like "/https://host/a/b" stay readable.
func escape(str string) string {
	encoded := url.PathEscape(str)
	return strings.ReplaceAll(encoded, "%2F", "/")
}
|
||||
@@ -0,0 +1,91 @@
|
||||
package rewriters
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
"golang.org/x/net/html/atom"
|
||||
)
|
||||
|
||||
// ScriptInjectorRewriter implements HTMLTokenRewriter
// ScriptInjectorRewriter is a struct that injects JS into the page
// It uses an HTML tokenizer to process HTML content and injects JS at a specified location
type ScriptInjectorRewriter struct {
	execTime ScriptExecTime // when the injected script should execute (see constants below)
	script   string         // raw JS source to inject (without <script> tags)
}
|
||||
|
||||
// ScriptExecTime selects when an injected script executes relative to
// document lifecycle events.
type ScriptExecTime int

const (
	// BeforeDOMContentLoaded emits the script inline so it runs as soon
	// as the parser reaches it.
	BeforeDOMContentLoaded ScriptExecTime = iota
	// AfterDOMContentLoaded defers the script to the DOMContentLoaded event.
	AfterDOMContentLoaded
	// AfterDOMIdle runs the script once DOM mutations have quiesced
	// (see after_dom_idle_script_injector.js).
	AfterDOMIdle
)
|
||||
|
||||
func (r *ScriptInjectorRewriter) ShouldModify(token *html.Token) bool {
|
||||
// modify if token == <head>
|
||||
return token.DataAtom == atom.Head && token.Type == html.StartTagToken
|
||||
}
|
||||
|
||||
//go:embed after_dom_idle_script_injector.js
|
||||
var afterDomIdleScriptInjector string
|
||||
|
||||
func (r *ScriptInjectorRewriter) ModifyToken(token *html.Token) (string, string) {
|
||||
switch {
|
||||
case r.execTime == BeforeDOMContentLoaded:
|
||||
return "", fmt.Sprintf("\n<script>\n%s\n</script>\n", r.script)
|
||||
|
||||
case r.execTime == AfterDOMContentLoaded:
|
||||
return "", fmt.Sprintf("\n<script>\ndocument.addEventListener('DOMContentLoaded', () => { %s });\n</script>", r.script)
|
||||
|
||||
case r.execTime == AfterDOMIdle:
|
||||
s := strings.Replace(afterDomIdleScriptInjector, `'SCRIPT_CONTENT_PARAM'`, r.script, 1)
|
||||
return "", fmt.Sprintf("\n<script>\n%s\n</script>\n", s)
|
||||
|
||||
default:
|
||||
return "", ""
|
||||
}
|
||||
}
|
||||
|
||||
// applies parameters by string replacement of the template script
|
||||
func (r *ScriptInjectorRewriter) applyParams(params map[string]string) {
|
||||
// Sort the keys by length in descending order
|
||||
keys := make([]string, 0, len(params))
|
||||
for key := range params {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
return len(keys[i]) > len(keys[j])
|
||||
})
|
||||
|
||||
for _, key := range keys {
|
||||
r.script = strings.ReplaceAll(r.script, key, params[key])
|
||||
}
|
||||
}
|
||||
|
||||
// NewScriptInjectorRewriter implements a HtmlTokenRewriter
|
||||
// and injects JS into the page for execution at a particular time
|
||||
func NewScriptInjectorRewriter(script string, execTime ScriptExecTime) *ScriptInjectorRewriter {
|
||||
return &ScriptInjectorRewriter{
|
||||
execTime: execTime,
|
||||
script: script,
|
||||
}
|
||||
}
|
||||
|
||||
// NewScriptInjectorRewriterWith implements a HtmlTokenRewriter
|
||||
// and injects JS into the page for execution at a particular time
|
||||
// accepting arguments into the script, which will be added via a string replace
|
||||
// the params map represents the key-value pair of the params.
|
||||
// the key will be string replaced with the value
|
||||
func NewScriptInjectorRewriterWithParams(script string, execTime ScriptExecTime, params map[string]string) *ScriptInjectorRewriter {
|
||||
rr := &ScriptInjectorRewriter{
|
||||
execTime: execTime,
|
||||
script: script,
|
||||
}
|
||||
rr.applyParams(params)
|
||||
return rr
|
||||
}
|
||||
23
ruleset.yaml
Normal file
23
ruleset.yaml
Normal file
@@ -0,0 +1,23 @@
|
||||
- domain: example.com
|
||||
domains:
|
||||
- www.beispiel.de
|
||||
googleCache: true
|
||||
headers:
|
||||
x-forwarded-for: none
|
||||
referer: none
|
||||
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
|
||||
cookie: privacy=1
|
||||
regexRules:
|
||||
- match: <script\s+([^>]*\s+)?src="(/)([^"]*)"
|
||||
replace: <script $1 src="/https://www.example.com/$3"
|
||||
injections:
|
||||
- position: head # Position where to inject the code
|
||||
append: |
|
||||
<script>
|
||||
window.localStorage.clear();
|
||||
console.log("test");
|
||||
alert("Hello!");
|
||||
</script>
|
||||
- position: h1
|
||||
replace: |
|
||||
<h1>An example with a ladder ;-)</h1>
|
||||
35
rulesets/ca/_multi-metroland-media-group.yaml
Normal file
35
rulesets/ca/_multi-metroland-media-group.yaml
Normal file
@@ -0,0 +1,35 @@
|
||||
- domains:
|
||||
- www.thestar.com
|
||||
- www.niagarafallsreview.ca
|
||||
- www.stcatharinesstandard.ca
|
||||
- www.thepeterboroughexaminer.com
|
||||
- www.therecord.com
|
||||
- www.thespec.com
|
||||
- www.wellandtribune.ca
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
window.localStorage.clear();
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const paywall = document.querySelectorAll('div.subscriber-offers');
|
||||
paywall.forEach(el => { el.remove(); });
|
||||
const subscriber_only = document.querySelectorAll('div.subscriber-only');
|
||||
for (const elem of subscriber_only) {
|
||||
if (elem.classList.contains('encrypted-content') && dompurify_loaded) {
|
||||
const parser = new DOMParser();
|
||||
const doc = parser.parseFromString('<div>' + DOMPurify.sanitize(unscramble(elem.innerText)) + '</div>', 'text/html');
|
||||
const content_new = doc.querySelector('div');
|
||||
elem.parentNode.replaceChild(content_new, elem);
|
||||
}
|
||||
elem.removeAttribute('style');
|
||||
elem.removeAttribute('class');
|
||||
}
|
||||
const banners = document.querySelectorAll('div.subscription-required, div.redacted-overlay, div.subscriber-hide, div.tnt-ads-container');
|
||||
banners.forEach(el => { el.remove(); });
|
||||
const ads = document.querySelectorAll('div.tnt-ads-container, div[class*="adLabelWrapper"]');
|
||||
ads.forEach(el => { el.remove(); });
|
||||
const recommendations = document.querySelectorAll('div[id^="tncms-region-article"]');
|
||||
recommendations.forEach(el => { el.remove(); });
|
||||
});
|
||||
</script>
|
||||
24
rulesets/ch/nzz-ch.yaml
Normal file
24
rulesets/ch/nzz-ch.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
- domain: www.nzz.ch
|
||||
paths:
|
||||
- /international
|
||||
- /sport
|
||||
- /wirtschaft
|
||||
- /technologie
|
||||
- /feuilleton
|
||||
- /zuerich
|
||||
- /wissenschaft
|
||||
- /gesellschaft
|
||||
- /panorama
|
||||
- /mobilitaet
|
||||
- /reisen
|
||||
- /meinung
|
||||
- /finanze
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const paywall = document.querySelector('.dynamic-regwall');
|
||||
removeDOMElement(paywall)
|
||||
});
|
||||
</script>
|
||||
9
rulesets/de/tagesspiegel-de.yaml
Normal file
9
rulesets/de/tagesspiegel-de.yaml
Normal file
@@ -0,0 +1,9 @@
|
||||
# loads amp version of page
|
||||
- domain: tagesspiegel.de
|
||||
headers:
|
||||
content-security-policy: script-src 'self';
|
||||
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
|
||||
urlMods:
|
||||
query:
|
||||
- key: amp
|
||||
value: 1
|
||||
20
rulesets/gb/ft-com.yaml
Normal file
20
rulesets/gb/ft-com.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
- domain: www.ft.com
|
||||
headers:
|
||||
referer: https://t.co/x?amp=1
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const styleTags = document.querySelectorAll('link[rel="stylesheet"]');
|
||||
styleTags.forEach(el => {
|
||||
const href = el.getAttribute('href').substring(1);
|
||||
const updatedHref = href.replace(/(https?:\/\/.+?)\/{2,}/, '$1/');
|
||||
el.setAttribute('href', updatedHref);
|
||||
});
|
||||
setTimeout(() => {
|
||||
const cookie = document.querySelectorAll('.o-cookie-message, .js-article-ribbon, .o-ads, .o-banner, .o-message, .article__content-sign-up');
|
||||
cookie.forEach(el => { el.remove(); });
|
||||
}, 1000);
|
||||
})
|
||||
</script>
|
||||
19
rulesets/us/_multi-conde-nast.yaml
Normal file
19
rulesets/us/_multi-conde-nast.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
- domains:
|
||||
- www.architecturaldigest.com
|
||||
- www.bonappetit.com
|
||||
- www.cntraveler.com
|
||||
- www.epicurious.com
|
||||
- www.gq.com
|
||||
- www.newyorker.com
|
||||
- www.vanityfair.com
|
||||
- www.vogue.com
|
||||
- www.wired.com
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const banners = document.querySelectorAll('.paywall-bar, div[class^="MessageBannerWrapper-"]');
|
||||
banners.forEach(el => { el.remove(); });
|
||||
});
|
||||
</script>
|
||||
16
rulesets/us/americanbanker-com.yaml
Normal file
16
rulesets/us/americanbanker-com.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
- domain: americanbanker.com
|
||||
paths:
|
||||
- /news
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const inlineGate = document.querySelector('.inline-gate');
|
||||
if (inlineGate) {
|
||||
inlineGate.classList.remove('inline-gate');
|
||||
const inlineGated = document.querySelectorAll('.inline-gated');
|
||||
for (const elem of inlineGated) { elem.classList.remove('inline-gated'); }
|
||||
}
|
||||
});
|
||||
</script>
|
||||
7
rulesets/us/medium-com.yaml
Normal file
7
rulesets/us/medium-com.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
- domain: medium.com
|
||||
headers:
|
||||
referer: https://t.co/x?amp=1
|
||||
x-forwarded-for: none
|
||||
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
|
||||
content-security-policy: script-src 'self';
|
||||
cookie:
|
||||
17
rulesets/us/nytimes-com.yaml
Normal file
17
rulesets/us/nytimes-com.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
- domains:
|
||||
- www.nytimes.com
|
||||
- www.time.com
|
||||
headers:
|
||||
user-agent: Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
|
||||
cookie: nyt-a=; nyt-gdpr=0; nyt-geo=DE; nyt-privacy=1
|
||||
referer: https://www.google.com/
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
window.localStorage.clear();
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const banners = document.querySelectorAll('div[data-testid="inline-message"], div[id^="ad-"], div[id^="leaderboard-"], div.expanded-dock, div.pz-ad-box, div[id="top-wrapper"], div[id="bottom-wrapper"]');
|
||||
banners.forEach(el => { el.remove(); });
|
||||
});
|
||||
</script>
|
||||
10
rulesets/us/usatoday-com.yaml
Normal file
10
rulesets/us/usatoday-com.yaml
Normal file
@@ -0,0 +1,10 @@
|
||||
- domain: www.usatoday.com
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const banners = document.querySelectorAll('div.roadblock-container, .gnt_nb, [aria-label="advertisement"], div[id="main-frame-error"]');
|
||||
banners.forEach(el => { el.remove(); });
|
||||
});
|
||||
</script>
|
||||
14
rulesets/us/washingtonpost-com.yaml
Normal file
14
rulesets/us/washingtonpost-com.yaml
Normal file
@@ -0,0 +1,14 @@
|
||||
- domain: www.washingtonpost.com
|
||||
injections:
|
||||
- position: head
|
||||
append: |
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
let paywall = document.querySelectorAll('div[data-qa$="-ad"], div[id="leaderboard-wrapper"], div[data-qa="subscribe-promo"]');
|
||||
paywall.forEach(el => { el.remove(); });
|
||||
const images = document.querySelectorAll('img');
|
||||
images.forEach(image => { image.parentElement.style.filter = ''; });
|
||||
const headimage = document.querySelectorAll('div .aspect-custom');
|
||||
headimage.forEach(image => { image.style.filter = ''; });
|
||||
});
|
||||
</script>
|
||||
3
styles/input.css
Normal file
3
styles/input.css
Normal file
@@ -0,0 +1,3 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
9
tailwind.config.js
Normal file
9
tailwind.config.js
Normal file
@@ -0,0 +1,9 @@
|
||||
/** @type {import('tailwindcss').Config} */
module.exports = {
  // Scan the handler HTML templates for class names to include in the build.
  content: ["./handlers/**/*.html"],
  theme: {
    extend: {},
  },
  plugins: [],
}
|
||||
|
||||
91
tests/package-lock.json
generated
Normal file
91
tests/package-lock.json
generated
Normal file
@@ -0,0 +1,91 @@
|
||||
{
|
||||
"name": "tests",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "tests",
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.40.0",
|
||||
"@types/node": "^20.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.40.0",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.40.0.tgz",
|
||||
"integrity": "sha512-PdW+kn4eV99iP5gxWNSDQCbhMaDVej+RXL5xr6t04nbKLCBwYtA046t7ofoczHOm8u6c+45hpDKQVZqtqwkeQg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"playwright": "1.40.0"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.10.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.0.tgz",
|
||||
"integrity": "sha512-D0WfRmU9TQ8I9PFx9Yc+EBHw+vSpIub4IDvQivcp26PtPrdMGAq5SDcpXEo/epqa/DXotVpekHiLNTg3iaKXBQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~5.26.4"
|
||||
}
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.40.0",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.40.0.tgz",
|
||||
"integrity": "sha512-gyHAgQjiDf1m34Xpwzaqb76KgfzYrhK7iih+2IzcOCoZWr/8ZqmdBw+t0RU85ZmfJMgtgAiNtBQ/KS2325INXw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"playwright-core": "1.40.0"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.40.0",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.40.0.tgz",
|
||||
"integrity": "sha512-fvKewVJpGeca8t0ipM56jkVSU6Eo0RmFvQ/MaCQNDYm+sdvKkMBBWTE1FdeMqIdumRaXXjZChWHvIzCGM/tA/Q==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "5.26.5",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
|
||||
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
||||
14
tests/package.json
Normal file
14
tests/package.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "tests",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.40.0",
|
||||
"@types/node": "^20.10.0"
|
||||
}
|
||||
}
|
||||
77
tests/playwright.config.ts
Normal file
77
tests/playwright.config.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { defineConfig, devices } from "@playwright/test";
|
||||
|
||||
/**
|
||||
* Read environment variables from file.
|
||||
* https://github.com/motdotla/dotenv
|
||||
*/
|
||||
// require('dotenv').config();
|
||||
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
// Playwright runner configuration for the Ladder paywall tests.
export default defineConfig({
  testDir: "./tests",
  /* Run tests in files in parallel */
  fullyParallel: true,
  /* Fail the build on CI if you accidentally left test.only in the source code. */
  forbidOnly: !!process.env.CI,
  /* Retry on CI only */
  retries: process.env.CI ? 2 : 0,
  /* Opt out of parallel tests on CI. */
  workers: process.env.CI ? 1 : undefined,
  /* Reporter to use. See https://playwright.dev/docs/test-reporters */
  reporter: "html",
  /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
  use: {
    /* Base URL to use in actions like `await page.goto('/')`. */
    // baseURL: 'http://127.0.0.1:3000',

    /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
    trace: "on-first-retry",
  },

  /* Configure projects for major browsers */
  projects: [
    {
      name: "chromium",
      use: { ...devices["Desktop Chrome"] },
    },
    /*
    {
      name: 'firefox',
      use: { ...devices['Desktop Firefox'] },
    },

    {
      name: 'webkit',
      use: { ...devices['Desktop Safari'] },
    },
    */

    /* Test against mobile viewports. */
    // {
    //   name: 'Mobile Chrome',
    //   use: { ...devices['Pixel 5'] },
    // },
    // {
    //   name: 'Mobile Safari',
    //   use: { ...devices['iPhone 12'] },
    // },

    /* Test against branded browsers. */
    // {
    //   name: 'Microsoft Edge',
    //   use: { ...devices['Desktop Edge'], channel: 'msedge' },
    // },
    // {
    //   name: 'Google Chrome',
    //   use: { ...devices['Desktop Chrome'], channel: 'chrome' },
    // },
  ],
  /* Run your local dev server before starting the tests */
  // webServer: {
  //   command: 'npm run start',
  //   url: 'http://127.0.0.1:3000',
  //   reuseExistingServer: !process.env.CI,
  // },
});
|
||||
2
tests/run_test.sh
Normal file
2
tests/run_test.sh
Normal file
@@ -0,0 +1,2 @@
|
||||
# Run the Playwright test suite, then open the generated HTML report.
npx playwright test
npx playwright show-report
|
||||
18
tests/tests/www-wellandtribune-ca.spec.ts
Normal file
18
tests/tests/www-wellandtribune-ca.spec.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { expect, test } from "@playwright/test";
|
||||
|
||||
const paywallText = "This article is exclusive to subscribers.";
|
||||
const articleURL =
|
||||
"https://www.wellandtribune.ca/news/niagara-region/niagara-transit-commission-rejects-council-request-to-reduce-its-budget-increase/article_e9fb424c-8df5-58ae-a6c3-3648e2a9df66.html";
|
||||
|
||||
const ladderURL = "http://localhost:8080";
|
||||
let domain = (new URL(articleURL)).host;
|
||||
|
||||
test(`${domain} has paywall by default`, async ({ page }) => {
|
||||
await page.goto(articleURL);
|
||||
await expect(page.getByText(paywallText)).toBeVisible();
|
||||
});
|
||||
|
||||
test(`${domain} + Ladder doesn't have paywall`, async ({ page }) => {
|
||||
await page.goto(`${ladderURL}/${articleURL}`);
|
||||
await expect(page.getByText(paywallText)).toBeVisible();
|
||||
});
|
||||
Reference in New Issue
Block a user