59 Commits

Author SHA1 Message Date
ladddder
ddba232a31 fixes most of the encoding issues in embedded JavaScript 2023-11-21 16:15:34 +01:00
Kevin Pham
79a229f28c handle srcset resource URL rewrites; monkey patch JS for URL rewrites 2023-11-20 23:42:50 -06:00
Kevin Pham
6222476684 forward content-type headers 2023-11-20 15:49:39 -06:00
Kevin Pham
5d46adc486 wip 2023-11-20 15:37:07 -06:00
Kevin Pham
1d88f14de2 rewrite resource URLs based on html tokenizer instead of regex 2023-11-20 11:38:53 -06:00
Kevin Pham
5035f65d6b wip 2023-11-19 20:59:55 -06:00
Kevin Pham
ee9066dedb refactor wip 2023-11-19 15:03:11 -06:00
Kevin Pham
98fa53287b fix nil pointer deref 2023-11-18 18:13:46 -06:00
Kevin Pham
f6341f2c3e begin refactor of proxy engine 2023-11-18 08:31:59 -06:00
Gianni Carafa
6d8e943df5 add env var docker run command 2023-11-16 14:14:17 +01:00
Gianni Carafa
68e5023ed9 Revert "remove rulesets from base repository"
This reverts commit 8d00e29c43.
2023-11-16 14:01:57 +01:00
Gianni Carafa
8d00e29c43 remove rulesets from base repository 2023-11-16 13:30:23 +01:00
Gianni Carafa
c8d39ea21f readd ruleset 2023-11-16 13:27:57 +01:00
Gianni Carafa
dae4afb55e fix typo 2023-11-16 13:10:55 +01:00
mms-gianni
a83503170e Merge pull request #41 from deoxykev/refactor_rulesets
refactor rulesets into separate files and add a ruleset compiler cli …
2023-11-16 13:07:11 +01:00
Kevin Pham
0eef3e5808 refactor rulesets into separate files and add a ruleset compiler cli flag 2023-11-15 15:30:23 -06:00
Gianni Carafa
7597ea2807 update README 2023-11-15 21:28:23 +01:00
Gianni Carafa
235dca8dd0 minor ruleset improvements 2023-11-15 21:04:42 +01:00
mms-gianni
191279c00c Merge pull request #40 from everywall/39-request-header-fields-too-large
fix request header fields too large
2023-11-15 20:46:09 +01:00
mms-gianni
f4060c3e78 Merge branch 'main' into 39-request-header-fields-too-large 2023-11-15 20:45:59 +01:00
mms-gianni
55284f0b24 Merge pull request #37 from deoxykev/organized_rulesets
Organized rulesets
2023-11-15 20:45:10 +01:00
mms-gianni
f7f4586032 Merge branch 'main' into organized_rulesets 2023-11-15 20:40:36 +01:00
Gianni Carafa
fe881ca661 use cookie method to empty cookie header 2023-11-15 16:48:00 +01:00
Gianni Carafa
86700d8828 set empty cookie 2023-11-15 16:34:56 +01:00
mms-gianni
7be62e2735 Update README.md 2023-11-15 14:55:21 +01:00
mms-gianni
5e76ff0879 Update README.md
Fix typo
2023-11-15 13:36:56 +01:00
Github action
ee641bf8f6 Generated stylesheet 2023-11-15 09:20:45 +00:00
Gianni Carafa
2fb089ea28 initial style.css 2023-11-15 10:20:05 +01:00
Gianni Carafa
9f857eca8b add permission to push to repository 2023-11-15 10:16:33 +01:00
Gianni Carafa
0673255fc8 remove styles.css from gitignore to allow github action to detect changes 2023-11-15 10:06:49 +01:00
Gianni Carafa
4dbc103cf7 generate css 2023-11-15 09:53:42 +01:00
mms-gianni
514facd2c0 Merge pull request #36 from joncrangle/tailwind-cli-build
improvement: Use Tailwind CLI to build stylesheet instead of using Play CDN
2023-11-15 09:32:28 +01:00
Kevin Pham
a8d920548c add feature to load ruleset from directory or gzip file on http server, refactor ruleset loading logic 2023-11-14 15:57:39 -06:00
Kevin Pham
e87d19d7f5 add ability to load rulesets from directory 2023-11-14 15:42:26 -06:00
Gianni Carafa
531b7da811 remove useless route 2023-11-14 22:27:34 +01:00
joncrangle
9a53f28b3f Update dev instructions 2023-11-14 11:52:19 -05:00
joncrangle
6cbccbfadb Update with development instructions 2023-11-14 10:46:13 -05:00
Gianni Carafa
b07d49f230 update README 2023-11-14 16:29:43 +01:00
mms-gianni
af10efb7f2 Merge pull request #31 from deoxykev/main
Add feature to modify URLs in ruleset | Fix Relative URLs
2023-11-14 16:09:07 +01:00
joncrangle
3f0f4207a1 Undo errant removal of meta viewport tag 2023-11-14 09:01:58 -05:00
joncrangle
2236c4fff9 Add embed declaration 2023-11-13 21:46:20 -05:00
joncrangle
78454f8713 improvement: tailwind cli to build stylesheet 2023-11-13 21:44:11 -05:00
mms-gianni
6bff28e18d Merge pull request #29 from joncrangle/add-clear-button
Add x button to clear search
2023-11-13 22:57:08 +01:00
Kevin Pham
cdd429e4be Merge branch 'main' into main 2023-11-13 07:50:54 -06:00
mms-gianni
11ee581fd4 Merge pull request #33 from joncrangle/mobile-improvement-and-spelling
Mobile improvement and spelling fixes
2023-11-13 09:13:48 +01:00
joncrangle
4e44a24261 Remove materialize function 2023-11-12 21:53:24 -05:00
joncrangle
0ddb029aae Fix spelling 2023-11-12 21:44:50 -05:00
joncrangle
a262afe035 Improvements for mobile 2023-11-12 21:42:53 -05:00
Kevin Pham
fdca9d39d9 Merge remote-tracking branch 'refs/remotes/origin/main' 2023-11-12 17:03:19 -06:00
Kevin Pham
30a6ab501d handle URL encoded URLs in proxy for other app integrations 2023-11-12 17:02:32 -06:00
Kevin Pham
7a51243ff4 Merge branch 'main' into main 2023-11-12 11:58:46 -06:00
Kevin Pham
c8b94dc702 remove debug logging messages 2023-11-12 11:52:43 -06:00
Kevin Pham
fbc9567820 Handle relative URLs when using proxy 2023-11-12 11:48:47 -06:00
Gianni C
4d5c25c148 Merge pull request #28 from joncrangle/ft.com
Add Ft.com
2023-11-12 18:11:47 +01:00
Kevin Pham
082868af2d Add feature to modify URLs in ruleset 2023-11-12 10:30:06 -06:00
joncrangle
a4abce78fb Add button to clear search 2023-11-12 00:21:46 -05:00
joncrangle
190de6d9c5 Remove article signup iframe 2023-11-11 20:19:07 -05:00
joncrangle
bdd19dcbb6 Remove console log 2023-11-11 20:02:00 -05:00
joncrangle
02e6b1c090 Add ft.com 2023-11-11 20:01:29 -05:00
51 changed files with 2300 additions and 275 deletions

42
.github/workflows/build-css.yaml vendored Normal file
View File

@@ -0,0 +1,42 @@
name: Build Tailwind CSS
on:
push:
paths:
- "handlers/form.html"
workflow_dispatch:
jobs:
tailwindbuilder:
permissions:
# Give the default GITHUB_TOKEN write permission to commit and push the
# added or changed files to the repository.
contents: write
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v3
-
name: Install pnpm
uses: pnpm/action-setup@v2
with:
version: 8
-
name: Build Tailwind CSS
run: pnpm build
-
name: Commit generated stylesheet
run: |
if git diff --quiet cmd/styles.css; then
echo "No changes to commit."
exit 0
else
echo "Changes detected, committing..."
git config --global user.name "Github action"
git config --global user.email "username@users.noreply.github.com"
git add cmd
git commit -m "Generated stylesheet"
git push
fi

1
.gitignore vendored
View File

@@ -2,3 +2,4 @@
ladder
VERSION
output.css

View File

@@ -17,7 +17,7 @@ Freedom of information is an essential pillar of democracy and informed decision
### Features
- [x] Bypass Paywalls
- [x] Remove CORS headers from responses, assets, and images ...
- [x] Apply domain based ruleset/code to modify response
- [x] Apply domain based ruleset/code to modify response / requested URL
- [x] Keep site browsable
- [x] API
- [x] Fetch RAW HTML
@@ -38,22 +38,22 @@ Freedom of information is an essential pillar of democracy and informed decision
- [ ] A key to share only one URL
### Limitations
Certain sites may display missing images or encounter formatting issues. This can be attributed to the site's reliance on JavaScript or CSS for image and resource loading, which presents a limitation when accessed through this proxy. If you prefer a full experience, please consider buying a subscription for the site.
Some sites do not expose their content to search engines, which means that the proxy cannot access the content. A future version will try to fetch the content from Google Cache.
Certain sites may display missing images or encounter formatting issues. This can be attributed to the site's reliance on JavaScript or CSS for image and resource loading, which presents a limitation when accessed through this proxy. If you prefer a full experience, please consider buying a subscription for the site.
## Installation
> **Warning:** If your instance will be publicly accessible, make sure to enable Basic Auth. This will prevent unauthorized users from using your proxy. If you do not enable Basic Auth, anyone can use your proxy to browse nasty/illegal stuff. And you will be responsible for it.
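A minimal sketch of enabling it, assuming the `USERPASS` environment variable (parsed as a `user:password` pair in `cmd/main.go`) with hypothetical credentials:

```bash
# hypothetical credentials -- replace with your own user:password pair
docker run -p 8080:8080 -d --env USERPASS=admin:changeme --name ladder ghcr.io/everywall/ladder:latest
```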
### Binary
1) Download binary [here](https://github.com/everywall/ladder/releases/latest)
2) Unpack and run the binary `./ladder`
2) Unpack and run the binary `./ladder -r https://t.ly/14PSf`
3) Open Browser (Default: http://localhost:8080)
### Docker
```bash
docker run -p 8080:8080 -d --name ladder ghcr.io/everywall/ladder:latest
docker run -p 8080:8080 -d --env RULESET=https://t.ly/14PSf --name ladder ghcr.io/everywall/ladder:latest
```
### Docker Compose
@@ -106,7 +106,7 @@ http://localhost:8080/ruleset
| `LOG_URLS` | Log fetched URLs | `true` |
| `DISABLE_FORM` | Disables URL Form Frontpage | `false` |
| `FORM_PATH` | Path to custom Form HTML | `` |
| `RULESET` | URL to a ruleset file | `https://raw.githubusercontent.com/everywall/ladder/main/ruleset.yaml` or `/path/to/my/rules.yaml` |
| `RULESET` | Path or URL to a ruleset file, accepts local directories | `https://raw.githubusercontent.com/everywall/ladder-rules/main/ruleset.yaml` or `/path/to/my/rules.yaml` or `/path/to/my/rules/` |
| `EXPOSE_RULESET` | Make your Ruleset available to other ladders | `true` |
| `ALLOWED_DOMAINS` | Comma separated list of allowed domains. Empty = no limitations | `` |
| `ALLOWED_DOMAINS_RULESET` | Allow Domains from Ruleset. false = no limitations | `false` |
@@ -115,12 +115,13 @@ http://localhost:8080/ruleset
### Ruleset
It is possible to apply custom rules to modify the response. This can be used to remove unwanted or modify elements from the page. The ruleset is a YAML file that contains a list of rules for each domain and is loaded on startup
It is possible to apply custom rules to modify the response or the requested URL. This can be used to remove unwanted elements from the page or to modify existing ones. The ruleset is a YAML file, a directory of YAML files, or a URL to a YAML file that contains a list of rules for each domain. These rules are loaded on startup.
There is a basic ruleset available in a separate repository [ruleset.yaml](https://raw.githubusercontent.com/everywall/ladder-rules/main/ruleset.yaml). Feel free to add your own rules and create a pull request.
See in [ruleset.yaml](ruleset.yaml) for an example.
```yaml
- domain: example.com # Inbcludes all subdomains
- domain: example.com # Includes all subdomains
domains: # Additional domains to apply the rule
- www.example.de
- www.beispiel.de
@@ -154,5 +155,30 @@ See in [ruleset.yaml](ruleset.yaml) for an example.
<h1>My Custom Title</h1>
- position: .left-content article # Position where to inject the code into DOM
prepend: |
<h2>Suptitle</h2>
<h2>Subtitle</h2>
- domain: demo.com
headers:
content-security-policy: script-src 'self';
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
urlMods: # Modify the URL
query:
- key: amp # (this will append ?amp=1 to the URL)
value: 1
domain:
- match: www # regex to match part of domain
replace: amp # (this would modify the domain from www.demo.de to amp.demo.de)
path:
- match: ^ # regex to match part of path
replace: /amp/ # (modify the url from https://www.demo.com/article/ to https://www.demo.de/amp/article/)
```
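A directory of rules can also be compiled into a single file with the CLI flags defined in `cmd/main.go`; a sketch, assuming a hypothetical local `./rulesets/` directory of YAML files:

```bash
# merge a directory of YAML rules into one ruleset.yaml
./ladder --ruleset ./rulesets/ --merge-rulesets --merge-rulesets-output ruleset.yaml

# or emit a gzip-compressed ruleset instead
./ladder --ruleset ./rulesets/ --merge-rulesets-gzip --merge-rulesets-output ruleset.gz
```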
## Development
To run a development server at http://localhost:8080:
```bash
echo "dev" > handlers/VERSION
RULESET="./ruleset.yaml" go run cmd/main.go
```
This project uses [pnpm](https://pnpm.io/) to build a stylesheet with the [Tailwind CSS](https://tailwindcss.com/) classes. For local development, if you modify styles in `form.html`, run `pnpm build` to generate a new stylesheet.
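A sketch of that flow, assuming pnpm is already installed:

```bash
pnpm install   # fetches the tailwindcss and minify devDependencies
pnpm build     # regenerates the minified cmd/styles.css
```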

View File

@@ -1,13 +1,14 @@
package main
import (
_ "embed"
"embed"
"fmt"
"log"
"os"
"strings"
"ladder/handlers"
"ladder/internal/cli"
"github.com/akamensky/argparse"
"github.com/gofiber/fiber/v2"
@@ -18,6 +19,9 @@ import (
//go:embed favicon.ico
var faviconData string
//go:embed styles.css
var cssData embed.FS
func main() {
parser := argparse.NewParser("ladder", "Every Wall needs a Ladder")
@@ -36,11 +40,46 @@ func main() {
Help: "This will spawn multiple processes listening",
})
verbose := parser.Flag("v", "verbose", &argparse.Options{
Required: false,
Help: "Adds verbose logging",
})
// TODO: add version flag that reads from handlers/VERSION
ruleset := parser.String("r", "ruleset", &argparse.Options{
Required: false,
Help: "File, Directory or URL to a ruleset.yaml. Overrides RULESET environment variable.",
})
mergeRulesets := parser.Flag("", "merge-rulesets", &argparse.Options{
Required: false,
Help: "Compiles a directory of yaml files into a single ruleset.yaml. Requires --ruleset arg.",
})
mergeRulesetsGzip := parser.Flag("", "merge-rulesets-gzip", &argparse.Options{
Required: false,
Help: "Compiles a directory of yaml files into a single ruleset.gz Requires --ruleset arg.",
})
mergeRulesetsOutput := parser.String("", "merge-rulesets-output", &argparse.Options{
Required: false,
Help: "Specify output file for --merge-rulesets and --merge-rulesets-gzip. Requires --ruleset and --merge-rulesets args.",
})
err := parser.Parse(os.Args)
if err != nil {
fmt.Print(parser.Usage(err))
}
// utility cli flag to compile ruleset directory into single ruleset.yaml
if *mergeRulesets || *mergeRulesetsGzip {
err = cli.HandleRulesetMerge(ruleset, mergeRulesets, mergeRulesetsGzip, mergeRulesetsOutput)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
os.Exit(0)
}
if os.Getenv("PREFORK") == "true" {
*prefork = true
}
@@ -52,6 +91,7 @@ func main() {
},
)
// TODO: move to cmd/auth.go
userpass := os.Getenv("USERPASS")
if userpass != "" {
userpass := strings.Split(userpass, ":")
@@ -62,6 +102,7 @@ func main() {
}))
}
// TODO: move to handlers/favicon.go
app.Use(favicon.New(favicon.Config{
Data: []byte(faviconData),
URL: "/favicon.ico",
@@ -75,12 +116,27 @@ func main() {
}
app.Get("/", handlers.Form)
// TODO: move this logic to handlers/styles.go
app.Get("/styles.css", func(c *fiber.Ctx) error {
cssData, err := cssData.ReadFile("styles.css")
if err != nil {
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
}
c.Set("Content-Type", "text/css")
return c.Send(cssData)
})
app.Get("ruleset", handlers.Ruleset)
app.Get("raw/*", handlers.Raw)
app.Get("api/*", handlers.Api)
app.Get("ruleset", handlers.Raw)
app.Get("/*", handlers.ProxySite)
proxyOpts := &handlers.ProxyOptions{
Verbose: *verbose,
RulesetPath: *ruleset,
}
app.Get("/*", handlers.NewProxySiteHandler(proxyOpts))
log.Fatal(app.Listen(":" + *port))
}

1
cmd/styles.css Normal file

File diff suppressed because one or more lines are too long

View File

@@ -9,10 +9,11 @@ services:
environment:
- PORT=8080
- RULESET=/app/ruleset.yaml
#- ALLOWED_DOMAINS=example.com,example.org
#- ALLOWED_DOMAINS_RULESET=false
#- EXPOSE_RULESET=true
#- PREFORK=false
#- DISABLE_FORM=fase
#- DISABLE_FORM=false
#- FORM_PATH=/app/form.html
#- X_FORWARDED_FOR=66.249.66.1
#- USER_AGENT=Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)

3
go.mod
View File

@@ -24,6 +24,7 @@ require (
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasthttp v1.50.0 // indirect
github.com/valyala/tcplisten v1.0.0 // indirect
golang.org/x/net v0.18.0 // indirect
golang.org/x/net v0.18.0
golang.org/x/sys v0.14.0 // indirect
golang.org/x/term v0.14.0
)

2
go.sum
View File

@@ -68,6 +68,8 @@ golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9sn
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8=
golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=

View File

@@ -3,8 +3,9 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>ladder</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="/styles.css">
</head>
<body class="antialiased text-slate-500 dark:text-slate-400 bg-white dark:bg-slate-900">
@@ -16,12 +17,15 @@
<header>
<h1 class="text-center text-3xl sm:text-4xl font-extrabold text-slate-900 tracking-tight dark:text-slate-200">ladddddddder</h1>
</header>
<form id="inputForm" method="get" class="mx-4">
<form id="inputForm" method="get" class="mx-4 relative">
<div>
<input type="text" id="inputField" placeholder="Proxy Search" name="inputField" class="w-full text-sm leading-6 text-slate-400 rounded-md ring-1 ring-slate-900/10 shadow-sm py-1.5 pl-2 pr-3 hover:ring-slate-300 dark:bg-slate-800 dark:highlight-white/5 dark:hover:bg-slate-700" required autofocus>
<button id="clearButton" type="button" aria-label="Clear Search" title="Clear Search" class="hidden absolute inset-y-0 right-0 items-center pr-2 hover:text-slate-400 hover:dark:text-slate-300" tabindex="-1">
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M18 6 6 18"/><path d="m6 6 12 12"/></svg>
</button>
</div>
</form>
<footer class="mt-10 text-center text-slate-600 dark:text-slate-400">
<footer class="mt-10 mx-4 text-center text-slate-600 dark:text-slate-400">
<p>
Code Licensed Under GPL v3.0 |
<a href="https://github.com/everywall/ladder" class="hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300">View Source</a> |
@@ -31,9 +35,6 @@
</div>
<script>
document.addEventListener('DOMContentLoaded', function () {
M.AutoInit();
});
document.getElementById('inputForm').addEventListener('submit', function (e) {
e.preventDefault();
let url = document.getElementById('inputField').value;
@@ -43,6 +44,19 @@
window.location.href = '/' + url;
return false;
});
document.getElementById('inputField').addEventListener('input', function() {
const clearButton = document.getElementById('clearButton');
if (this.value.trim().length > 0) {
clearButton.style.display = 'block';
} else {
clearButton.style.display = 'none';
}
});
document.getElementById('clearButton').addEventListener('click', function() {
document.getElementById('inputField').value = '';
this.style.display = 'none';
document.getElementById('inputField').focus();
});
</script>
<style>

View File

@@ -10,34 +10,97 @@ import (
"regexp"
"strings"
"ladder/pkg/ruleset"
"ladder/proxychain"
rx "ladder/proxychain/requestmodifers"
tx "ladder/proxychain/responsemodifers"
"github.com/PuerkitoBio/goquery"
"github.com/gofiber/fiber/v2"
"gopkg.in/yaml.v3"
)
var (
UserAgent = getenv("USER_AGENT", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
ForwardedFor = getenv("X_FORWARDED_FOR", "66.249.66.1")
rulesSet = loadRules()
allowedDomains = strings.Split(os.Getenv("ALLOWED_DOMAINS"), ",")
rulesSet = ruleset.NewRulesetFromEnv()
allowedDomains = []string{}
)
func ProxySite(c *fiber.Ctx) error {
// Get the url from the URL
url := c.Params("*")
func init() {
allowedDomains = strings.Split(os.Getenv("ALLOWED_DOMAINS"), ",")
if os.Getenv("ALLOWED_DOMAINS_RULESET") == "true" {
allowedDomains = append(allowedDomains, rulesSet.Domains()...)
}
}
queries := c.Queries()
body, _, resp, err := fetchSite(url, queries)
type ProxyOptions struct {
RulesetPath string
Verbose bool
}
func NewProxySiteHandler(opts *ProxyOptions) fiber.Handler {
/*
var rs ruleset.RuleSet
if opts.RulesetPath != "" {
r, err := ruleset.NewRuleset(opts.RulesetPath)
if err != nil {
log.Println("ERROR:", err)
c.SendStatus(fiber.StatusInternalServerError)
return c.SendString(err.Error())
panic(err)
}
rs = r
}
*/
return func(c *fiber.Ctx) error {
proxychain := proxychain.
NewProxyChain().
SetDebugLogging(opts.Verbose).
SetRequestModifications(
rx.DeleteOutgoingCookies(),
//rx.RequestArchiveIs(),
).
AddResponseModifications(
tx.DeleteIncomingCookies(),
tx.RewriteHTMLResourceURLs(),
)
return proxychain.SetFiberCtx(c).Execute()
}
c.Set("Content-Type", resp.Header.Get("Content-Type"))
c.Set("Content-Security-Policy", resp.Header.Get("Content-Security-Policy"))
}
return c.SendString(body)
func modifyURL(uri string, rule ruleset.Rule) (string, error) {
newUrl, err := url.Parse(uri)
if err != nil {
return "", err
}
for _, urlMod := range rule.UrlMods.Domain {
re := regexp.MustCompile(urlMod.Match)
newUrl.Host = re.ReplaceAllString(newUrl.Host, urlMod.Replace)
}
for _, urlMod := range rule.UrlMods.Path {
re := regexp.MustCompile(urlMod.Match)
newUrl.Path = re.ReplaceAllString(newUrl.Path, urlMod.Replace)
}
v := newUrl.Query()
for _, query := range rule.UrlMods.Query {
if query.Value == "" {
v.Del(query.Key)
continue
}
v.Set(query.Key, query.Value)
}
newUrl.RawQuery = v.Encode()
if rule.GoogleCache {
newUrl, err = url.Parse("https://webcache.googleusercontent.com/search?q=cache:" + newUrl.String())
if err != nil {
return "", err
}
}
return newUrl.String(), nil
}
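// Worked example (hypothetical rule, mirroring the README's urlMods sample):
// a domain rule {match: "www", replace: "amp"} rewrites www.demo.com to amp.demo.com,
// a path rule {match: "^", replace: "/amp/"} prefixes the path with /amp/,
// and a query rule {key: "amp", value: "1"} appends amp=1 to the query string.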
func fetchSite(urlpath string, queries map[string]string) (string, *http.Request, *http.Response, error) {
@@ -63,18 +126,16 @@ func fetchSite(urlpath string, queries map[string]string) (string, *http.Request
log.Println(u.String() + urlQuery)
}
// Modify the URI according to ruleset
rule := fetchRule(u.Host, u.Path)
if rule.GoogleCache {
u, err = url.Parse("https://webcache.googleusercontent.com/search?q=cache:" + u.String())
url, err := modifyURL(u.String()+urlQuery, rule)
if err != nil {
return "", nil, nil, err
}
}
// Fetch the site
client := &http.Client{}
req, _ := http.NewRequest("GET", u.String()+urlQuery, nil)
req, _ := http.NewRequest("GET", url, nil)
if rule.Headers.UserAgent != "" {
req.Header.Set("User-Agent", rule.Headers.UserAgent)
@@ -114,6 +175,7 @@ func fetchSite(urlpath string, queries map[string]string) (string, *http.Request
}
if rule.Headers.CSP != "" {
//log.Println(rule.Headers.CSP)
resp.Header.Set("Content-Security-Policy", rule.Headers.CSP)
}
@@ -122,7 +184,7 @@ func fetchSite(urlpath string, queries map[string]string) (string, *http.Request
return body, req, resp, nil
}
func rewriteHtml(bodyB []byte, u *url.URL, rule Rule) string {
func rewriteHtml(bodyB []byte, u *url.URL, rule ruleset.Rule) string {
// Rewrite the HTML
body := string(bodyB)
@@ -156,63 +218,11 @@ func getenv(key, fallback string) string {
return value
}
func loadRules() RuleSet {
rulesUrl := os.Getenv("RULESET")
if rulesUrl == "" {
RulesList := RuleSet{}
return RulesList
}
log.Println("Loading rules")
var ruleSet RuleSet
if strings.HasPrefix(rulesUrl, "http") {
resp, err := http.Get(rulesUrl)
if err != nil {
log.Println("ERROR:", err)
}
defer resp.Body.Close()
if resp.StatusCode >= 400 {
log.Println("ERROR:", resp.StatusCode, rulesUrl)
}
body, err := io.ReadAll(resp.Body)
if err != nil {
log.Println("ERROR:", err)
}
yaml.Unmarshal(body, &ruleSet)
if err != nil {
log.Println("ERROR:", err)
}
} else {
yamlFile, err := os.ReadFile(rulesUrl)
if err != nil {
log.Println("ERROR:", err)
}
yaml.Unmarshal(yamlFile, &ruleSet)
}
domains := []string{}
for _, rule := range ruleSet {
domains = append(domains, rule.Domain)
domains = append(domains, rule.Domains...)
if os.Getenv("ALLOWED_DOMAINS_RULESET") == "true" {
allowedDomains = append(allowedDomains, domains...)
}
}
log.Println("Loaded ", len(ruleSet), " rules for", len(domains), "Domains")
return ruleSet
}
func fetchRule(domain string, path string) Rule {
func fetchRule(domain string, path string) ruleset.Rule {
if len(rulesSet) == 0 {
return Rule{}
return ruleset.Rule{}
}
rule := Rule{}
rule := ruleset.Rule{}
for _, rule := range rulesSet {
domains := rule.Domains
if rule.Domain != "" {
@@ -231,7 +241,7 @@ func fetchRule(domain string, path string) Rule {
return rule
}
func applyRules(body string, rule Rule) string {
func applyRules(body string, rule ruleset.Rule) string {
if len(rulesSet) == 0 {
return body
}

View File

@@ -2,6 +2,7 @@
package handlers
import (
"ladder/pkg/ruleset"
"net/http"
"net/http/httptest"
"net/url"
@@ -13,7 +14,7 @@ import (
func TestProxySite(t *testing.T) {
app := fiber.New()
app.Get("/:url", ProxySite)
app.Get("/:url", NewProxySiteHandler(nil))
req := httptest.NewRequest("GET", "/https://example.com", nil)
resp, err := app.Test(req)
@@ -51,7 +52,7 @@ func TestRewriteHtml(t *testing.T) {
</html>
`
actual := rewriteHtml(bodyB, u, Rule{})
actual := rewriteHtml(bodyB, u, ruleset.Rule{})
assert.Equal(t, expected, actual)
}

View File

@@ -1,29 +0,0 @@
package handlers
type Regex struct {
Match string `yaml:"match"`
Replace string `yaml:"replace"`
}
type RuleSet []Rule
type Rule struct {
Domain string `yaml:"domain,omitempty"`
Domains []string `yaml:"domains,omitempty"`
Paths []string `yaml:"paths,omitempty"`
Headers struct {
UserAgent string `yaml:"user-agent,omitempty"`
XForwardedFor string `yaml:"x-forwarded-for,omitempty"`
Referer string `yaml:"referer,omitempty"`
Cookie string `yaml:"cookie,omitempty"`
CSP string `yaml:"content-security-policy,omitempty"`
} `yaml:"headers,omitempty"`
GoogleCache bool `yaml:"googleCache,omitempty"`
RegexRules []Regex `yaml:"regexRules"`
Injections []struct {
Position string `yaml:"position"`
Append string `yaml:"append"`
Prepend string `yaml:"prepend"`
Replace string `yaml:"replace"`
} `yaml:"injections"`
}

View File

@@ -0,0 +1,105 @@
package cli
import (
"fmt"
"io"
"io/fs"
"ladder/pkg/ruleset"
"os"
"golang.org/x/term"
)
// HandleRulesetMerge merges a set of ruleset files, specified by the rulesetPath or RULESET env variable, into either YAML or Gzip format.
// Exits the program with an error message if the ruleset path is not provided or if loading the ruleset fails.
//
// Parameters:
// - rulesetPath: A pointer to a string specifying the path to the ruleset file.
// - mergeRulesets: A pointer to a boolean indicating if a merge operation should be performed.
// - mergeRulesetsGzip: A pointer to a boolean indicating if the merge should be in Gzip format.
// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is printed to stdout.
//
// Returns:
// - An error if the ruleset loading or merging process fails, otherwise nil.
func HandleRulesetMerge(rulesetPath *string, mergeRulesets *bool, mergeRulesetsGzip *bool, mergeRulesetsOutput *string) error {
if *rulesetPath == "" {
*rulesetPath = os.Getenv("RULESET")
}
if *rulesetPath == "" {
fmt.Println("ERROR: no ruleset provided. Try again with --ruleset <ruleset.yaml>")
os.Exit(1)
}
rs, err := ruleset.NewRuleset(*rulesetPath)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
if *mergeRulesetsGzip {
return gzipMerge(rs, mergeRulesetsOutput)
}
return yamlMerge(rs, mergeRulesetsOutput)
}
// gzipMerge takes a RuleSet and an optional output file path pointer. It compresses the RuleSet into Gzip format.
// If the output file path is provided, the compressed data is written to this file. Otherwise, it prints a warning
// and outputs the binary data to stdout
//
// Parameters:
// - rs: The ruleset.RuleSet to be compressed.
// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is directed to stdout.
//
// Returns:
// - An error if compression or file writing fails, otherwise nil.
func gzipMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
gzip, err := rs.GzipYaml()
if err != nil {
return err
}
if *mergeRulesetsOutput != "" {
out, err := os.Create(*mergeRulesetsOutput)
defer out.Close()
_, err = io.Copy(out, gzip)
if err != nil {
return err
}
}
if term.IsTerminal(int(os.Stdout.Fd())) {
println("WARNING: binary output can mess up your terminal. Use '--merge-rulesets-output <ruleset.gz>' or pipe it to a file.")
os.Exit(1)
}
_, err = io.Copy(os.Stdout, gzip)
if err != nil {
return err
}
return nil
}
// yamlMerge takes a RuleSet and an optional output file path pointer. It converts the RuleSet into YAML format.
// If the output file path is provided, the YAML data is written to this file. If not, the YAML data is printed to stdout.
//
// Parameters:
// - rs: The ruleset.RuleSet to be converted to YAML.
// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is printed to stdout.
//
// Returns:
// - An error if YAML conversion or file writing fails, otherwise nil.
func yamlMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
yaml, err := rs.Yaml()
if err != nil {
return err
}
if *mergeRulesetsOutput == "" {
fmt.Printf(yaml)
os.Exit(0)
}
err = os.WriteFile(*mergeRulesetsOutput, []byte(yaml), fs.FileMode(os.O_RDWR))
if err != nil {
return fmt.Errorf("ERROR: failed to write merged YAML ruleset to '%s'\n", *mergeRulesetsOutput)
}
return nil
}

9
package.json Normal file
View File

@@ -0,0 +1,9 @@
{
"scripts": {
"build": "pnpx tailwindcss -i ./styles/input.css -o ./styles/output.css --build && pnpx minify ./styles/output.css > ./cmd/styles.css"
},
"devDependencies": {
"minify": "^10.5.2",
"tailwindcss": "^3.3.5"
}
}

286
pkg/ruleset/ruleset.go Normal file
View File

@@ -0,0 +1,286 @@
package ruleset
import (
"errors"
"fmt"
"io"
"log"
"net/http"
"os"
"path/filepath"
"regexp"
"strings"
"compress/gzip"
"gopkg.in/yaml.v3"
)
type Regex struct {
Match string `yaml:"match"`
Replace string `yaml:"replace"`
}
type KV struct {
Key string `yaml:"key"`
Value string `yaml:"value"`
}
type RuleSet []Rule
type Rule struct {
Domain string `yaml:"domain,omitempty"`
Domains []string `yaml:"domains,omitempty"`
Paths []string `yaml:"paths,omitempty"`
Headers struct {
UserAgent string `yaml:"user-agent,omitempty"`
XForwardedFor string `yaml:"x-forwarded-for,omitempty"`
Referer string `yaml:"referer,omitempty"`
Cookie string `yaml:"cookie,omitempty"`
CSP string `yaml:"content-security-policy,omitempty"`
} `yaml:"headers,omitempty"`
GoogleCache bool `yaml:"googleCache,omitempty"`
RegexRules []Regex `yaml:"regexRules,omitempty"`
UrlMods struct {
Domain []Regex `yaml:"domain,omitempty"`
Path []Regex `yaml:"path,omitempty"`
Query []KV `yaml:"query,omitempty"`
} `yaml:"urlMods,omitempty"`
Injections []struct {
Position string `yaml:"position,omitempty"`
Append string `yaml:"append,omitempty"`
Prepend string `yaml:"prepend,omitempty"`
Replace string `yaml:"replace,omitempty"`
} `yaml:"injections,omitempty"`
}
// NewRulesetFromEnv creates a new RuleSet based on the RULESET environment variable.
// It logs a warning and returns an empty RuleSet if the RULESET environment variable is not set.
// If the RULESET is set but the rules cannot be loaded, it panics.
func NewRulesetFromEnv() RuleSet {
rulesPath, ok := os.LookupEnv("RULESET")
if !ok {
log.Printf("WARN: No ruleset specified. Set the `RULESET` environment variable to load one for a better success rate.")
return RuleSet{}
}
ruleSet, err := NewRuleset(rulesPath)
if err != nil {
log.Println(err)
}
return ruleSet
}
// NewRuleset loads a RuleSet from a given string of rule paths, separated by semicolons.
// It supports loading rules from both local file paths and remote URLs.
// Returns a RuleSet and an error if any issues occur during loading.
func NewRuleset(rulePaths string) (RuleSet, error) {
ruleSet := RuleSet{}
errs := []error{}
rp := strings.Split(rulePaths, ";")
var remoteRegex = regexp.MustCompile(`^https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)`)
for _, rule := range rp {
rulePath := strings.Trim(rule, " ")
var err error
isRemote := remoteRegex.MatchString(rulePath)
if isRemote {
err = ruleSet.loadRulesFromRemoteFile(rulePath)
} else {
err = ruleSet.loadRulesFromLocalDir(rulePath)
}
if err != nil {
e := fmt.Errorf("WARN: failed to load ruleset from '%s'", rulePath)
errs = append(errs, errors.Join(e, err))
continue
}
}
	if len(errs) != 0 {
		e := fmt.Errorf("WARN: failed to load %d of %d rulesets", len(errs), len(rp))
		errs = append(errs, e)
		// panic if the user specified a local ruleset, but it wasn't found on disk
		// don't fail silently
		for _, err := range errs {
			if errors.Is(err, os.ErrNotExist) {
				e := fmt.Errorf("PANIC: ruleset not found: %s", err)
				panic(errors.Join(e, err))
}
}
// else, bubble up any errors, such as syntax or remote host issues
return ruleSet, errors.Join(errs...)
}
ruleSet.PrintStats()
return ruleSet, nil
}
// ================== RULESET loading logic ===================================
// loadRulesFromLocalDir loads rules from a local directory specified by the path.
// It walks through the directory, loading rules from YAML files.
// Returns an error if the directory cannot be accessed
// If there is an issue loading any file, it will be skipped
func (rs *RuleSet) loadRulesFromLocalDir(path string) error {
_, err := os.Stat(path)
if err != nil {
return err
}
yamlRegex := regexp.MustCompile(`.*\.ya?ml`)
err = filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
if isYaml := yamlRegex.MatchString(path); !isYaml {
return nil
}
err = rs.loadRulesFromLocalFile(path)
if err != nil {
log.Printf("WARN: failed to load directory ruleset '%s': %s, skipping", path, err)
return nil
}
log.Printf("INFO: loaded ruleset %s\n", path)
return nil
})
if err != nil {
return err
}
return nil
}
// loadRulesFromLocalFile loads rules from a local YAML file specified by the path.
// Returns an error if the file cannot be read or if there's a syntax error in the YAML.
func (rs *RuleSet) loadRulesFromLocalFile(path string) error {
yamlFile, err := os.ReadFile(path)
if err != nil {
e := fmt.Errorf("failed to read rules from local file: '%s'", path)
return errors.Join(e, err)
}
var r RuleSet
err = yaml.Unmarshal(yamlFile, &r)
if err != nil {
e := fmt.Errorf("failed to load rules from local file, possible syntax error in '%s'", path)
ee := errors.Join(e, err)
if _, ok := os.LookupEnv("DEBUG"); ok {
debugPrintRule(string(yamlFile), ee)
}
return ee
}
*rs = append(*rs, r...)
return nil
}
// loadRulesFromRemoteFile loads rules from a remote URL.
// It supports plain and gzip compressed content.
// Returns an error if there's an issue accessing the URL or if there's a syntax error in the YAML.
func (rs *RuleSet) loadRulesFromRemoteFile(rulesUrl string) error {
var r RuleSet
resp, err := http.Get(rulesUrl)
if err != nil {
e := fmt.Errorf("failed to load rules from remote url '%s'", rulesUrl)
return errors.Join(e, err)
}
defer resp.Body.Close()
if resp.StatusCode >= 400 {
e := fmt.Errorf("failed to load rules from remote url (%s) on '%s'", resp.Status, rulesUrl)
return errors.Join(e, err)
}
var reader io.Reader
isGzip := strings.HasSuffix(rulesUrl, ".gz") || strings.HasSuffix(rulesUrl, ".gzip") || resp.Header.Get("content-encoding") == "gzip"
if isGzip {
reader, err = gzip.NewReader(resp.Body)
if err != nil {
return fmt.Errorf("failed to create gzip reader for URL '%s' with status code '%s': %w", rulesUrl, resp.Status, err)
}
} else {
reader = resp.Body
}
err = yaml.NewDecoder(reader).Decode(&r)
if err != nil {
e := fmt.Errorf("failed to load rules from remote url '%s' with status code '%s' and possible syntax error", rulesUrl, resp.Status)
ee := errors.Join(e, err)
return ee
}
*rs = append(*rs, r...)
return nil
}
// ================= utility methods ==========================
// Yaml returns the ruleset as a Yaml string
func (rs *RuleSet) Yaml() (string, error) {
y, err := yaml.Marshal(rs)
if err != nil {
return "", err
}
return string(y), nil
}
// GzipYaml returns an io.Reader that streams the Gzip-compressed YAML representation of the RuleSet.
func (rs *RuleSet) GzipYaml() (io.Reader, error) {
pr, pw := io.Pipe()
go func() {
defer pw.Close()
gw := gzip.NewWriter(pw)
defer gw.Close()
if err := yaml.NewEncoder(gw).Encode(rs); err != nil {
gw.Close() // Ensure to close the gzip writer
pw.CloseWithError(err)
return
}
}()
return pr, nil
}
// Domains extracts and returns a slice of all domains present in the RuleSet.
func (rs *RuleSet) Domains() []string {
var domains []string
for _, rule := range *rs {
domains = append(domains, rule.Domain)
domains = append(domains, rule.Domains...)
}
return domains
}
// DomainCount returns the count of unique domains present in the RuleSet.
func (rs *RuleSet) DomainCount() int {
return len(rs.Domains())
}
// Count returns the total number of rules in the RuleSet.
func (rs *RuleSet) Count() int {
return len(*rs)
}
// PrintStats logs the number of rules and domains loaded in the RuleSet.
func (rs *RuleSet) PrintStats() {
log.Printf("INFO: Loaded %d rules for %d domains\n", rs.Count(), rs.DomainCount())
}
// debugPrintRule is a utility function for printing a rule and associated error for debugging purposes.
func debugPrintRule(rule string, err error) {
fmt.Println("------------------------------ BEGIN DEBUG RULESET -----------------------------")
fmt.Printf("%s\n", err.Error())
fmt.Println("--------------------------------------------------------------------------------")
fmt.Println(rule)
fmt.Println("------------------------------ END DEBUG RULESET -------------------------------")
}
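// Usage sketch for this package (both source paths are hypothetical):
// load rules from a local directory plus a remote URL, then re-serialize
// the merged set back to YAML.
//
//	rs, err := ruleset.NewRuleset("./rulesets/;https://example.com/extra-rules.yaml")
//	if err != nil {
//		log.Println("WARN:", err) // load failures are joined and bubbled up
//	}
//	rs.PrintStats()   // INFO: Loaded N rules for M domains
//	y, _ := rs.Yaml() // merged ruleset as a YAML string
//	fmt.Println(y)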

153
pkg/ruleset/ruleset_test.go Normal file
View File

@@ -0,0 +1,153 @@
package ruleset
import (
"os"
"path/filepath"
"testing"
"time"
"github.com/gofiber/fiber/v2"
"github.com/stretchr/testify/assert"
)
var (
validYAML = `
- domain: example.com
regexRules:
- match: "^http:"
replace: "https:"`
invalidYAML = `
- domain: [thisIsATestYamlThatIsMeantToFail.example]
regexRules:
- match: "^http:"
replace: "https:"
- match: "[incomplete"`
)
func TestLoadRulesFromRemoteFile(t *testing.T) {
app := fiber.New()
defer app.Shutdown()
app.Get("/valid-config.yml", func(c *fiber.Ctx) error {
c.SendString(validYAML)
return nil
})
app.Get("/invalid-config.yml", func(c *fiber.Ctx) error {
c.SendString(invalidYAML)
return nil
})
app.Get("/valid-config.gz", func(c *fiber.Ctx) error {
c.Set("Content-Type", "application/octet-stream")
rs, err := loadRuleFromString(validYAML)
if err != nil {
t.Errorf("failed to load valid yaml from string: %s", err.Error())
}
s, err := rs.GzipYaml()
if err != nil {
t.Errorf("failed to load gzip serialize yaml: %s", err.Error())
}
err = c.SendStream(s)
if err != nil {
t.Errorf("failed to stream gzip serialized yaml: %s", err.Error())
}
return nil
})
// Start the server in a goroutine
go func() {
if err := app.Listen("127.0.0.1:9999"); err != nil {
t.Errorf("Server failed to start: %s", err.Error())
}
}()
// Wait for the server to start
time.Sleep(time.Second * 1)
rs, err := NewRuleset("http://127.0.0.1:9999/valid-config.yml")
if err != nil {
t.Errorf("failed to load plaintext ruleset from http server: %s", err.Error())
}
assert.Equal(t, rs[0].Domain, "example.com")
rs, err = NewRuleset("http://127.0.0.1:9999/valid-config.gz")
if err != nil {
t.Errorf("failed to load gzipped ruleset from http server: %s", err.Error())
}
assert.Equal(t, rs[0].Domain, "example.com")
os.Setenv("RULESET", "http://127.0.0.1:9999/valid-config.gz")
rs = NewRulesetFromEnv()
if !assert.Equal(t, rs[0].Domain, "example.com") {
t.Error("expected no errors loading ruleset from gzip url using environment variable, but got one")
}
}
func loadRuleFromString(yaml string) (RuleSet, error) {
// Create a temporary file and load it
tmpFile, _ := os.CreateTemp("", "ruleset*.yaml")
defer os.Remove(tmpFile.Name())
tmpFile.WriteString(yaml)
rs := RuleSet{}
err := rs.loadRulesFromLocalFile(tmpFile.Name())
return rs, err
}
// TestLoadRulesFromLocalFile tests the loading of rules from a local YAML file.
func TestLoadRulesFromLocalFile(t *testing.T) {
rs, err := loadRuleFromString(validYAML)
if err != nil {
t.Errorf("Failed to load rules from valid YAML: %s", err)
}
assert.Equal(t, rs[0].Domain, "example.com")
assert.Equal(t, rs[0].RegexRules[0].Match, "^http:")
assert.Equal(t, rs[0].RegexRules[0].Replace, "https:")
_, err = loadRuleFromString(invalidYAML)
if err == nil {
t.Errorf("Expected an error when loading invalid YAML, but got none")
}
}
// TestLoadRulesFromLocalDir tests the loading of rules from a local nested directory full of yaml rulesets
func TestLoadRulesFromLocalDir(t *testing.T) {
// Create a temporary directory
baseDir, err := os.MkdirTemp("", "ruleset_test")
if err != nil {
t.Fatalf("Failed to create temporary directory: %s", err)
}
defer os.RemoveAll(baseDir)
// Create a nested subdirectory
nestedDir := filepath.Join(baseDir, "nested")
err = os.Mkdir(nestedDir, 0755)
if err != nil {
t.Fatalf("Failed to create nested directory: %s", err)
}
// Create a nested subdirectory
nestedTwiceDir := filepath.Join(nestedDir, "nestedTwice")
	err = os.Mkdir(nestedTwiceDir, 0755)
	if err != nil {
		t.Fatalf("Failed to create nested directory: %s", err)
	}
testCases := []string{"test.yaml", "test2.yaml", "test-3.yaml", "test 4.yaml", "1987.test.yaml.yml", "foobar.example.com.yaml", "foobar.com.yml"}
for _, fileName := range testCases {
filePath := filepath.Join(nestedDir, "2x-"+fileName)
os.WriteFile(filePath, []byte(validYAML), 0644)
filePath = filepath.Join(nestedDir, fileName)
os.WriteFile(filePath, []byte(validYAML), 0644)
filePath = filepath.Join(baseDir, "base-"+fileName)
os.WriteFile(filePath, []byte(validYAML), 0644)
}
rs := RuleSet{}
err = rs.loadRulesFromLocalDir(baseDir)
assert.NoError(t, err)
assert.Equal(t, rs.Count(), len(testCases)*3)
for _, rule := range rs {
assert.Equal(t, rule.Domain, "example.com")
assert.Equal(t, rule.RegexRules[0].Match, "^http:")
assert.Equal(t, rule.RegexRules[0].Replace, "https:")
}
}

370
proxychain/proxychain.go Normal file
View File

@@ -0,0 +1,370 @@
package proxychain
import (
"errors"
"fmt"
"io"
"log"
"net/http"
"net/url"
"strings"
"ladder/pkg/ruleset"
"github.com/gofiber/fiber/v2"
)
/*
ProxyChain manages the process of forwarding an HTTP request to an upstream server,
applying request and response modifications along the way.
- It accepts incoming HTTP requests (as a Fiber *ctx), and applies
request modifiers (ReqMods) and response modifiers (ResMods) before passing the
upstream response back to the client.
- ProxyChains can be reused to avoid memory allocations. However, they are not concurrent-safe
so a ProxyChainPool should be used with mutexes to avoid memory errors.
---
# EXAMPLE
```
import (
	"ladder/proxychain"
	rx "ladder/proxychain/requestmodifers"
	tx "ladder/proxychain/responsemodifers"
)
proxychain.NewProxyChain().
SetFiberCtx(c).
	SetRequestModifications(
		rx.DeleteOutgoingCookies(),
		rx.SpoofOrigin(""),
		rx.SpoofReferrer(""),
	).
	AddResponseModifications(
		tx.DeleteIncomingCookies(),
		tx.RewriteHTMLResourceURLs()
).
Execute()
```
client ladder service upstream
┌─────────┐ ┌────────────────────────┐ ┌─────────┐
│ │GET │ │ │ │
│ req────┼───► ProxyChain │ │ │
│ │ │ │ │ │ │
│ │ │ ▼ │ │ │
│ │ │ apply │ │ │
│ │ │ RequestModifications │ │ │
│ │ │ │ │ │ │
│ │ │ ▼ │ │ │
│ │ │ send GET │ │ │
│ │ │ Request req────────┼─► │ │
│ │ │ │ │ │
│ │ │ 200 OK │ │ │
│ │ │ ┌────────────────┼─response │
│ │ │ ▼ │ │ │
│ │ │ apply │ │ │
│ │ │ ResultModifications │ │ │
│ │ │ │ │ │ │
│ │◄───┼───────┘ │ │ │
│ │ │ 200 OK │ │ │
│ │ │ │ │ │
└─────────┘ └────────────────────────┘ └─────────┘
*/
type ProxyChain struct {
Context *fiber.Ctx
Client *http.Client
Request *http.Request
Response *http.Response
requestModifications []RequestModification
resultModifications []ResponseModification
Ruleset *ruleset.RuleSet
debugMode bool
abortErr error
}
// a ProxyStrategy is a pre-built proxychain with purpose-built defaults
type ProxyStrategy ProxyChain
// A RequestModification is a function that should operate on the
// ProxyChain Request or Client field, using the fiber ctx as needed.
type RequestModification func(*ProxyChain) error
// A ResponseModification is a function that should operate on the
// ProxyChain Response field (the buffered upstream http response)
type ResponseModification func(*ProxyChain) error
// SetRequestModifications sets the ProxyChain's request modifiers.
// The modifiers will not fire until ProxyChain.Execute() is run.
func (chain *ProxyChain) SetRequestModifications(mods ...RequestModification) *ProxyChain {
chain.requestModifications = mods
return chain
}
// AddRequestModifications appends to the ProxyChain's request modifiers.
// The modifiers will not fire until ProxyChain.Execute() is run.
func (chain *ProxyChain) AddRequestModifications(mods ...RequestModification) *ProxyChain {
chain.requestModifications = append(chain.requestModifications, mods...)
return chain
}
// AddResponseModifications appends to the ProxyChain's response modifiers.
// The modifiers will not fire until ProxyChain.Execute() is run.
func (chain *ProxyChain) AddResponseModifications(mods ...ResponseModification) *ProxyChain {
	chain.resultModifications = append(chain.resultModifications, mods...)
	return chain
}
// Adds a ruleset to ProxyChain
func (chain *ProxyChain) AddRuleset(rs *ruleset.RuleSet) *ProxyChain {
chain.Ruleset = rs
// TODO: add _applyRuleset method
return chain
}
func (chain *ProxyChain) _initialize_request() (*http.Request, error) {
if chain.Context == nil {
chain.abortErr = chain.abort(errors.New("no context set"))
return nil, chain.abortErr
}
// initialize a request (without url)
req, err := http.NewRequest(chain.Context.Method(), "", nil)
if err != nil {
return nil, err
}
chain.Request = req
	switch chain.Context.Method() {
	case "GET", "DELETE", "HEAD", "OPTIONS":
		// no request body to forward
	case "POST", "PUT", "PATCH":
		// stream content of body from client request to upstream request
		// (grouped cases: Go's switch does not fall through, so separate
		// empty cases would silently skip the body copy for POST and PUT)
		chain.Request.Body = io.NopCloser(chain.Context.Request().BodyStream())
	default:
		return nil, fmt.Errorf("unsupported request method from client: '%s'", chain.Context.Method())
	}
/*
// copy client request headers to upstream request headers
forwardHeaders := func(key []byte, val []byte) {
req.Header.Set(string(key), string(val))
}
clientHeaders := &chain.Context.Request().Header
clientHeaders.VisitAll(forwardHeaders)
*/
return req, nil
}
// _execute sends the request for the ProxyChain and returns the raw body only
// the caller is responsible for returning a response back to the requestor
// the caller is also responsible for calling chain._reset() when they are done with the body
func (chain *ProxyChain) _execute() (io.Reader, error) {
if chain.validateCtxIsSet() != nil || chain.abortErr != nil {
return nil, chain.abortErr
}
if chain.Request == nil {
return nil, errors.New("proxychain request not yet initialized")
}
if chain.Request.URL.Scheme == "" {
return nil, errors.New("request url not set or invalid. Check ProxyChain ReqMods for issues")
}
// Apply requestModifications to proxychain
for _, applyRequestModificationsTo := range chain.requestModifications {
err := applyRequestModificationsTo(chain)
if err != nil {
return nil, chain.abort(err)
}
}
// Send Request Upstream
resp, err := chain.Client.Do(chain.Request)
if err != nil {
return nil, chain.abort(err)
}
chain.Response = resp
//defer resp.Body.Close()
/* todo: move to rsm
for k, v := range resp.Header {
chain.Context.Set(k, resp.Header.Get(k))
}
*/
// Apply ResponseModifiers to proxychain
for _, applyResultModificationsTo := range chain.resultModifications {
err := applyResultModificationsTo(chain)
if err != nil {
return nil, chain.abort(err)
}
}
return chain.Response.Body, nil
}
// Execute sends the request for the ProxyChain and returns the request to the sender
// and resets the fields so that the ProxyChain can be reused.
// if any step in the ProxyChain fails, the request will abort and a 500 error will
// be returned to the client
func (chain *ProxyChain) Execute() error {
defer chain._reset()
body, err := chain._execute()
if err != nil {
log.Println(err)
return err
}
if chain.Context == nil {
return errors.New("no context set")
}
// Return request back to client
chain.Context.Set("content-type", chain.Response.Header.Get("content-type"))
	return chain.Context.SendStream(body)
}
// reconstructUrlFromReferer reconstructs the URL using the referer's scheme, host, and the relative path / queries
func reconstructUrlFromReferer(referer *url.URL, relativeUrl *url.URL) (*url.URL, error) {
// Extract the real url from referer path
realUrl, err := url.Parse(strings.TrimPrefix(referer.Path, "/"))
if err != nil {
return nil, fmt.Errorf("error parsing real URL from referer '%s': %v", referer.Path, err)
}
if realUrl.Scheme == "" || realUrl.Host == "" {
return nil, fmt.Errorf("invalid referer URL: '%s' on request '%s", referer, relativeUrl)
}
log.Printf("'%s' -> '%s'\n", relativeUrl.String(), realUrl.String())
return &url.URL{
Scheme: referer.Scheme,
Host: referer.Host,
Path: realUrl.Path,
RawQuery: realUrl.RawQuery,
}, nil
}
// extractUrl extracts a URL from the request ctx. If the URL in the request
// is a relative path, it reconstructs the full URL using the referer header.
func (chain *ProxyChain) extractUrl() (*url.URL, error) {
// try to extract url-encoded
reqUrl, err := url.QueryUnescape(chain.Context.Params("*"))
if err != nil {
reqUrl = chain.Context.Params("*") // fallback
}
urlQuery, err := url.Parse(reqUrl)
if err != nil {
return nil, fmt.Errorf("error parsing request URL '%s': %v", reqUrl, err)
}
// Handle standard paths
// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
isRelativePath := urlQuery.Scheme == ""
if !isRelativePath {
return urlQuery, nil
}
// Handle relative URLs
// eg: https://localhost:8080/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
referer, err := url.Parse(chain.Context.Get("referer"))
relativePath := urlQuery
if err != nil {
return nil, fmt.Errorf("error parsing referer URL from req: '%s': %v", relativePath, err)
}
return reconstructUrlFromReferer(referer, relativePath)
}
// SetFiberCtx takes the request ctx from the client
// for the modifiers and execute function to use.
// it must be set every time a new request comes through
// if the upstream request url cannot be extracted from the ctx,
// a 500 error will be sent back to the client
func (chain *ProxyChain) SetFiberCtx(ctx *fiber.Ctx) *ProxyChain {
chain.Context = ctx
// initialize the request and prepare it for modification
req, err := chain._initialize_request()
if err != nil {
chain.abortErr = chain.abort(err)
}
chain.Request = req
// extract the URL for the request and add it to the new request
url, err := chain.extractUrl()
if err != nil {
chain.abortErr = chain.abort(err)
}
chain.Request.URL = url
fmt.Printf("extracted URL: %s\n", chain.Request.URL)
return chain
}
func (chain *ProxyChain) validateCtxIsSet() error {
if chain.Context != nil {
return nil
}
err := errors.New("proxyChain was called without setting a fiber Ctx. Use ProxyChain.SetCtx()")
chain.abortErr = chain.abort(err)
return chain.abortErr
}
// SetHttpClient sets a new upstream http client transport
// useful for modifying TLS
func (chain *ProxyChain) SetHttpClient(httpClient *http.Client) *ProxyChain {
chain.Client = httpClient
return chain
}
// SetDebugLogging changes the logging behavior to print
// the modification steps and applied rulesets for debugging
func (chain *ProxyChain) SetDebugLogging(isDebugMode bool) *ProxyChain {
chain.debugMode = isDebugMode
return chain
}
// abort proxychain and return 500 error to client
// this will prevent Execute from firing and reset the state
// returns the initial error enriched with context
func (chain *ProxyChain) abort(err error) error {
//defer chain._reset()
chain.abortErr = err
chain.Context.Response().SetStatusCode(500)
e := fmt.Errorf("ProxyChain error for '%s': %s", chain.Request.URL.String(), err.Error())
chain.Context.SendString(e.Error())
log.Println(e.Error())
return e
}
// internal function to reset state of ProxyChain for reuse
func (chain *ProxyChain) _reset() {
chain.abortErr = nil
chain.Request = nil
//chain.Response = nil
chain.Context = nil
}
// NewProxyChain initializes a new ProxyChain
func NewProxyChain() *ProxyChain {
chain := new(ProxyChain)
chain.Client = http.DefaultClient
return chain
}
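// Custom modifiers are plain functions over *ProxyChain; a hypothetical sketch
// (the Accept-Language header choice is illustrative, not part of this codebase):
//
//	func SpoofAcceptLanguage(lang string) RequestModification {
//		return func(chain *ProxyChain) error {
//			chain.Request.Header.Set("Accept-Language", lang)
//			return nil
//		}
//	}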

View File

@@ -0,0 +1,11 @@
package proxychain
import (
"net/url"
)
type ProxyChainPool map[url.URL]ProxyChain
func NewProxyChainPool() ProxyChainPool {
return map[url.URL]ProxyChain{}
}

View File

@@ -0,0 +1,33 @@
package requestmodifers
import (
"ladder/proxychain"
)
// MasqueradeAsGoogleBot modifies the User-Agent and X-Forwarded-For headers
// to appear to be a Google Bot
func MasqueradeAsGoogleBot() proxychain.RequestModification {
const botUA string = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; Googlebot/2.1; http://www.google.com/bot.html) Chrome/79.0.3945.120 Safari/537.36"
const botIP string = "66.249.78.8" // TODO: create a random ip pool from https://developers.google.com/static/search/apis/ipranges/googlebot.json
return masqueradeAsTrustedBot(botUA, botIP)
}
// MasqueradeAsBingBot modifies the User-Agent and X-Forwarded-For headers
// to appear to be a Bing Bot
func MasqueradeAsBingBot() proxychain.RequestModification {
const botUA string = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm) Chrome/79.0.3945.120 Safari/537.36"
const botIP string = "13.66.144.9" // https://www.bing.com/toolbox/bingbot.json
return masqueradeAsTrustedBot(botUA, botIP)
}
func masqueradeAsTrustedBot(botUA string, botIP string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.AddRequestModifications(
SpoofUserAgent(botUA),
SpoofXForwardedFor(botIP),
SpoofReferrer(""),
SpoofOrigin(""),
)
return nil
}
}
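// A sketch of wiring these modifiers into a fiber handler, following the
// pattern in handlers (import aliases assumed; illustrative only):
//
//	func googleBotProxy() fiber.Handler {
//		return func(c *fiber.Ctx) error {
//			return proxychain.NewProxyChain().
//				SetFiberCtx(c).
//				SetRequestModifications(rx.MasqueradeAsGoogleBot()).
//				Execute()
//		}
//	}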

View File

@@ -0,0 +1,13 @@
package requestmodifers
import (
"ladder/proxychain"
"regexp"
)
func ModifyDomainWithRegex(match regexp.Regexp, replacement string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.URL.Host = match.ReplaceAllString(px.Request.URL.Host, replacement)
return nil
}
}

View File

@@ -0,0 +1,97 @@
package requestmodifers
import (
"ladder/proxychain"
"net/http"
)
// SetOutgoingCookie modifies a specific cookie by name
// in the request Cookie header going to the upstream server.
// If the cookie name does not already exist, it is created.
func SetOutgoingCookie(name string, val string) proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		cookies := chain.Request.Cookies()
		// rebuild the Cookie header, since mutating the parsed copies
		// returned by Cookies() does not change the header itself
		chain.Request.Header.Del("Cookie")
		hasCookie := false
		for _, cookie := range cookies {
			if cookie.Name == name {
				hasCookie = true
				cookie.Value = val
			}
			chain.Request.AddCookie(cookie)
		}
		if hasCookie {
			return nil
		}
		chain.Request.AddCookie(&http.Cookie{
			Domain: chain.Request.URL.Host,
			Name:   name,
			Value:  val,
		})
		return nil
	}
}
// SetOutgoingCookies overwrites the client request's Cookie header
// with a raw cookie string, replacing any existing cookies
func SetOutgoingCookies(cookies string) proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		chain.Request.Header.Set("Cookie", cookies)
		return nil
	}
}
// DeleteOutgoingCookie modifies the http request's Cookie header to
// delete a specific request cookie going to the upstream server.
// If the cookie does not exist, it does not do anything.
func DeleteOutgoingCookie(name string) proxychain.RequestModification {
	return func(chain *proxychain.ProxyChain) error {
		cookies := chain.Request.Cookies()
		chain.Request.Header.Del("Cookie")
		// re-add every cookie except the one being deleted
		for _, cookie := range cookies {
			if cookie.Name != name {
				chain.Request.AddCookie(cookie)
			}
		}
		return nil
	}
}
// DeleteOutgoingCookies removes the cookie header entirely,
// preventing any cookies from reaching the upstream server.
func DeleteOutgoingCookies() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Del("Cookie")
return nil
}
}
// DeleteOutgoingCookiesExcept prevents non-whitelisted cookies from being sent from the client
// to the upstream proxy server. Cookies whose names are in the whitelist are not removed.
func DeleteOutgoingCookiesExcept(whitelist ...string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
// Convert whitelist slice to a map for efficient lookups
whitelistMap := make(map[string]struct{})
for _, cookieName := range whitelist {
whitelistMap[cookieName] = struct{}{}
}
// Get all cookies from the request header
cookies := px.Request.Cookies()
// Clear the original Cookie header
px.Request.Header.Del("Cookie")
// Re-add cookies that are in the whitelist
for _, cookie := range cookies {
if _, found := whitelistMap[cookie.Name]; found {
px.Request.AddCookie(cookie)
}
}
return nil
}
}

View File

@@ -0,0 +1,13 @@
package requestmodifers
import (
"ladder/proxychain"
"regexp"
)
func ModifyPathWithRegex(match regexp.Regexp, replacement string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.URL.Path = match.ReplaceAllString(px.Request.URL.Path, replacement)
return nil
}
}

View File

@@ -0,0 +1,20 @@
package requestmodifers
import (
"ladder/proxychain"
)
// ModifyQueryParams sets a query parameter value in a ProxyChain's request URL.
// If the key doesn't exist, it is created; if value is "", the key is deleted.
func ModifyQueryParams(key string, value string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
q := px.Request.URL.Query()
if value == "" {
q.Del(key)
px.Request.URL.RawQuery = q.Encode()
return nil
}
q.Set(key, value)
px.Request.URL.RawQuery = q.Encode()
return nil
}
}
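
A sketch of both behaviors (the keys are hypothetical):

px.AddRequestModifications(
requestmodifers.ModifyQueryParams("amp", "1"), // add or overwrite ?amp=1
requestmodifers.ModifyQueryParams("utm_source", ""), // empty value deletes the key
)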

View File

@@ -0,0 +1,23 @@
package requestmodifers
import (
"ladder/proxychain"
)
// SetRequestHeader modifies a specific outgoing header
// This is the header that the upstream server will see.
func SetRequestHeader(name string, val string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set(name, val)
return nil
}
}
// DeleteRequestHeader modifies a specific outgoing header
// This is the header that the upstream server will see.
func DeleteRequestHeader(name string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Del(name)
return nil
}
}

View File

@@ -0,0 +1,27 @@
package requestmodifers
import (
"ladder/proxychain"
"net/url"
)
const archivistUrl string = "https://archive.is/latest/"
// RequestArchiveIs modifies a ProxyChain's URL to request an archived version from archive.is
func RequestArchiveIs() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.URL.RawQuery = ""
newURLString := archivistUrl + px.Request.URL.String()
newURL, err := url.Parse(newURLString)
if err != nil {
return err
}
// archive.is appears to sabotage requests resolved through Cloudflare's DNS,
// so resolve through Google DoH just in case
px.AddRequestModifications(ResolveWithGoogleDoH())
px.Request.URL = newURL
return nil
}
}

View File

@@ -0,0 +1,21 @@
package requestmodifers
import (
"ladder/proxychain"
"net/url"
)
const googleCacheUrl string = "https://webcache.googleusercontent.com/search?q=cache:"
// RequestGoogleCache modifies a ProxyChain's URL to request its Google Cache version.
func RequestGoogleCache() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
encodedURL := url.QueryEscape(px.Request.URL.String())
newURL, err := url.Parse(googleCacheUrl + encodedURL)
if err != nil {
return err
}
px.Request.URL = newURL
return nil
}
}

View File

@@ -0,0 +1,22 @@
package requestmodifers
import (
"ladder/proxychain"
"net/url"
)
const waybackUrl string = "https://web.archive.org/web/"
// RequestWaybackMachine modifies a ProxyChain's URL to request the wayback machine (archive.org) version.
func RequestWaybackMachine() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.URL.RawQuery = ""
newURLString := waybackUrl + px.Request.URL.String()
newURL, err := url.Parse(newURLString)
if err != nil {
return err
}
px.Request.URL = newURL
return nil
}
}
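
An illustrative rewrite with a hypothetical URL; the modifier strips the query string, then prefixes the wayback endpoint:

// https://example.com/article?id=1 -> https://web.archive.org/web/https://example.com/article
px.AddRequestModifications(requestmodifers.RequestWaybackMachine())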

View File

@@ -0,0 +1,80 @@
package requestmodifers
import (
"context"
"encoding/json"
"fmt"
"ladder/proxychain"
"net"
"net/http"
"time"
)
// resolveWithGoogleDoH resolves DNS using Google's DNS-over-HTTPS
func resolveWithGoogleDoH(host string) (string, error) {
url := "https://dns.google/resolve?name=" + host + "&type=A"
resp, err := http.Get(url)
if err != nil {
return "", err
}
defer resp.Body.Close()
var result struct {
Answer []struct {
Data string `json:"data"`
} `json:"Answer"`
}
err = json.NewDecoder(resp.Body).Decode(&result)
if err != nil {
return "", err
}
// Get the first A record
if len(result.Answer) > 0 {
return result.Answer[0].Data, nil
}
return "", fmt.Errorf("no DoH DNS record found for %s", host)
}
// ResolveWithGoogleDoH modifies a ProxyChain's HTTP client so that hostnames
// are resolved using Google's DNS-over-HTTPS service instead of the system resolver
func ResolveWithGoogleDoH() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
client := &http.Client{
Timeout: px.Client.Timeout,
}
dialer := &net.Dialer{
Timeout: 5 * time.Second,
KeepAlive: 5 * time.Second,
}
customDialContext := func(ctx context.Context, network, addr string) (net.Conn, error) {
host, port, err := net.SplitHostPort(addr)
if err != nil {
// If the addr doesn't include a port, determine it based on the URL scheme
if px.Request.URL.Scheme == "https" {
port = "443"
} else {
port = "80"
}
host = addr // assume the entire addr is the host
}
resolvedHost, err := resolveWithGoogleDoH(host)
if err != nil {
return nil, err
}
return dialer.DialContext(ctx, network, net.JoinHostPort(resolvedHost, port))
}
patchedTransportWithDoH := &http.Transport{
DialContext: customDialContext,
}
client.Transport = patchedTransportWithDoH
px.Client = client // Assign the modified client to the ProxyChain
return nil
}
}
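
For reference, a sketch of a direct call within the same package; the JSON shape is what the decoder above expects, and the IP shown is illustrative:

func exampleResolve() (string, error) {
// GET https://dns.google/resolve?name=example.com&type=A returns JSON like:
// {"Status":0,"Answer":[{"name":"example.com.","type":1,"data":"93.184.216.34"}]}
return resolveWithGoogleDoH("example.com")
}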

View File

@@ -0,0 +1,24 @@
package requestmodifers
import (
"ladder/proxychain"
)
// SpoofOrigin sets the Origin header on the outgoing request.
// If the upstream server varies its response on Origin (see its Vary header),
// changing this value may yield a different response.
func SpoofOrigin(url string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("origin", url)
return nil
}
}
// HideOrigin sets the Origin header to the upstream URL itself,
// so the request appears same-origin rather than coming from the proxy
func HideOrigin() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("origin", px.Request.URL.String())
return nil
}
}

View File

@@ -0,0 +1,29 @@
package requestmodifers
import (
"ladder/proxychain"
)
// SpoofReferrer sets the Referer header (note: the header name's misspelling is canonical).
// Useful if the page can be accessed from a search engine
// or social media site, but not by browsing the website itself.
// If url is "", then the Referer header is removed.
func SpoofReferrer(url string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
if url == "" {
px.Request.Header.Del("Referer")
return nil
}
px.Request.Header.Set("Referer", url)
return nil
}
}
// HideReferrer sets the Referer header to the upstream URL itself,
// so it reflects the original site, not the proxy
func HideReferrer() proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("Referer", px.Request.URL.String())
return nil
}
}

View File

@@ -0,0 +1,13 @@
package requestmodifers
import (
"ladder/proxychain"
)
// SpoofUserAgent modifies the user agent
func SpoofUserAgent(ua string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("user-agent", ua)
return nil
}
}

View File

@@ -0,0 +1,14 @@
package requestmodifers
import (
"ladder/proxychain"
)
// SpoofXForwardedFor modifies the X-Forwarded-For header
// in some cases, a forward proxy may interpret this as the source IP
func SpoofXForwardedFor(ip string) proxychain.RequestModification {
return func(px *proxychain.ProxyChain) error {
px.Request.Header.Set("X-FORWARDED-FOR", ip)
return nil
}
}
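
Taken together, the spoofing helpers can reproduce the header disguises used by the rulesets later in this diff; a sketch mirroring the nytimes ruleset's Googlebot headers (the crawler IP is hypothetical):

px.AddRequestModifications(
requestmodifers.SpoofUserAgent("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"),
requestmodifers.SpoofReferrer("https://www.google.com/"),
requestmodifers.SpoofXForwardedFor("66.249.66.1"), // hypothetical crawler IP
)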

View File

@@ -0,0 +1,21 @@
package responsemodifers
import (
"ladder/proxychain"
)
// BypassCORS modifies response headers to prevent the browser
// from enforcing any CORS restrictions. This should run at the end of the chain.
func BypassCORS() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddResponseModifications(
SetResponseHeader("Access-Control-Allow-Origin", "*"),
SetResponseHeader("Access-Control-Expose-Headers", "*"),
SetResponseHeader("Access-Control-Allow-Credentials", "true"),
SetResponseHeader("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, HEAD, OPTIONS, PATCH"),
SetResponseHeader("Access-Control-Allow-Headers", "*"),
DeleteResponseHeader("X-Frame-Options"),
)
return nil
}
}

View File

@@ -0,0 +1,27 @@
package responsemodifers
import (
"ladder/proxychain"
)
// BypassContentSecurityPolicy modifies response headers to prevent the browser
// from enforcing any CSP restrictions. This should run at the end of the chain.
func BypassContentSecurityPolicy() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
chain.AddResponseModifications(
DeleteResponseHeader("Content-Security-Policy"),
DeleteResponseHeader("Content-Security-Policy-Report-Only"),
DeleteResponseHeader("X-Content-Security-Policy"),
DeleteResponseHeader("X-WebKit-CSP"),
)
return nil
}
}
// SetContentSecurityPolicy modifies response headers to a specific CSP
func SetContentSecurityPolicy(csp string) proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
chain.Response.Header.Set("Content-Security-Policy", csp)
return nil
}
}
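
A usage sketch: the medium.com ruleset later in this diff pins scripts to same-origin via a header, which corresponds roughly to:

px.AddResponseModifications(
responsemodifers.SetContentSecurityPolicy("script-src 'self';"),
)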

View File

@@ -0,0 +1,102 @@
package responsemodifers
import (
"fmt"
"ladder/proxychain"
"net/http"
)
// DeleteIncomingCookies prevents ALL cookies from being sent from the proxy server
// back down to the client.
func DeleteIncomingCookies() proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Response.Header.Del("Set-Cookie")
return nil
}
}
// DeleteIncomingCookiesExcept prevents non-whitelisted cookies from being sent from the proxy server
// to the client. Cookies whose names are in the whitelist are not removed.
func DeleteIncomingCookiesExcept(whitelist ...string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
// Convert whitelist slice to a map for efficient lookups
whitelistMap := make(map[string]struct{})
for _, cookieName := range whitelist {
whitelistMap[cookieName] = struct{}{}
}
// If the response has no cookies, return early
if px.Response.Header == nil {
return nil
}
// Filter the cookies in the response
filteredCookies := []string{}
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
cookie := parseCookie(cookieStr)
if _, found := whitelistMap[cookie.Name]; found {
filteredCookies = append(filteredCookies, cookieStr)
}
}
// Update the Set-Cookie header with the filtered cookies
if len(filteredCookies) > 0 {
px.Response.Header["Set-Cookie"] = filteredCookies
} else {
px.Response.Header.Del("Set-Cookie")
}
return nil
}
}
// parseCookie parses a Set-Cookie string and returns an http.Cookie object.
func parseCookie(cookieStr string) *http.Cookie {
header := http.Header{}
header.Add("Set-Cookie", cookieStr)
// http.Response parses Set-Cookie headers (http.Request only parses Cookie headers)
response := http.Response{Header: header}
cookies := response.Cookies()
if len(cookies) == 0 {
return &http.Cookie{}
}
return cookies[0]
}
// SetIncomingCookies overwrites the Set-Cookie header sent from the proxy server down to the client with a raw cookie string
func SetIncomingCookies(cookies string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Response.Header.Set("Set-Cookie", cookies)
return nil
}
}
// SetIncomingCookie modifies a specific cookie in the response from the proxy server to the client.
func SetIncomingCookie(name string, val string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
if px.Response.Header == nil {
return nil
}
updatedCookies := []string{}
found := false
// Iterate over existing cookies and modify the one that matches the cookieName
for _, cookieStr := range px.Response.Header["Set-Cookie"] {
cookie := parseCookie(cookieStr)
if cookie.Name == name {
// Replace the cookie with the new value
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
found = true
} else {
// Keep the cookie as is
updatedCookies = append(updatedCookies, cookieStr)
}
}
// If the specified cookie wasn't found, add it
if !found {
updatedCookies = append(updatedCookies, fmt.Sprintf("%s=%s", name, val))
}
// Update the Set-Cookie header
px.Response.Header["Set-Cookie"] = updatedCookies
return nil
}
}
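
A sketch with a hypothetical cookie name: drop every upstream Set-Cookie except a consent flag.

px.AddResponseModifications(
responsemodifers.DeleteIncomingCookiesExcept("cookie_consent"),
)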

View File

@@ -0,0 +1,21 @@
package responsemodifers
import (
"ladder/proxychain"
)
// SetResponseHeader modifies response headers from the upstream server
func SetResponseHeader(key string, value string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Context.Response().Header.Set(key, value)
return nil
}
}
// DeleteResponseHeader removes response headers from the upstream server
func DeleteResponseHeader(key string) proxychain.ResponseModification {
return func(px *proxychain.ProxyChain) error {
px.Context.Response().Header.Del(key)
return nil
}
}

View File

@@ -0,0 +1,62 @@
// Overrides the global fetch and XMLHttpRequest open methods to modify the request URLs.
// Also overrides the attribute setter prototype to modify the request URLs
// fetch("/relative_script.js") -> fetch("http://localhost:8080/relative_script.js")
(() => {
function rewriteURL(url) {
if (!url) return url;
if (url.startsWith(window.location.origin)) return url;
if (url.startsWith("//")) {
url = `${window.location.origin}/${encodeURIComponent(url.substring(2))}`;
} else if (url.startsWith("/")) {
url = `${window.location.origin}/${encodeURIComponent(url.substring(1))}`;
} else if (url.startsWith("http://") || url.startsWith("https://")) {
url = `${window.location.origin}/${encodeURIComponent(url)}`;
}
return url;
}
// monkey patch fetch
const oldFetch = globalThis.fetch;
globalThis.fetch = async (url, init) => {
return oldFetch(rewriteURL(url), init);
};
// monkey patch xmlhttprequest
const oldOpen = XMLHttpRequest.prototype.open;
XMLHttpRequest.prototype.open = function(method, url, async = true, user = null, password = null) {
return oldOpen.call(this, method, rewriteURL(url), async, user, password);
};
// Monkey patch setter methods
const elements = [
{ tag: 'a', attribute: 'href' },
{ tag: 'img', attribute: 'src' },
{ tag: 'script', attribute: 'src' },
{ tag: 'link', attribute: 'href' },
{ tag: 'iframe', attribute: 'src' },
{ tag: 'audio', attribute: 'src' },
{ tag: 'video', attribute: 'src' },
{ tag: 'source', attribute: 'src' },
{ tag: 'embed', attribute: 'src' },
{ tag: 'object', attribute: 'src' },
{ tag: 'input', attribute: 'src' },
{ tag: 'track', attribute: 'src' },
{ tag: 'form', attribute: 'action' },
];
elements.forEach(({ tag, attribute }) => {
const proto = document.createElement(tag).constructor.prototype;
const descriptor = Object.getOwnPropertyDescriptor(proto, attribute);
if (descriptor && descriptor.set) {
Object.defineProperty(proto, attribute, {
...descriptor,
set(value) {
return descriptor.set.call(this, rewriteURL(value));
}
});
}
});
})();

View File

@@ -0,0 +1,262 @@
package responsemodifers
import (
"bytes"
"fmt"
"io"
"ladder/proxychain"
"net/url"
"strings"
"golang.org/x/net/html"
)
// AttributesToRewrite lists the HTML attributes whose URL values get rewritten.
var AttributesToRewrite = map[string]bool{
"src": true,
"href": true,
"action": true,
"srcset": true, // TODO: fix
"poster": true,
"data": true,
"cite": true,
"formaction": true,
"background": true,
"usemap": true,
"longdesc": true,
"manifest": true,
"archive": true,
"codebase": true,
"icon": true,
"pluginspage": true,
}
// HTMLResourceURLRewriter is a struct that rewrites URLs within HTML resources to use a specified proxy URL.
// It uses an HTML tokenizer to process HTML content and rewrites URLs in src/href attributes.
// <img src='/relative_path'> -> <img src='/https://proxiedsite.com/relative_path'>
type HTMLResourceURLRewriter struct {
baseURL *url.URL
tokenizer *html.Tokenizer
currentToken html.Token
tokenBuffer *bytes.Buffer
currentTokenIndex int
currentTokenProcessed bool
}
// NewHTMLResourceURLRewriter creates a new instance of HTMLResourceURLRewriter.
// It initializes the tokenizer with the provided source and sets the proxy URL.
func NewHTMLResourceURLRewriter(src io.ReadCloser, baseURL *url.URL) *HTMLResourceURLRewriter {
return &HTMLResourceURLRewriter{
tokenizer: html.NewTokenizer(src),
currentToken: html.Token{},
currentTokenIndex: 0,
tokenBuffer: new(bytes.Buffer),
baseURL: baseURL,
}
}
// Close resets the internal state of HTMLResourceURLRewriter, clearing buffers and token data.
func (r *HTMLResourceURLRewriter) Close() error {
r.tokenBuffer.Reset()
r.currentToken = html.Token{}
r.currentTokenIndex = 0
r.currentTokenProcessed = false
return nil
}
// Read processes the HTML content, rewriting URLs and managing the state of tokens.
// It reads HTML content, token by token, rewriting URLs to route through the specified proxy.
func (r *HTMLResourceURLRewriter) Read(p []byte) (int, error) {
if r.currentToken.Data == "" || r.currentTokenProcessed {
tokenType := r.tokenizer.Next()
// done reading html, close out reader
if tokenType == html.ErrorToken {
if r.tokenizer.Err() == io.EOF {
return 0, io.EOF
}
return 0, r.tokenizer.Err()
}
// flush the current token into an internal buffer
// to handle fragmented tokens
r.currentToken = r.tokenizer.Token()
// patch tokens with URLs
isTokenWithAttribute := r.currentToken.Type == html.StartTagToken || r.currentToken.Type == html.SelfClosingTagToken
if isTokenWithAttribute {
patchResourceURL(&r.currentToken, r.baseURL)
}
r.tokenBuffer.Reset()
r.tokenBuffer.WriteString(html.UnescapeString(r.currentToken.String()))
r.currentTokenProcessed = false
r.currentTokenIndex = 0
}
n, err := r.tokenBuffer.Read(p)
if err == io.EOF || r.tokenBuffer.Len() == 0 {
r.currentTokenProcessed = true
err = nil // EOF in this context is expected and not an actual error
}
return n, err
}
// Root-relative URLs: These are relative to the root path and start with a "/".
func handleRootRelativePath(attr *html.Attribute, baseURL *url.URL) {
// double-check this is a valid relative URL by parsing it against a dummy base
_, err := url.Parse(fmt.Sprintf("http://localhost.com%s", attr.Val))
if err != nil {
return
}
attr.Val = fmt.Sprintf(
"/%s://%s/%s",
baseURL.Scheme,
baseURL.Host,
strings.TrimPrefix(attr.Val, "/"),
)
attr.Val = url.QueryEscape(attr.Val)
attr.Val = fmt.Sprintf("/%s", attr.Val)
//log.Printf("root rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
// Document-relative URLs: These are relative to the current document's path and don't start with a "/".
func handleDocumentRelativePath(attr *html.Attribute, baseURL *url.URL) {
// resolve the relative reference against the document's base URL
rel, err := url.Parse(attr.Val)
if err != nil {
return
}
abs := baseURL.ResolveReference(rel)
attr.Val = fmt.Sprintf("/%s", url.QueryEscape(abs.String()))
}
// Protocol-relative URLs: These start with "//" and will use the same protocol (http or https) as the current page.
func handleProtocolRelativePath(attr *html.Attribute, baseURL *url.URL) {
attr.Val = strings.TrimPrefix(attr.Val, "/")
handleRootRelativePath(attr, baseURL)
//log.Printf("proto rel url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
func handleAbsolutePath(attr *html.Attribute, baseURL *url.URL) {
// check if valid URL
u, err := url.Parse(attr.Val)
if err != nil {
return
}
if !(u.Scheme == "http" || u.Scheme == "https") {
return
}
attr.Val = fmt.Sprintf(
"/%s",
url.QueryEscape(
strings.TrimPrefix(attr.Val, "/"),
),
)
//log.Printf("abs url rewritten-> '%s'='%s'", attr.Key, attr.Val)
}
func handleSrcSet(attr *html.Attribute, baseURL *url.URL) {
for i, src := range strings.Split(attr.Val, ",") {
src = strings.Trim(src, " ")
for j, s := range strings.Split(src, " ") {
s = strings.Trim(s, " ")
if j == 0 {
f := &html.Attribute{Val: s, Key: attr.Key}
switch {
case strings.HasPrefix(s, "//"):
handleProtocolRelativePath(f, baseURL)
case strings.HasPrefix(s, "/"):
handleRootRelativePath(f, baseURL)
case strings.HasPrefix(s, "https://") || strings.HasPrefix(s, "http://"):
handleAbsolutePath(f, baseURL)
default:
handleDocumentRelativePath(f, baseURL)
}
s = f.Val
}
if i == 0 && j == 0 {
attr.Val = s
continue
}
attr.Val = fmt.Sprintf("%s %s", attr.Val, s)
}
attr.Val = fmt.Sprintf("%s,", attr.Val)
}
attr.Val = strings.TrimSuffix(attr.Val, ",")
}
func patchResourceURL(token *html.Token, baseURL *url.URL) {
for i := range token.Attr {
attr := &token.Attr[i]
switch {
// don't touch any attributes except the ones defined in AttributesToRewrite
case !AttributesToRewrite[attr.Key]:
continue
case attr.Key == "srcset":
handleSrcSet(attr, baseURL)
continue
case strings.HasPrefix(attr.Val, "//"):
handleProtocolRelativePath(attr, baseURL)
continue
case strings.HasPrefix(attr.Val, "/"):
handleRootRelativePath(attr, baseURL)
continue
case strings.HasPrefix(attr.Val, "https://") || strings.HasPrefix(attr.Val, "http://"):
handleAbsolutePath(attr, baseURL)
continue
default:
handleDocumentRelativePath(attr, baseURL)
continue
}
}
}
// RewriteHTMLResourceURLs modifies HTTP responses
// to rewrite URL attributes in HTML content (such as src, href)
// - `<img src='/relative_path'>` -> `<img src='/https://proxiedsite.com/relative_path'>`
// - This function is designed to allow the proxified page
// to still be browsable by routing all resource URLs through the proxy.
//
// ---
//
// - It works by replacing the io.ReadCloser of the http.Response.Body
// with another io.ReadCloser (HTMLResourceURLRewriter) that wraps the first one.
//
// - This process can be done multiple times, so that the response will
// be streamed and modified through each pass without buffering the entire response in memory.
//
// - HTMLResourceURLRewriter reads the http.Response.Body stream,
// parsing each HTML token one at a time and replacing attribute tags.
//
// - When ProxyChain.Execute() is called, the response body will be read from the server
// and pulled through each ResponseModification which wraps the ProxyChain.Response.Body
// without ever buffering the entire HTTP response in memory.
func RewriteHTMLResourceURLs() proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
// return early if it's not HTML
ct := chain.Response.Header.Get("content-type")
if !strings.HasPrefix(ct, "text/html") {
return nil
}
chain.Response.Body = NewHTMLResourceURLRewriter(chain.Response.Body, chain.Request.URL)
return nil
}
}
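
A typical chain sketch (assuming a ProxyChain named chain): rewrite resource URLs first, then relax the browser-side policies so the rewritten page still loads; the CORS and CSP helpers above note they should run at the end of the chain.

chain.AddResponseModifications(
responsemodifers.RewriteHTMLResourceURLs(),
responsemodifers.BypassContentSecurityPolicy(),
responsemodifers.BypassCORS(),
)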

View File

@@ -21,145 +21,3 @@
- position: h1
replace: |
<h1>An example with a ladder ;-)</h1>
- domain: www.americanbanker.com
paths:
- /news
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const inlineGate = document.querySelector('.inline-gate');
if (inlineGate) {
inlineGate.classList.remove('inline-gate');
const inlineGated = document.querySelectorAll('.inline-gated');
for (const elem of inlineGated) { elem.classList.remove('inline-gated'); }
}
});
</script>
- domain: www.nzz.ch
paths:
- /international
- /sport
- /wirtschaft
- /technologie
- /feuilleton
- /zuerich
- /wissenschaft
- /gesellschaft
- /panorama
- /mobilitaet
- /reisen
- /meinung
- /finanze
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const paywall = document.querySelector('.dynamic-regwall');
removeDOMElement(paywall)
});
</script>
- domains:
- www.architecturaldigest.com
- www.bonappetit.com
- www.cntraveler.com
- www.epicurious.com
- www.gq.com
- www.newyorker.com
- www.vanityfair.com
- www.vogue.com
- www.wired.com
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const banners = document.querySelectorAll('.paywall-bar, div[class^="MessageBannerWrapper-"]');
banners.forEach(el => { el.remove(); });
});
</script>
- domains:
- www.nytimes.com
- www.time.com
headers:
user-agent: Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
cookie: nyt-a=; nyt-gdpr=0; nyt-geo=DE; nyt-privacy=1
referer: https://www.google.com/
injections:
- position: head
append: |
<script>
window.localStorage.clear();
document.addEventListener("DOMContentLoaded", () => {
const banners = document.querySelectorAll('div[data-testid="inline-message"], div[id^="ad-"], div[id^="leaderboard-"], div.expanded-dock, div.pz-ad-box, div[id="top-wrapper"], div[id="bottom-wrapper"]');
banners.forEach(el => { el.remove(); });
});
</script>
- domains:
- www.thestar.com
- www.niagarafallsreview.ca
- www.stcatharinesstandard.ca
- www.thepeterboroughexaminer.com
- www.therecord.com
- www.thespec.com
- www.wellandtribune.ca
injections:
- position: head
append: |
<script>
window.localStorage.clear();
document.addEventListener("DOMContentLoaded", () => {
const paywall = document.querySelectorAll('div.subscriber-offers');
paywall.forEach(el => { el.remove(); });
const subscriber_only = document.querySelectorAll('div.subscriber-only');
for (const elem of subscriber_only) {
if (elem.classList.contains('encrypted-content') && dompurify_loaded) {
const parser = new DOMParser();
const doc = parser.parseFromString('<div>' + DOMPurify.sanitize(unscramble(elem.innerText)) + '</div>', 'text/html');
const content_new = doc.querySelector('div');
elem.parentNode.replaceChild(content_new, elem);
}
elem.removeAttribute('style');
elem.removeAttribute('class');
}
const banners = document.querySelectorAll('div.subscription-required, div.redacted-overlay, div.subscriber-hide, div.tnt-ads-container');
banners.forEach(el => { el.remove(); });
const ads = document.querySelectorAll('div.tnt-ads-container, div[class*="adLabelWrapper"]');
ads.forEach(el => { el.remove(); });
const recommendations = document.querySelectorAll('div[id^="tncms-region-article"]');
recommendations.forEach(el => { el.remove(); });
});
</script>
- domain: www.usatoday.com
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const banners = document.querySelectorAll('div.roadblock-container, .gnt_nb, [aria-label="advertisement"], div[id="main-frame-error"]');
banners.forEach(el => { el.remove(); });
});
</script>
- domain: www.washingtonpost.com
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
let paywall = document.querySelectorAll('div[data-qa$="-ad"], div[id="leaderboard-wrapper"], div[data-qa="subscribe-promo"]');
paywall.forEach(el => { el.remove(); });
const images = document.querySelectorAll('img');
images.forEach(image => { image.parentElement.style.filter = ''; });
const headimage = document.querySelectorAll('div .aspect-custom');
headimage.forEach(image => { image.style.filter = ''; });
});
</script>
- domain: medium.com
headers:
referer: https://t.co/x?amp=1
x-forwarded-for: none
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
content-security-policy: script-src 'self';
cookie:

View File

@@ -0,0 +1,35 @@
- domains:
- www.thestar.com
- www.niagarafallsreview.ca
- www.stcatharinesstandard.ca
- www.thepeterboroughexaminer.com
- www.therecord.com
- www.thespec.com
- www.wellandtribune.ca
injections:
- position: head
append: |
<script>
window.localStorage.clear();
document.addEventListener("DOMContentLoaded", () => {
const paywall = document.querySelectorAll('div.subscriber-offers');
paywall.forEach(el => { el.remove(); });
const subscriber_only = document.querySelectorAll('div.subscriber-only');
for (const elem of subscriber_only) {
if (elem.classList.contains('encrypted-content') && dompurify_loaded) {
const parser = new DOMParser();
const doc = parser.parseFromString('<div>' + DOMPurify.sanitize(unscramble(elem.innerText)) + '</div>', 'text/html');
const content_new = doc.querySelector('div');
elem.parentNode.replaceChild(content_new, elem);
}
elem.removeAttribute('style');
elem.removeAttribute('class');
}
const banners = document.querySelectorAll('div.subscription-required, div.redacted-overlay, div.subscriber-hide, div.tnt-ads-container');
banners.forEach(el => { el.remove(); });
const ads = document.querySelectorAll('div.tnt-ads-container, div[class*="adLabelWrapper"]');
ads.forEach(el => { el.remove(); });
const recommendations = document.querySelectorAll('div[id^="tncms-region-article"]');
recommendations.forEach(el => { el.remove(); });
});
</script>

rulesets/ch/nzz-ch.yaml
View File

@@ -0,0 +1,24 @@
- domain: www.nzz.ch
paths:
- /international
- /sport
- /wirtschaft
- /technologie
- /feuilleton
- /zuerich
- /wissenschaft
- /gesellschaft
- /panorama
- /mobilitaet
- /reisen
- /meinung
- /finanze
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const paywall = document.querySelector('.dynamic-regwall');
removeDOMElement(paywall)
});
</script>

View File

@@ -0,0 +1,9 @@
# loads the AMP version of the page
- domain: tagesspiegel.de
headers:
content-security-policy: script-src 'self';
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
urlMods:
query:
- key: amp
value: 1
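
Presumably the ruleset compiler maps urlMods.query entries onto request modifications; a hedged equivalent of the rule above, using ModifyQueryParams from earlier in this diff:

px.AddRequestModifications(requestmodifers.ModifyQueryParams("amp", "1"))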

rulesets/gb/ft-com.yaml
View File

@@ -0,0 +1,20 @@
- domain: www.ft.com
headers:
referer: https://t.co/x?amp=1
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const styleTags = document.querySelectorAll('link[rel="stylesheet"]');
styleTags.forEach(el => {
const href = el.getAttribute('href').substring(1);
const updatedHref = href.replace(/(https?:\/\/.+?)\/{2,}/, '$1/');
el.setAttribute('href', updatedHref);
});
setTimeout(() => {
const cookie = document.querySelectorAll('.o-cookie-message, .js-article-ribbon, .o-ads, .o-banner, .o-message, .article__content-sign-up');
cookie.forEach(el => { el.remove(); });
}, 1000);
})
</script>

View File

@@ -0,0 +1,19 @@
- domains:
- www.architecturaldigest.com
- www.bonappetit.com
- www.cntraveler.com
- www.epicurious.com
- www.gq.com
- www.newyorker.com
- www.vanityfair.com
- www.vogue.com
- www.wired.com
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const banners = document.querySelectorAll('.paywall-bar, div[class^="MessageBannerWrapper-"]');
banners.forEach(el => { el.remove(); });
});
</script>

View File

@@ -0,0 +1,16 @@
- domain: americanbanker.com
paths:
- /news
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const inlineGate = document.querySelector('.inline-gate');
if (inlineGate) {
inlineGate.classList.remove('inline-gate');
const inlineGated = document.querySelectorAll('.inline-gated');
for (const elem of inlineGated) { elem.classList.remove('inline-gated'); }
}
});
</script>

View File

@@ -0,0 +1,7 @@
- domain: medium.com
headers:
referer: https://t.co/x?amp=1
x-forwarded-for: none
user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36
content-security-policy: script-src 'self';
cookie:

View File

@@ -0,0 +1,17 @@
- domains:
- www.nytimes.com
- www.time.com
headers:
user-agent: Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
cookie: nyt-a=; nyt-gdpr=0; nyt-geo=DE; nyt-privacy=1
referer: https://www.google.com/
injections:
- position: head
append: |
<script>
window.localStorage.clear();
document.addEventListener("DOMContentLoaded", () => {
const banners = document.querySelectorAll('div[data-testid="inline-message"], div[id^="ad-"], div[id^="leaderboard-"], div.expanded-dock, div.pz-ad-box, div[id="top-wrapper"], div[id="bottom-wrapper"]');
banners.forEach(el => { el.remove(); });
});
</script>

View File

@@ -0,0 +1,10 @@
- domain: www.usatoday.com
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
const banners = document.querySelectorAll('div.roadblock-container, .gnt_nb, [aria-label="advertisement"], div[id="main-frame-error"]');
banners.forEach(el => { el.remove(); });
});
</script>

View File

@@ -0,0 +1,14 @@
- domain: www.washingtonpost.com
injections:
- position: head
append: |
<script>
document.addEventListener("DOMContentLoaded", () => {
let paywall = document.querySelectorAll('div[data-qa$="-ad"], div[id="leaderboard-wrapper"], div[data-qa="subscribe-promo"]');
paywall.forEach(el => { el.remove(); });
const images = document.querySelectorAll('img');
images.forEach(image => { image.parentElement.style.filter = ''; });
const headimage = document.querySelectorAll('div .aspect-custom');
headimage.forEach(image => { image.style.filter = ''; });
});
</script>

styles/input.css
View File

@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

tailwind.config.js
View File

@@ -0,0 +1,9 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: ["./handlers/**/*.html"],
theme: {
extend: {},
},
plugins: [],
}