Merge branch 'origin/proxy_v2' into proxy_v2/playground

joncrangle, 2023-12-10 23:44:46 -05:00 (committed by GitHub)
67 changed files with 463 additions and 125 deletions
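Most hunks below are a mechanical import-path migration from the bare module name "ladder" to the fully qualified "github.com/everywall/ladder" prefix; a minimal sketch of the module declaration such a rename assumes (the go.mod itself is not part of this excerpt):

module github.com/everywall/ladder

The remaining hunks change behavior: the readable-outline renderer, a new ModifyIncomingScriptsWithRegex response modifier, the outline HTML template, and a YAML ruleset parsing fallback.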

@@ -70,8 +70,8 @@ package ruleset_v2
// for use in proxychains.
import (
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
"github.com/everywall/ladder/proxychain"
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
)
type ResponseModifierFactory func(params ...string) proxychain.ResponseModification
@@ -142,8 +142,8 @@ package ruleset_v2
// for use in proxychains.
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
"github.com/everywall/ladder/proxychain"
rx "github.com/everywall/ladder/proxychain/requestmodifiers"
)
type RequestModifierFactory func(params ...string) proxychain.RequestModification

@@ -1,8 +1,9 @@
package requestmodifiers
import (
"ladder/proxychain"
"math/rand"
"github.com/everywall/ladder/proxychain"
)
// AddCacheBusterQuery modifies query params to add a random parameter key

@@ -3,7 +3,7 @@ package requestmodifiers
import (
"strings"
//"fmt"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
var forwardBlacklist map[string]bool

@@ -1,8 +1,9 @@
package requestmodifiers
import (
"ladder/proxychain"
"ladder/proxychain/requestmodifiers/bot"
"github.com/everywall/ladder/proxychain/requestmodifiers/bot"
"github.com/everywall/ladder/proxychain"
)
// MasqueradeAsGoogleBot modifies user agent and x-forwarded for

@@ -4,7 +4,7 @@ import (
"fmt"
"regexp"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
func ModifyDomainWithRegex(matchRegex string, replacement string) proxychain.RequestModification {

@@ -5,7 +5,7 @@ import (
//http "github.com/Danny-Dasilva/fhttp"
http "github.com/bogdanfinn/fhttp"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SetOutgoingCookie modifies a specific cookie name

@@ -4,7 +4,7 @@ import (
"fmt"
"regexp"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
func ModifyPathWithRegex(matchRegex string, replacement string) proxychain.RequestModification {

@@ -4,7 +4,7 @@ import (
//"fmt"
"net/url"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// ModifyQueryParams replaces query parameter values in URL's query params in a ProxyChain's URL.

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SetRequestHeader modifies a specific outgoing header

@@ -5,8 +5,9 @@ import (
"net/url"
"regexp"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
"github.com/everywall/ladder/proxychain"
)
const archivistUrl string = "https://archive.is/latest"

@@ -3,7 +3,7 @@ package requestmodifiers
import (
"net/url"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
const googleCacheUrl string = "https://webcache.googleusercontent.com/search?q=cache:"

@@ -4,8 +4,9 @@ import (
"net/url"
"regexp"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
"github.com/everywall/ladder/proxychain"
)
const waybackUrl string = "https://web.archive.org/web/"

@@ -14,7 +14,7 @@ import (
//"net/http"
*/
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// resolveWithGoogleDoH resolves DNS using Google's DNS-over-HTTPS

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofOrigin modifies the origin header

@@ -3,8 +3,9 @@ package requestmodifiers
import (
"fmt"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrer modifies the referrer header.

@@ -6,7 +6,7 @@ import (
"strings"
"time"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromBaiduSearch modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromBingSearch modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromGoogleSearch modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromLinkedInPost modifies the referrer header

@@ -3,7 +3,7 @@ package requestmodifiers
import (
"fmt"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromNaverSearch modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromPinterestPost modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromQQPost modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromRedditPost modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromTumblrPost modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromTwitterPost modifies the referrer header

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromVkontaktePost modifies the referrer header

@@ -4,7 +4,7 @@ import (
"fmt"
"math/rand"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofReferrerFromWeiboPost modifies the referrer header

@@ -4,8 +4,9 @@ import (
_ "embed"
"strings"
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
"github.com/everywall/ladder/proxychain"
)
// https://github.com/faisalman/ua-parser-js/tree/master

@@ -1,7 +1,7 @@
package requestmodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SpoofXForwardedFor modifies the X-Forwarded-For header

@@ -7,8 +7,9 @@ import (
"github.com/markusmobius/go-trafilatura"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/api"
"github.com/everywall/ladder/proxychain/responsemodifiers/api"
"github.com/everywall/ladder/proxychain"
)
// APIContent creates a JSON representation of the article and returns it as an API response.

@@ -7,7 +7,7 @@ import (
"net/url"
"testing"
"ladder/proxychain/responsemodifiers/api"
"github.com/everywall/ladder/proxychain/responsemodifiers/api"
)
func TestCreateAPIErrReader(t *testing.T) {

@@ -4,8 +4,9 @@ import (
_ "embed"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain"
)
//go:embed vendor/block_element_removal.js

@@ -5,8 +5,9 @@ import (
"fmt"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain"
)
// BlockThirdPartyScripts rewrites HTML and injects JS to block all third party JS from loading.

@@ -1,7 +1,7 @@
package responsemodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// BypassCORS modifies response headers to prevent the browser

@@ -1,7 +1,7 @@
package responsemodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// TODO: handle edge case where CSP is specified in meta tag:

@@ -4,7 +4,7 @@ import (
_ "embed"
"strings"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// DeleteLocalStorageData deletes localstorage cookies.

@@ -4,7 +4,7 @@ import (
_ "embed"
"strings"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// DeleteSessionStorageData deletes sessionStorage data.

@@ -5,7 +5,7 @@ import (
"net/url"
"strings"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
var forwardBlacklist map[string]bool

@@ -7,16 +7,15 @@ import (
"html/template"
"io"
"log"
"math"
"net/url"
"strings"
"time"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
"github.com/markusmobius/go-trafilatura"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
//"github.com/go-shiori/dom"
"github.com/markusmobius/go-trafilatura"
)
//go:embed vendor/generate_readable_outline.html
@@ -63,19 +62,24 @@ func GenerateReadableOutline() proxychain.ResponseModification {
html.Render(&b, extract.ContentNode)
distilledHTML := b.String()
siteName := strings.Split(extract.Metadata.Sitename, ";")[0]
title := strings.Split(extract.Metadata.Title, "|")[0]
fmtDate := createWikipediaDateLink(extract.Metadata.Date)
readingTime := formatDuration(estimateReadingTime(extract.ContentText))
// populate template parameters
data := map[string]interface{}{
"Success": true,
"Image": extract.Metadata.Image,
"Description": extract.Metadata.Description,
"Sitename": extract.Metadata.Sitename,
"Sitename": siteName,
"Hostname": extract.Metadata.Hostname,
"Url": "/" + chain.Request.URL.String(),
"Title": extract.Metadata.Title, // todo: modify CreateReadableDocument so we don't have <h1> titles duplicated?
"Date": extract.Metadata.Date.String(),
"Author": createWikipediaSearchLinks(extract.Metadata.Author),
//"Author": extract.Metadata.Author,
"Body": distilledHTML,
"Title": title,
"Date": fmtDate,
"Author": createDDGFeelingLuckyLinks(extract.Metadata.Author, extract.Metadata.Hostname),
"Body": distilledHTML,
"ReadingTime": readingTime,
}
// ============================================================================
@@ -157,9 +161,20 @@ func rewriteHrefLinks(n *html.Node, baseURL string, apiPath string) {
recurse(n)
}
// createWikipediaSearchLinks takes in comma or semicolon separated terms,
// then turns them into <a> links searching for the term.
func createWikipediaSearchLinks(searchTerms string) string {
// createWikipediaDateLink takes in a date
// and returns an <a> link pointing to the current events page for that day
func createWikipediaDateLink(t time.Time) string {
url := fmt.Sprintf("https://en.wikipedia.org/wiki/Portal:Current_events#%s", t.Format("2006_January_02"))
date := t.Format("January 02, 2006")
return fmt.Sprintf("<a rel=\"noreferrer\" href=\"%s\">%s</a>", url, date)
}
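As a worked example (input date assumed for illustration), the formats above produce a link to that day's Wikipedia current-events portal:
// createWikipediaDateLink(time.Date(2023, time.December, 10, 0, 0, 0, 0, time.UTC)) returns:
// <a rel="noreferrer" href="https://en.wikipedia.org/wiki/Portal:Current_events#2023_December_10">December 10, 2023</a>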
// createDDGFeelingLuckyLinks takes in comma or semicolon separated terms,
// then turns them into <a> links searching for the term using DuckDuckGo's I'm
// feeling lucky feature. It will redirect the user immediately to the first search result.
func createDDGFeelingLuckyLinks(searchTerms string, siteHostname string) string {
siteHostname = strings.TrimSpace(siteHostname)
semiColonSplit := strings.Split(searchTerms, ";")
var links []string
@@ -171,11 +186,13 @@ func createWikipediaSearchLinks(searchTerms string) string {
continue
}
encodedTerm := url.QueryEscape(trimmedTerm)
ddgQuery := fmt.Sprintf(` site:%s intitle:"%s"`, strings.TrimPrefix(siteHostname, "www."), trimmedTerm)
wikiURL := fmt.Sprintf("https://en.wikipedia.org/w/index.php?search=%s", encodedTerm)
encodedTerm := `\%s:` + url.QueryEscape(ddgQuery)
//ddgURL := `https://html.duckduckgo.com/html/?q=` + encodedTerm
ddgURL := `https://www.duckduckgo.com/?q=` + encodedTerm
link := fmt.Sprintf("<a href=\"%s\">%s</a>", wikiURL, trimmedTerm)
link := fmt.Sprintf("<a rel=\"noreferrer\" href=\"%s\">%s</a>", ddgURL, trimmedTerm)
links = append(links, link)
}
@@ -187,3 +204,66 @@ func createWikipediaSearchLinks(searchTerms string) string {
return strings.Join(links, " ")
}
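Per the doc comment above, the link relies on DuckDuckGo's backslash ("I'm feeling lucky") redirect, scoping the query to the article's host and title so the first hit is the original page. With an assumed author "Jane Doe" on www.example.com, the query built above is:
// ddgQuery = ` site:example.com intitle:"Jane Doe"`   // "www." prefix trimmed from the hostname
which is then URL-escaped, given the backslash prefix, and appended to https://www.duckduckgo.com/?q=.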
// estimateReadingTime estimates how long the given text will take to read using the given configuration.
func estimateReadingTime(text string) time.Duration {
if len(text) == 0 {
return 0
}
// Init options with default values.
WordsPerMinute := 200
WordBound := func(b byte) bool {
return b == ' ' || b == '\n' || b == '\r' || b == '\t'
}
words := 0
start := 0
end := len(text) - 1
// Fetch bounds.
for WordBound(text[start]) {
start++
}
for WordBound(text[end]) {
end--
}
// Calculate the number of words.
for i := start; i <= end; {
for i <= end && !WordBound(text[i]) {
i++
}
words++
for i <= end && WordBound(text[i]) {
i++
}
}
// Reading time stats.
minutes := math.Ceil(float64(words) / float64(WordsPerMinute))
duration := time.Duration(math.Ceil(minutes) * float64(time.Minute))
return duration
}
func formatDuration(d time.Duration) string {
// Check if the duration is less than one minute
if d < time.Minute {
seconds := int(d.Seconds())
return fmt.Sprintf("%d seconds", seconds)
}
// Convert the duration to minutes
minutes := int(d.Minutes())
// Format the string for one or more minutes
if minutes == 1 {
return "1 minute"
} else {
return fmt.Sprintf("%d minutes", minutes)
}
}
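A quick sanity check of the arithmetic above, with an assumed 1,000-word body at the default 200 words per minute:
d := estimateReadingTime(strings.Repeat("word ", 1000)) // ceil(1000 / 200) = 5 minutes
fmt.Println(formatDuration(d))                          // prints "5 minutes"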

@@ -4,8 +4,9 @@ import (
_ "embed"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain"
)
// injectScript modifies HTTP responses

@@ -7,7 +7,7 @@ import (
//"net/http"
//http "github.com/Danny-Dasilva/fhttp"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// DeleteIncomingCookies prevents ALL cookies from being sent from the proxy server

@@ -0,0 +1,53 @@
package responsemodifiers
import (
"bytes"
"io"
"regexp"
"strings"
"github.com/everywall/ladder/proxychain"
)
// ModifyIncomingScriptsWithRegex modifies all incoming javascript (application/javascript and inline <script> in text/html) using a regex match and replacement.
func ModifyIncomingScriptsWithRegex(matchRegex string, replacement string) proxychain.ResponseModification {
return func(chain *proxychain.ProxyChain) error {
path := chain.Request.URL.Path
ct := chain.Response.Header.Get("content-type")
isJavascript := strings.HasSuffix(path, ".js") || ct == "text/javascript" || ct == "application/javascript"
isHTML := strings.HasSuffix(chain.Request.URL.Path, ".html") || ct == "text/html"
switch {
case isJavascript:
rBody, err := modifyResponse(chain.Response.Body, matchRegex, replacement)
if err != nil {
return err
}
chain.Response.Body = rBody
case isHTML:
default:
return nil
}
return nil
}
}
func modifyResponse(body io.ReadCloser, matchRegex, replacement string) (io.ReadCloser, error) {
content, err := io.ReadAll(body)
if err != nil {
return nil, err
}
re, err := regexp.Compile(matchRegex)
if err != nil {
return nil, err
}
err = body.Close()
if err != nil {
return body, err
}
modifiedContent := re.ReplaceAll(content, []byte(replacement))
return io.NopCloser(bytes.NewReader(modifiedContent)), nil
}
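A minimal usage sketch for the new modifier (the pattern and replacement are hypothetical; the chain call mirrors the AddOnceResponseModifications usage seen elsewhere in this diff):
// Rewrite console.log calls in any proxied JavaScript to void(...), silencing them.
chain.AddOnceResponseModifications(
	ModifyIncomingScriptsWithRegex(`console\.log`, `void`),
)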

@@ -1,7 +1,7 @@
package responsemodifiers
import (
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
// SetResponseHeader modifies response headers from the upstream server

@@ -5,8 +5,9 @@ import (
"fmt"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain"
)
//go:embed vendor/patch_dynamic_resource_urls.js
@@ -51,6 +52,19 @@ func PatchDynamicResourceURLs() proxychain.ResponseModification {
htmlRewriter := rewriters.NewHTMLRewriter(chain.Response.Body, rr)
chain.Response.Body = htmlRewriter
// window.location
/*
spoofedLocationAPI := fmt.Sprintf(`{href:"%s", origin:"%s", pathname:"%s", protocol:"%s:", port:"%s"}`,
reqURL.String(), reqURL.Host,
reqURL.Path, reqURL.Scheme, reqURL.Port())
spoofedLocationAPI := fmt.Sprintf(`{origin: "%s"}`, reqURL.Host)
fmt.Println(spoofedLocationAPI)
chain.AddOnceResponseModifications(
ModifyIncomingScriptsWithRegex(`window\.location`, spoofedLocationAPI),
)
*/
return nil
}
}
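If the disabled block above were re-enabled, the regex pass would replace every window.location reference in proxied scripts with the spoofed object literal, e.g. (hostname assumed for illustration):
// before: window.location.origin
// after:  {origin: "example.com"}.origin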

@@ -7,7 +7,7 @@ import (
"log"
"regexp"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
//go:embed vendor/ddg-tracker-surrogates/mapping.json

@@ -5,8 +5,9 @@ import (
"fmt"
"strings"
"ladder/proxychain"
"ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain/responsemodifiers/rewriters"
"github.com/everywall/ladder/proxychain"
)
// RewriteHTMLResourceURLs modifies HTTP responses

@@ -46,7 +46,7 @@
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
viewBox="0 0 512 512"
class="h-8 focus:outline-none focus:ring focus:border-[#7AA7D1] ring-offset-2"
class="noprint h-8 focus:outline-none focus:ring focus:border-[#7AA7D1] ring-offset-2"
>
<path
fill="#7AA7D1"
@@ -68,7 +68,7 @@
</div>
<div class="flex justify-center z-10">
<div class="noprint flex justify-center z-10">
<div class="relative" id="dropdown">
<button
aria-expanded="false"
@@ -95,6 +95,15 @@
</svg>
</button>
<button
aria-expanded="closed"
onclick="document.getElementById('readingtime').innerText = 'Date Accessed: '+(new Date().toLocaleDateString('en-US', { year: 'numeric', month: 'long', day: 'numeric' })); [...document.querySelectorAll('.noprint')].forEach(e => e.remove()); window.print()"
type="button"
class="inline-flex items-center justify-center whitespace-nowrap rounded-full h-12 px-4 py-2 text-sm font-medium text-slate-600 dark:text-slate-400 ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 bg-white dark:bg-slate-900 hover:bg-slate-200 dark:hover:bg-slate-700 hover:text-slate-500 dark:hover:text-slate-200"
>
<svg xmlns="http://www.w3.org/2000/svg" fill="currentColor" height="16" width="16" viewBox="0 0 512 512"><path d="M128 0C92.7 0 64 28.7 64 64v96h64V64H354.7L384 93.3V160h64V93.3c0-17-6.7-33.3-18.7-45.3L400 18.7C388 6.7 371.7 0 354.7 0H128zM384 352v32 64H128V384 368 352H384zm64 32h32c17.7 0 32-14.3 32-32V256c0-35.3-28.7-64-64-64H64c-35.3 0-64 28.7-64 64v96c0 17.7 14.3 32 32 32H64v64c0 35.3 28.7 64 64 64H384c35.3 0 64-28.7 64-64V384zM432 248a24 24 0 1 1 0 48 24 24 0 1 1 0-48z"/></svg>
</button>
<div
id="dropdown_panel"
class="hidden absolute right-0 mt-2 w-52 rounded-md bg-white dark:bg-slate-900 shadow-md border border-slate-400 dark:border-slate-700"
@@ -329,6 +338,7 @@
{{.Error}}
</code>
{{else}}
<div class="flex flex-col gap-1 mt-3">
<h1>
<a href="{{.Url}}" class="text-slate-900 dark:text-slate-200 no-underline hover:underline"> {{.Title}} </a>
@@ -347,6 +357,27 @@
{{end}}
</div>
<h2>
<a href="{{.Url}}" class="text-slate-900 dark:text-slate-200"> {{.Title}} </a>
</h2>
<div class="flex justify-between items-center gap-1 mt-3">
<div>
{{if ne .Author ""}}
<small class="text-sm font-medium leading-none text-slate-600 dark:text-slate-400">{{.Author}} | </small>
{{end}}
{{if ne .Date ""}}
<small class="text-sm font-medium leading-none text-slate-600 dark:text-slate-400">{{.Date}}</small>
{{end}}
</div>
<div>
<small id="readingtime" class="text-sm font-medium leading-none text-slate-600 dark:text-slate-400">Reading Time: {{.ReadingTime}}</small>
</div>
</div>
<div class="flex flex-col space-y-3">
<div>
<div class="grid grid-cols-1 justify-items-center">
@@ -360,21 +391,22 @@
</main>
<div class="my-2"></div>
<footer class="mx-4 text-center text-slate-600 dark:text-slate-400">
<p>
Code Licensed Under GPL v3.0 |
<a
href="https://github.com/everywall/ladder"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>View Source</a
>
|
<footer class="noprint mx-4 my-2 pt-2 border-t border-gray-300 dark:border-gray-700 text-center text-slate-600 dark:text-slate-400">
<small>
<a
href="https://github.com/everywall"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>Everywall</a
>
</p>
|
<a
href="https://github.com/everywall/ladder"
class="hover:text-blue-500 dark:hover:text-blue-500 hover:underline underline-offset-2 transition-colors duration-300"
>Source</a
>
| Code Licensed Under GPL v3.0
</small>
</footer>
<div class="my-2"></div>
</div>

@@ -6,7 +6,7 @@ import (
"fmt"
//"gopkg.in/yaml.v3"
"ladder/proxychain"
"github.com/everywall/ladder/proxychain"
)
type Rule struct {

@@ -5,8 +5,8 @@ package ruleset_v2
// for use in proxychains.
import (
"ladder/proxychain"
rx "ladder/proxychain/requestmodifiers"
"github.com/everywall/ladder/proxychain"
rx "github.com/everywall/ladder/proxychain/requestmodifiers"
)
type RequestModifierFactory func(params ...string) proxychain.RequestModification

@@ -5,8 +5,8 @@ package ruleset_v2
// for use in proxychains.
import (
"ladder/proxychain"
tx "ladder/proxychain/responsemodifiers"
"github.com/everywall/ladder/proxychain"
tx "github.com/everywall/ladder/proxychain/responsemodifiers"
)
type ResponseModifierFactory func(params ...string) proxychain.ResponseModification
@@ -84,6 +84,10 @@ func init() {
return tx.SetIncomingCookie(params[0], params[1])
}
rsmModMap["ModifyIncomingScriptsWithRegex"] = func(params ...string) proxychain.ResponseModification {
return tx.ModifyIncomingScriptsWithRegex(params[0], params[1])
}
rsmModMap["SetResponseHeader"] = func(params ...string) proxychain.ResponseModification {
return tx.SetResponseHeader(params[0], params[1])
}

@@ -33,14 +33,21 @@ func (rs *Ruleset) UnmarshalYAML(unmarshal func(interface{}) error) error {
type AuxRuleset struct {
Rules []Rule `yaml:"rules"`
}
yr := &AuxRuleset{}
yamlRuleset := &AuxRuleset{}
if err := unmarshal(&yr); err != nil {
return err
// First, try to unmarshal as AuxRuleset
err := unmarshal(yamlRuleset)
if err != nil {
// If that fails, try to unmarshal directly into a slice of Rules
var directRules []Rule
if err := unmarshal(&directRules); err != nil {
return err
}
yamlRuleset.Rules = directRules
}
rs._rulemap = make(map[string]*Rule)
rs.Rules = yr.Rules
rs.Rules = yamlRuleset.Rules
// create a map of pointers to rules loaded above based on domain string keys
// this way we don't have two copies of the rule in ruleset
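A minimal sketch of the two document shapes this now accepts, assuming gopkg.in/yaml.v2-style decoding (which is what the UnmarshalYAML(func(interface{}) error) signature implies):
var a, b Ruleset
_ = yaml.Unmarshal([]byte("rules:\n  - {}\n"), &a) // mapping wrapped under a "rules" key
_ = yaml.Unmarshal([]byte("- {}\n"), &b)           // bare top-level list, caught by the new fallback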
@@ -214,6 +221,7 @@ func (rs *Ruleset) loadRulesFromLocalDir(path string) error {
if !isYAML {
return nil
}
fmt.Printf("loadRulesFromLocalDir :: loading rule: %s\n", path)
tmpRs := Ruleset{_rulemap: make(map[string]*Rule)}
err = tmpRs.loadRulesFromLocalFile(path)
@@ -258,6 +266,7 @@ func (rs *Ruleset) loadRulesFromLocalFile(path string) error {
e := fmt.Errorf("failed to read rules from local file: '%s'", path)
return errors.Join(e, err)
}
fmt.Printf("loadRulesFromLocalFile :: %s\n", path)
isJSON := strings.HasSuffix(path, ".json")
if isJSON {