Merge pull request #47 from dxbednarczyk/main

Idiomatize(?) ruleset package and run lint
ladddder
2023-11-25 22:25:20 +01:00
committed by GitHub
7 changed files with 129 additions and 57 deletions

View File

@@ -1,6 +1,6 @@
 lint:
 	gofumpt -l -w .
-	golangci-lint run -c .golangci-lint.yaml
+	golangci-lint run -c .golangci-lint.yaml --fix
 	go mod tidy
 	go clean

View File

@@ -29,6 +29,7 @@ func main() {
if os.Getenv("PORT") == "" { if os.Getenv("PORT") == "" {
portEnv = "8080" portEnv = "8080"
} }
port := parser.String("p", "port", &argparse.Options{ port := parser.String("p", "port", &argparse.Options{
Required: false, Required: false,
Default: portEnv, Default: portEnv,
@@ -49,10 +50,12 @@ func main() {
 		Required: false,
 		Help: "Compiles a directory of yaml files into a single ruleset.yaml. Requires --ruleset arg.",
 	})
 	mergeRulesetsGzip := parser.Flag("", "merge-rulesets-gzip", &argparse.Options{
 		Required: false,
 		Help: "Compiles a directory of yaml files into a single ruleset.gz Requires --ruleset arg.",
 	})
 	mergeRulesetsOutput := parser.String("", "merge-rulesets-output", &argparse.Options{
 		Required: false,
 		Help: "Specify output file for --merge-rulesets and --merge-rulesets-gzip. Requires --ruleset and --merge-rulesets args.",
@@ -65,7 +68,18 @@ func main() {
 	// utility cli flag to compile ruleset directory into single ruleset.yaml
 	if *mergeRulesets || *mergeRulesetsGzip {
-		err = cli.HandleRulesetMerge(ruleset, mergeRulesets, mergeRulesetsGzip, mergeRulesetsOutput)
+		output := os.Stdout
+		if *mergeRulesetsOutput != "" {
+			output, err = os.Create(*mergeRulesetsOutput)
+			if err != nil {
+				fmt.Println(err)
+				os.Exit(1)
+			}
+		}
+		err = cli.HandleRulesetMerge(*ruleset, *mergeRulesets, *mergeRulesetsGzip, output)
 		if err != nil {
 			fmt.Println(err)
 			os.Exit(1)
@@ -87,6 +101,7 @@ func main() {
 	userpass := os.Getenv("USERPASS")
 	if userpass != "" {
 		userpass := strings.Split(userpass, ":")
 		app.Use(basicauth.New(basicauth.Config{
 			Users: map[string]string{
 				userpass[0]: userpass[1],
@@ -102,23 +117,28 @@ func main() {
if os.Getenv("NOLOGS") != "true" { if os.Getenv("NOLOGS") != "true" {
app.Use(func(c *fiber.Ctx) error { app.Use(func(c *fiber.Ctx) error {
log.Println(c.Method(), c.Path()) log.Println(c.Method(), c.Path())
return c.Next() return c.Next()
}) })
} }
app.Get("/", handlers.Form) app.Get("/", handlers.Form)
app.Get("/styles.css", func(c *fiber.Ctx) error { app.Get("/styles.css", func(c *fiber.Ctx) error {
cssData, err := cssData.ReadFile("styles.css") cssData, err := cssData.ReadFile("styles.css")
if err != nil { if err != nil {
return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error") return c.Status(fiber.StatusInternalServerError).SendString("Internal Server Error")
} }
c.Set("Content-Type", "text/css") c.Set("Content-Type", "text/css")
return c.Send(cssData) return c.Send(cssData)
}) })
app.Get("ruleset", handlers.Ruleset)
app.Get("ruleset", handlers.Ruleset)
app.Get("raw/*", handlers.Raw) app.Get("raw/*", handlers.Raw)
app.Get("api/*", handlers.Api) app.Get("api/*", handlers.Api)
app.Get("/*", handlers.ProxySite(*ruleset)) app.Get("/*", handlers.ProxySite(*ruleset))
log.Fatal(app.Listen(":" + *port)) log.Fatal(app.Listen(":" + *port))
} }
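The new block above follows a common Go idiom: default the destination to os.Stdout and only swap in a file when an output path was given. A minimal standalone sketch of the same idiom, using the standard flag package rather than the argparse wiring in this repository (the flag name and message are illustrative, not taken from this codebase):

package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	// Illustrative flag; the CLI above uses argparse and --merge-rulesets-output.
	outPath := flag.String("output", "", "write to this file instead of stdout")
	flag.Parse()

	out := os.Stdout // default destination
	if *outPath != "" {
		f, err := os.Create(*outPath)
		if err != nil {
			fmt.Println(err)
			os.Exit(1)
		}
		defer f.Close()
		out = f
	}

	fmt.Fprintln(out, "merged ruleset would be written here")
}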

View File

@@ -3,10 +3,10 @@ package cli
 import (
 	"fmt"
 	"io"
-	"io/fs"
-	"ladder/pkg/ruleset"
 	"os"
 
+	"ladder/pkg/ruleset"
+
 	"golang.org/x/term"
 )
@@ -14,32 +14,38 @@ import (
 // Exits the program with an error message if the ruleset path is not provided or if loading the ruleset fails.
 //
 // Parameters:
-// - rulesetPath: A pointer to a string specifying the path to the ruleset file.
-// - mergeRulesets: A pointer to a boolean indicating if a merge operation should be performed.
-// - mergeRulesetsGzip: A pointer to a boolean indicating if the merge should be in Gzip format.
-// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is printed to stdout.
+// - rulesetPath: Specifies the path to the ruleset file.
+// - mergeRulesets: Indicates if a merge operation should be performed.
+// - useGzip: Indicates if the merged rulesets should be gzip-ped.
+// - output: Specifies the output file. If nil, stdout will be used.
 //
 // Returns:
 // - An error if the ruleset loading or merging process fails, otherwise nil.
-func HandleRulesetMerge(rulesetPath *string, mergeRulesets *bool, mergeRulesetsGzip *bool, mergeRulesetsOutput *string) error {
-	if *rulesetPath == "" {
-		*rulesetPath = os.Getenv("RULESET")
+func HandleRulesetMerge(rulesetPath string, mergeRulesets bool, useGzip bool, output *os.File) error {
+	if !mergeRulesets {
+		return nil
 	}
-	if *rulesetPath == "" {
-		fmt.Println("ERROR: no ruleset provided. Try again with --ruleset <ruleset.yaml>")
+	if rulesetPath == "" {
+		rulesetPath = os.Getenv("RULESET")
+	}
+	if rulesetPath == "" {
+		fmt.Println("error: no ruleset provided. Try again with --ruleset <ruleset.yaml>")
 		os.Exit(1)
 	}
-	rs, err := ruleset.NewRuleset(*rulesetPath)
+	rs, err := ruleset.NewRuleset(rulesetPath)
 	if err != nil {
 		fmt.Println(err)
 		os.Exit(1)
 	}
-	if *mergeRulesetsGzip {
-		return gzipMerge(rs, mergeRulesetsOutput)
+	if useGzip {
+		return gzipMerge(rs, output)
 	}
-	return yamlMerge(rs, mergeRulesetsOutput)
+	return yamlMerge(rs, output)
 }
 
 // gzipMerge takes a RuleSet and an optional output file path pointer. It compresses the RuleSet into Gzip format.
@@ -48,33 +54,33 @@ func HandleRulesetMerge(rulesetPath *string, mergeRulesets *bool, mergeRulesetsG
 //
 // Parameters:
 // - rs: The ruleset.RuleSet to be compressed.
-// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is directed to stdout.
+// - output: The output for the gzip data. If nil, stdout will be used.
 //
 // Returns:
 // - An error if compression or file writing fails, otherwise nil.
-func gzipMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
+func gzipMerge(rs ruleset.RuleSet, output io.Writer) error {
 	gzip, err := rs.GzipYaml()
 	if err != nil {
 		return err
 	}
-	if *mergeRulesetsOutput != "" {
-		out, err := os.Create(*mergeRulesetsOutput)
-		defer out.Close()
-		_, err = io.Copy(out, gzip)
+	if output != nil {
+		_, err = io.Copy(output, gzip)
 		if err != nil {
 			return err
 		}
 	}
 	if term.IsTerminal(int(os.Stdout.Fd())) {
-		println("WARNING: binary output can mess up your terminal. Use '--merge-rulesets-output <ruleset.gz>' or pipe it to a file.")
+		println("warning: binary output can mess up your terminal. Use '--merge-rulesets-output <ruleset.gz>' or pipe it to a file.")
 		os.Exit(1)
 	}
 	_, err = io.Copy(os.Stdout, gzip)
 	if err != nil {
 		return err
 	}
 	return nil
 }
@@ -83,23 +89,25 @@ func gzipMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
 //
 // Parameters:
 // - rs: The ruleset.RuleSet to be converted to YAML.
-// - mergeRulesetsOutput: A pointer to a string specifying the output file path. If empty, the output is printed to stdout.
+// - output: The output for the merged data. If nil, stdout will be used.
 //
 // Returns:
 // - An error if YAML conversion or file writing fails, otherwise nil.
-func yamlMerge(rs ruleset.RuleSet, mergeRulesetsOutput *string) error {
+func yamlMerge(rs ruleset.RuleSet, output io.Writer) error {
 	yaml, err := rs.Yaml()
 	if err != nil {
 		return err
 	}
-	if *mergeRulesetsOutput == "" {
-		fmt.Printf(yaml)
+	if output == nil {
+		fmt.Println(yaml)
 		os.Exit(0)
 	}
-	err = os.WriteFile(*mergeRulesetsOutput, []byte(yaml), fs.FileMode(os.O_RDWR))
+	_, err = io.WriteString(output, yaml)
 	if err != nil {
-		return fmt.Errorf("ERROR: failed to write merged YAML ruleset to '%s'\n", *mergeRulesetsOutput)
+		return fmt.Errorf("failed to write merged YAML ruleset: %v", err)
 	}
 	return nil
 }
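A practical effect of moving gzipMerge and yamlMerge onto io.Writer is that merge output no longer has to go through a file path; any writer will do. A rough sketch under that assumption, reusing only the exported ruleset API shown elsewhere in this PR (the path and the bytes.Buffer target are illustrative):

package main

import (
	"bytes"
	"fmt"
	"io"

	"ladder/pkg/ruleset"
)

func main() {
	// Illustrative path; NewRuleset also accepts a ';'-separated list of paths.
	rs, err := ruleset.NewRuleset("ruleset.yaml")
	if err != nil {
		fmt.Println(err)
		return
	}

	// Serialize the merged ruleset and send it to any io.Writer,
	// mirroring what the refactored yamlMerge does with its output argument.
	y, err := rs.Yaml()
	if err != nil {
		fmt.Println(err)
		return
	}

	var buf bytes.Buffer
	if _, err := io.WriteString(&buf, y); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("merged ruleset is %d bytes of YAML\n", buf.Len())
}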

View File

@@ -80,7 +80,6 @@ func extractUrl(c *fiber.Ctx) (string, error) {
 	// default behavior:
 	// eg: https://localhost:8080/https://realsite.com/images/foobar.jpg -> https://realsite.com/images/foobar.jpg
 	return urlQuery.String(), nil
 }
 
 func ProxySite(rulesetPath string) fiber.Handler {
@@ -121,18 +120,18 @@ func modifyURL(uri string, rule ruleset.Rule) (string, error) {
return "", err return "", err
} }
for _, urlMod := range rule.UrlMods.Domain { for _, urlMod := range rule.URLMods.Domain {
re := regexp.MustCompile(urlMod.Match) re := regexp.MustCompile(urlMod.Match)
newUrl.Host = re.ReplaceAllString(newUrl.Host, urlMod.Replace) newUrl.Host = re.ReplaceAllString(newUrl.Host, urlMod.Replace)
} }
for _, urlMod := range rule.UrlMods.Path { for _, urlMod := range rule.URLMods.Path {
re := regexp.MustCompile(urlMod.Match) re := regexp.MustCompile(urlMod.Match)
newUrl.Path = re.ReplaceAllString(newUrl.Path, urlMod.Replace) newUrl.Path = re.ReplaceAllString(newUrl.Path, urlMod.Replace)
} }
v := newUrl.Query() v := newUrl.Query()
for _, query := range rule.UrlMods.Query { for _, query := range rule.URLMods.Query {
if query.Value == "" { if query.Value == "" {
v.Del(query.Key) v.Del(query.Key)
continue continue
@@ -223,11 +222,11 @@ func fetchSite(urlpath string, queries map[string]string) (string, *http.Request
 	}
 	if rule.Headers.CSP != "" {
-		//log.Println(rule.Headers.CSP)
+		// log.Println(rule.Headers.CSP)
 		resp.Header.Set("Content-Security-Policy", rule.Headers.CSP)
 	}
-	//log.Print("rule", rule) TODO: Add a debug mode to print the rule
+	// log.Print("rule", rule) TODO: Add a debug mode to print the rule
 	body := rewriteHtml(bodyB, u, rule)
 	return body, req, resp, nil
 }

View File

@@ -2,12 +2,13 @@
 package handlers
 
 import (
-	"ladder/pkg/ruleset"
 	"net/http"
 	"net/http/httptest"
 	"net/url"
 	"testing"
 
+	"ladder/pkg/ruleset"
+
 	"github.com/gofiber/fiber/v2"
 	"github.com/stretchr/testify/assert"
 )

View File

@@ -1,6 +1,7 @@
 package ruleset
 
 import (
+	"compress/gzip"
 	"errors"
 	"fmt"
 	"io"
@@ -11,8 +12,6 @@ import (
"regexp" "regexp"
"strings" "strings"
"compress/gzip"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v3"
) )
@@ -41,7 +40,7 @@ type Rule struct {
 	GoogleCache bool `yaml:"googleCache,omitempty"`
 	RegexRules []Regex `yaml:"regexRules,omitempty"`
-	UrlMods struct {
+	URLMods struct {
 		Domain []Regex `yaml:"domain,omitempty"`
 		Path []Regex `yaml:"path,omitempty"`
 		Query []KV `yaml:"query,omitempty"`
@@ -55,6 +54,8 @@ type Rule struct {
 	} `yaml:"injections,omitempty"`
 }
 
+var remoteRegex = regexp.MustCompile(`^https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)`)
+
 // NewRulesetFromEnv creates a new RuleSet based on the RULESET environment variable.
 // It logs a warning and returns an empty RuleSet if the RULESET environment variable is not set.
 // If the RULESET is set but the rules cannot be loaded, it panics.
@@ -64,10 +65,12 @@ func NewRulesetFromEnv() RuleSet {
log.Printf("WARN: No ruleset specified. Set the `RULESET` environment variable to load one for a better success rate.") log.Printf("WARN: No ruleset specified. Set the `RULESET` environment variable to load one for a better success rate.")
return RuleSet{} return RuleSet{}
} }
ruleSet, err := NewRuleset(rulesPath) ruleSet, err := NewRuleset(rulesPath)
if err != nil { if err != nil {
log.Println(err) log.Println(err)
} }
return ruleSet return ruleSet
} }
@@ -75,16 +78,17 @@ func NewRulesetFromEnv() RuleSet {
 // It supports loading rules from both local file paths and remote URLs.
 // Returns a RuleSet and an error if any issues occur during loading.
 func NewRuleset(rulePaths string) (RuleSet, error) {
-	ruleSet := RuleSet{}
-	errs := []error{}
+	var ruleSet RuleSet
+	var errs []error
 	rp := strings.Split(rulePaths, ";")
-	var remoteRegex = regexp.MustCompile(`^https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)`)
 	for _, rule := range rp {
-		rulePath := strings.Trim(rule, " ")
 		var err error
+		rulePath := strings.Trim(rule, " ")
 		isRemote := remoteRegex.MatchString(rulePath)
 		if isRemote {
 			err = ruleSet.loadRulesFromRemoteFile(rulePath)
 		} else {
@@ -94,6 +98,7 @@ func NewRuleset(rulePaths string) (RuleSet, error) {
 		if err != nil {
 			e := fmt.Errorf("WARN: failed to load ruleset from '%s'", rulePath)
 			errs = append(errs, errors.Join(e, err))
 			continue
 		}
 	}
@@ -101,6 +106,7 @@ func NewRuleset(rulePaths string) (RuleSet, error) {
 	if len(errs) != 0 {
 		e := fmt.Errorf("WARN: failed to load %d rulesets", len(rp))
 		errs = append(errs, e)
 		// panic if the user specified a local ruleset, but it wasn't found on disk
 		// don't fail silently
 		for _, err := range errs {
@@ -109,10 +115,13 @@ func NewRuleset(rulePaths string) (RuleSet, error) {
 				panic(errors.Join(e, err))
 			}
 		}
 		// else, bubble up any errors, such as syntax or remote host issues
 		return ruleSet, errors.Join(errs...)
 	}
 	ruleSet.PrintStats()
 	return ruleSet, nil
 }
@@ -146,13 +155,16 @@ func (rs *RuleSet) loadRulesFromLocalDir(path string) error {
log.Printf("WARN: failed to load directory ruleset '%s': %s, skipping", path, err) log.Printf("WARN: failed to load directory ruleset '%s': %s, skipping", path, err)
return nil return nil
} }
log.Printf("INFO: loaded ruleset %s\n", path) log.Printf("INFO: loaded ruleset %s\n", path)
return nil return nil
}) })
if err != nil { if err != nil {
return err return err
} }
return nil return nil
} }
@@ -167,42 +179,51 @@ func (rs *RuleSet) loadRulesFromLocalFile(path string) error {
 	var r RuleSet
 	err = yaml.Unmarshal(yamlFile, &r)
 	if err != nil {
 		e := fmt.Errorf("failed to load rules from local file, possible syntax error in '%s'", path)
 		ee := errors.Join(e, err)
 		if _, ok := os.LookupEnv("DEBUG"); ok {
 			debugPrintRule(string(yamlFile), ee)
 		}
 		return ee
 	}
 	*rs = append(*rs, r...)
 	return nil
 }
 
 // loadRulesFromRemoteFile loads rules from a remote URL.
 // It supports plain and gzip compressed content.
 // Returns an error if there's an issue accessing the URL or if there's a syntax error in the YAML.
-func (rs *RuleSet) loadRulesFromRemoteFile(rulesUrl string) error {
+func (rs *RuleSet) loadRulesFromRemoteFile(rulesURL string) error {
 	var r RuleSet
-	resp, err := http.Get(rulesUrl)
+	resp, err := http.Get(rulesURL)
 	if err != nil {
-		e := fmt.Errorf("failed to load rules from remote url '%s'", rulesUrl)
+		e := fmt.Errorf("failed to load rules from remote url '%s'", rulesURL)
 		return errors.Join(e, err)
 	}
 	defer resp.Body.Close()
 	if resp.StatusCode >= 400 {
-		e := fmt.Errorf("failed to load rules from remote url (%s) on '%s'", resp.Status, rulesUrl)
+		e := fmt.Errorf("failed to load rules from remote url (%s) on '%s'", resp.Status, rulesURL)
 		return errors.Join(e, err)
 	}
 	var reader io.Reader
-	isGzip := strings.HasSuffix(rulesUrl, ".gz") || strings.HasSuffix(rulesUrl, ".gzip") || resp.Header.Get("content-encoding") == "gzip"
+	isGzip := strings.HasSuffix(rulesURL, ".gz") || strings.HasSuffix(rulesURL, ".gzip") || resp.Header.Get("content-encoding") == "gzip"
 	if isGzip {
 		reader, err = gzip.NewReader(resp.Body)
 		if err != nil {
-			return fmt.Errorf("failed to create gzip reader for URL '%s' with status code '%s': %w", rulesUrl, resp.Status, err)
+			return fmt.Errorf("failed to create gzip reader for URL '%s' with status code '%s': %w", rulesURL, resp.Status, err)
 		}
 	} else {
 		reader = resp.Body
@@ -211,12 +232,14 @@ func (rs *RuleSet) loadRulesFromRemoteFile(rulesUrl string) error {
 	err = yaml.NewDecoder(reader).Decode(&r)
 	if err != nil {
-		e := fmt.Errorf("failed to load rules from remote url '%s' with status code '%s' and possible syntax error", rulesUrl, resp.Status)
+		e := fmt.Errorf("failed to load rules from remote url '%s' with status code '%s' and possible syntax error", rulesURL, resp.Status)
 		ee := errors.Join(e, err)
 		return ee
 	}
 	*rs = append(*rs, r...)
 	return nil
 }
@@ -228,6 +251,7 @@ func (rs *RuleSet) Yaml() (string, error) {
 	if err != nil {
 		return "", err
 	}
 	return string(y), nil
 }
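As the NewRuleset hunk above shows, the rulePaths argument is split on ';', each entry is trimmed, and the package-level remoteRegex decides whether it is fetched over HTTP or read from disk. A short hedged sketch of a caller (both the local path and the URL are placeholders):

package main

import (
	"fmt"

	"ladder/pkg/ruleset"
)

func main() {
	// One local path and one remote URL, separated by ';' (surrounding spaces are trimmed).
	// Both entries are placeholders for illustration.
	rs, err := ruleset.NewRuleset("rulesets/example.yaml; https://example.com/ruleset.yaml")
	if err != nil {
		// Per the hunks above, load failures are either panicked on (missing local file)
		// or aggregated and returned via errors.Join.
		fmt.Println(err)
	}

	fmt.Println("loaded", rs.Count(), "rules") // Count() is used the same way in the package tests
}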

View File

@@ -33,6 +33,7 @@ func TestLoadRulesFromRemoteFile(t *testing.T) {
 		c.SendString(validYAML)
 		return nil
 	})
 	app.Get("/invalid-config.yml", func(c *fiber.Ctx) error {
 		c.SendString(invalidYAML)
 		return nil
@@ -40,10 +41,12 @@ func TestLoadRulesFromRemoteFile(t *testing.T) {
app.Get("/valid-config.gz", func(c *fiber.Ctx) error { app.Get("/valid-config.gz", func(c *fiber.Ctx) error {
c.Set("Content-Type", "application/octet-stream") c.Set("Content-Type", "application/octet-stream")
rs, err := loadRuleFromString(validYAML) rs, err := loadRuleFromString(validYAML)
if err != nil { if err != nil {
t.Errorf("failed to load valid yaml from string: %s", err.Error()) t.Errorf("failed to load valid yaml from string: %s", err.Error())
} }
s, err := rs.GzipYaml() s, err := rs.GzipYaml()
if err != nil { if err != nil {
t.Errorf("failed to load gzip serialize yaml: %s", err.Error()) t.Errorf("failed to load gzip serialize yaml: %s", err.Error())
@@ -70,15 +73,18 @@ func TestLoadRulesFromRemoteFile(t *testing.T) {
 	if err != nil {
 		t.Errorf("failed to load plaintext ruleset from http server: %s", err.Error())
 	}
 	assert.Equal(t, rs[0].Domain, "example.com")
 	rs, err = NewRuleset("http://127.0.0.1:9999/valid-config.gz")
 	if err != nil {
 		t.Errorf("failed to load gzipped ruleset from http server: %s", err.Error())
 	}
 	assert.Equal(t, rs[0].Domain, "example.com")
 	os.Setenv("RULESET", "http://127.0.0.1:9999/valid-config.gz")
 	rs = NewRulesetFromEnv()
 	if !assert.Equal(t, rs[0].Domain, "example.com") {
 		t.Error("expected no errors loading ruleset from gzip url using environment variable, but got one")
@@ -88,10 +94,14 @@ func TestLoadRulesFromRemoteFile(t *testing.T) {
 func loadRuleFromString(yaml string) (RuleSet, error) {
 	// Create a temporary file and load it
 	tmpFile, _ := os.CreateTemp("", "ruleset*.yaml")
 	defer os.Remove(tmpFile.Name())
 	tmpFile.WriteString(yaml)
 	rs := RuleSet{}
 	err := rs.loadRulesFromLocalFile(tmpFile.Name())
 	return rs, err
 }
@@ -101,6 +111,7 @@ func TestLoadRulesFromLocalFile(t *testing.T) {
 	if err != nil {
 		t.Errorf("Failed to load rules from valid YAML: %s", err)
 	}
 	assert.Equal(t, rs[0].Domain, "example.com")
 	assert.Equal(t, rs[0].RegexRules[0].Match, "^http:")
 	assert.Equal(t, rs[0].RegexRules[0].Replace, "https:")
@@ -118,30 +129,39 @@ func TestLoadRulesFromLocalDir(t *testing.T) {
 	if err != nil {
 		t.Fatalf("Failed to create temporary directory: %s", err)
 	}
 	defer os.RemoveAll(baseDir)
 	// Create a nested subdirectory
 	nestedDir := filepath.Join(baseDir, "nested")
-	err = os.Mkdir(nestedDir, 0755)
+	err = os.Mkdir(nestedDir, 0o755)
 	if err != nil {
 		t.Fatalf("Failed to create nested directory: %s", err)
 	}
 	// Create a nested subdirectory
 	nestedTwiceDir := filepath.Join(nestedDir, "nestedTwice")
-	err = os.Mkdir(nestedTwiceDir, 0755)
+	err = os.Mkdir(nestedTwiceDir, 0o755)
+	if err != nil {
+		t.Fatalf("Failed to create twice-nested directory: %s", err)
+	}
 	testCases := []string{"test.yaml", "test2.yaml", "test-3.yaml", "test 4.yaml", "1987.test.yaml.yml", "foobar.example.com.yaml", "foobar.com.yml"}
 	for _, fileName := range testCases {
 		filePath := filepath.Join(nestedDir, "2x-"+fileName)
-		os.WriteFile(filePath, []byte(validYAML), 0644)
+		os.WriteFile(filePath, []byte(validYAML), 0o644)
 		filePath = filepath.Join(nestedDir, fileName)
-		os.WriteFile(filePath, []byte(validYAML), 0644)
+		os.WriteFile(filePath, []byte(validYAML), 0o644)
 		filePath = filepath.Join(baseDir, "base-"+fileName)
-		os.WriteFile(filePath, []byte(validYAML), 0644)
+		os.WriteFile(filePath, []byte(validYAML), 0o644)
 	}
 	rs := RuleSet{}
 	err = rs.loadRulesFromLocalDir(baseDir)
 	assert.NoError(t, err)
 	assert.Equal(t, rs.Count(), len(testCases)*3)