From a007314153a9e44663aa2479c997b725d6c2ac6f Mon Sep 17 00:00:00 2001 From: Alexei Shevchenko Date: Fri, 10 Jun 2022 05:05:09 +0300 Subject: [PATCH] Feature/custom headers and cookies (#20) * user-specified headers and cookies * actions update * go version up --- .github/dependabot.yml | 4 +- .github/workflows/ci.yml | 20 +++--- .github/workflows/release.yml | 8 +-- .golangci.yml | 4 ++ README.md | 6 ++ cmd/crawley/main.go | 79 +++++++++++++++++------- go.mod | 4 +- go.sum | 9 +-- pkg/client/cookie.go | 45 ++++++++++++++ pkg/client/cookie_test.go | 36 +++++++++++ pkg/client/header.go | 38 ++++++++++++ pkg/client/header_test.go | 37 +++++++++++ pkg/client/http.go | 53 ++++++++++++---- pkg/client/http_test.go | 21 +++++-- pkg/crawler/config.go | 2 + pkg/crawler/config_test.go | 16 +++++ pkg/crawler/crawl.go | 8 ++- pkg/crawler/options.go | 14 +++++ pkg/values/loader.go | 67 ++++++++++++++++++++ pkg/values/loader_test.go | 112 ++++++++++++++++++++++++++++++++++ 20 files changed, 520 insertions(+), 63 deletions(-) create mode 100644 pkg/client/cookie.go create mode 100644 pkg/client/cookie_test.go create mode 100644 pkg/client/header.go create mode 100644 pkg/client/header_test.go create mode 100644 pkg/values/loader.go create mode 100644 pkg/values/loader_test.go diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7e084bc..c19f579 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,8 +4,8 @@ updates: - package-ecosystem: gomod directory: / schedule: - interval: daily + interval: monthly - package-ecosystem: "github-actions" directory: "/" schedule: - interval: daily + interval: monthly diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4532bba..16cb89e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,20 +16,20 @@ jobs: name: ci steps: - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: golangci-lint - uses: golangci/golangci-lint-action@v2 + uses: 
golangci/golangci-lint-action@v3 test: runs-on: ubuntu-latest environment: name: ci steps: - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: setup golang - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: - go-version: ^1.17 + go-version: ^1.18 - name: test-coverage uses: paambaati/codeclimate-action@v3.0.0 env: @@ -44,14 +44,14 @@ jobs: name: ci steps: - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: setup golang - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: - go-version: ^1.17 + go-version: ^1.18 - name: init codeql - uses: github/codeql-action/init@v1 + uses: github/codeql-action/init@v2 with: language: 'go' - name: run analysis - uses: github/codeql-action/analyze@v1 + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d12e429..7d90ef2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,13 +11,13 @@ jobs: runs-on: ubuntu-latest steps: - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: set up golang - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: - go-version: ^1.17 + go-version: ^1.18 - name: build - uses: goreleaser/goreleaser-action@v2 + uses: goreleaser/goreleaser-action@v3 with: version: latest args: release -f .goreleaser.yml --rm-dist diff --git a/.golangci.yml b/.golangci.yml index 9e2b6c6..501d2eb 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -11,11 +11,14 @@ linters: disable: - exhaustivestruct - gochecknoglobals + - nonamedreturns - testpackage - exhaustive + - exhaustruct - varnamelen - forbidigo - typecheck + - gofumpt - gci # deprecated :( - interfacer @@ -42,3 +45,4 @@ issues: - cyclop - dupl - goerr113 + - errcheck diff --git a/README.md b/README.md index 44ebf29..232f950 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,8 @@ Crawls web pages and prints any link it can find. 
- `brute` mode - scan html comments for urls (this can lead to bogus results) - make use of `HTTP_PROXY` / `HTTPS_PROXY` environment values - directory-only scan mode (aka `fast-scan`) +- user-defined cookies, in curl-compatible format (i.e. `-cookie "ONE=1; TWO=2" -cookie "EXT=3" -cookie @cookie-file`) +- user-defined headers, same as curl: `-header "ONE: 1" -header "TWO: 2" -header @headers-file` # installation @@ -43,12 +45,16 @@ possible flags: -brute scan html comments +-cookie value + extra cookies for request, can be used multiple times, accept files with '@'-prefix -delay duration per-request delay (0 - disable) (default 150ms) -depth int scan depth (-1 - unlimited) -dirs string policy for non-resource urls: show / hide / only (default "show") +-header value + extra headers for request, can be used multiple times, accept files with '@'-prefix -headless disable pre-flight HEAD requests -help diff --git a/cmd/crawley/main.go b/cmd/crawley/main.go index 7aa9ea7..e35b2c7 100644 --- a/cmd/crawley/main.go +++ b/cmd/crawley/main.go @@ -12,6 +12,7 @@ import ( "time" "github.com/s0rg/crawley/pkg/crawler" + "github.com/s0rg/crawley/pkg/values" ) const ( @@ -24,6 +25,8 @@ var ( gitHash string gitVersion string buildDate string + extCookies values.List + extHeaders values.List fVersion = flag.Bool("version", false, "show version") fBrute = flag.Bool("brute", false, "scan html comments") fSkipSSL = flag.Bool("skip-ssl", false, "skip ssl verification") @@ -57,21 +60,7 @@ func crawl(uri string, opts ...crawler.Option) error { return nil } -func main() { - flag.Parse() - - if *fVersion { - fmt.Printf("%s %s-%s build at: %s site: %s\n", appName, gitVersion, gitHash, buildDate, appSite) - - return - } - - if flag.NArg() != 1 { - flag.Usage() - - return - } - +func options() (rv []crawler.Option) { robots, err := crawler.ParseRobotsPolicy(*fRobotsPolicy) if err != nil { log.Fatal("robots policy:", err) @@ -82,12 +71,24 @@ func main() { log.Fatal("dirs policy:", err) } - if 
*fSilent { - log.SetOutput(io.Discard) + workdir, err := os.Getwd() + if err != nil { + log.Fatal("work dir:", err) } - if err := crawl( - flag.Arg(0), + fs := os.DirFS(workdir) + + headers, err := extHeaders.Load(fs) + if err != nil { + log.Fatal("headers:", err) + } + + cookies, err := extCookies.Load(fs) + if err != nil { + log.Fatal("cookies:", err) + } + + return []crawler.Option{ crawler.WithUserAgent(*fUA), crawler.WithDelay(*fDelay), crawler.WithMaxCrawlDepth(*fDepth), @@ -97,9 +98,43 @@ func main() { crawler.WithDirsPolicy(dirs), crawler.WithRobotsPolicy(robots), crawler.WithoutHeads(*fNoHeads), - ); err != nil { - // forcing back stderr in case of errors, otherwise if 'silent' is on - - // no one will know what happened. + crawler.WithExtraHeaders(headers), + crawler.WithExtraCookies(cookies), + } +} + +func main() { + flag.Var( + &extHeaders, + "header", + "extra headers for request, can be used multiple times, accept files with '@'-prefix", + ) + flag.Var( + &extCookies, + "cookie", + "extra cookies for request, can be used multiple times, accept files with '@'-prefix", + ) + flag.Parse() + + if *fVersion { + fmt.Printf("%s %s-%s build at: %s site: %s\n", appName, gitVersion, gitHash, buildDate, appSite) + + return + } + + if flag.NArg() != 1 { + flag.Usage() + + return + } + + if *fSilent { + log.SetOutput(io.Discard) + } + + if err := crawl(flag.Arg(0), options()...); err != nil { + // forcing back stderr in case of errors, otherwise + // if 'silent' is on - no one will know what happened. 
log.SetOutput(os.Stderr) log.Fatal("crawler:", err) } diff --git a/go.mod b/go.mod index cc58587..f1d97ca 100644 --- a/go.mod +++ b/go.mod @@ -1,5 +1,5 @@ module github.com/s0rg/crawley -go 1.17 +go 1.18 -require golang.org/x/net v0.0.0-20211216030914-fe4d6282115f +require golang.org/x/net v0.0.0-20220607020251-c690dde0001d diff --git a/go.sum b/go.sum index cc3a21a..88dbbc3 100644 --- a/go.sum +++ b/go.sum @@ -1,7 +1,2 @@ -golang.org/x/net v0.0.0-20211216030914-fe4d6282115f h1:hEYJvxw1lSnWIl8X9ofsYMklzaDs90JI2az5YMd4fPM= -golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d h1:4SFsTMi4UahlKoloni7L4eYzhFRifURQLw+yv0QDCx8= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= diff --git a/pkg/client/cookie.go b/pkg/client/cookie.go new file mode 100644 index 0000000..4ac4141 --- /dev/null +++ b/pkg/client/cookie.go @@ -0,0 +1,45 @@ +package client + +import ( + "net/http" + "strings" +) + +const ( + keyvalParts = 2 + keyvalSeparator = "=" + valuesSeparator = ";" +) + +func prepareCookies(raw []string) (rv []*http.Cookie) { + for _, r := range raw { + for _, p := range strings.Split(r, valuesSeparator) { + if val, ok := parseOne(p); ok { + rv = append(rv, val) + } + } + } + + return rv +} + +func parseOne(raw string) (rv *http.Cookie, ok bool) { + pair := strings.SplitN(raw, keyvalSeparator, 
keyvalParts) + + var name, value string + + if name = strings.TrimSpace(pair[0]); name == "" { + return + } + + if value = strings.TrimSpace(pair[1]); value == "" { + return + } + + rv = &http.Cookie{ + Name: name, + Value: value, + } + + return rv, true +} diff --git a/pkg/client/cookie_test.go b/pkg/client/cookie_test.go new file mode 100644 index 0000000..9bdb3a1 --- /dev/null +++ b/pkg/client/cookie_test.go @@ -0,0 +1,36 @@ +package client + +import ( + "net/http" + "reflect" + "testing" +) + +func Test_prepareCookies(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + args []string + want []*http.Cookie + }{ + {"1", + []string{"NAME1=VALUE1; NAME2=VALUE2", "NAME3=VALUE3"}, + []*http.Cookie{ + {Name: "NAME1", Value: "VALUE1"}, + {Name: "NAME2", Value: "VALUE2"}, + {Name: "NAME3", Value: "VALUE3"}, + }}, + {"2", + []string{"", "NAME=", "=VALUE", ";;", "===", " VALID = COOKIE "}, + []*http.Cookie{ + {Name: "VALID", Value: "COOKIE"}, + }}, + } + + for _, tt := range tests { + if got := prepareCookies(tt.args); !reflect.DeepEqual(got, tt.want) { + t.Errorf("prepareCookies() = %v, want %v", got, tt.want) + } + } +} diff --git a/pkg/client/header.go b/pkg/client/header.go new file mode 100644 index 0000000..4e49b98 --- /dev/null +++ b/pkg/client/header.go @@ -0,0 +1,38 @@ +package client + +import "strings" + +const ( + headerParts = 2 + headerSeparator = ":" +) + +type header struct { + Key string + Val string +} + +func prepareHeaders(raw []string) (rv []*header) { + rv = make([]*header, 0, len(raw)) + + var ( + pair []string + key, val string + ) + + for _, h := range raw { + pair = strings.SplitN(h, headerSeparator, headerParts) + + if key = strings.TrimSpace(pair[0]); key == "" { + continue + } + + if val = strings.TrimSpace(pair[1]); val == "" { + continue + } + + rv = append(rv, &header{Key: key, Val: val}) + } + + return rv +} diff --git a/pkg/client/header_test.go b/pkg/client/header_test.go new file mode 100644 index 0000000..f837309 --- 
/dev/null +++ b/pkg/client/header_test.go @@ -0,0 +1,37 @@ +package client + +import ( + "reflect" + "testing" +) + +func Test_prepareHeaders(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + args []string + want []*header + }{ + {"1", + []string{"foo: bar", "test: me"}, + []*header{ + {"foo", "bar"}, + {"test", "me"}, + }, + }, + {"2", + []string{" one: 1", "junk-key:", "two : 2 ", ":junk-val"}, + []*header{ + {"one", "1"}, + {"two", "2"}, + }, + }, + } + + for _, tt := range tests { + if got := prepareHeaders(tt.args); !reflect.DeepEqual(got, tt.want) { + t.Errorf("prepareHeaders() = %v, want %v", got, tt.want) + } + } +} diff --git a/pkg/client/http.go b/pkg/client/http.go index d0c11e6..593c964 100644 --- a/pkg/client/http.go +++ b/pkg/client/http.go @@ -17,12 +17,19 @@ const ( // HTTP holds pre-configured http.Client. type HTTP struct { - ua string - c *http.Client + c *http.Client + ua string + cookies []*http.Cookie + headers []*header } // New creates and configure client for later use. 
-func New(ua string, conns int, skipSSL bool) (h *HTTP) { +func New( + ua string, + conns int, + skipSSL bool, + headers, cookies []string, +) (h *HTTP) { transport := &http.Transport{ Proxy: http.ProxyFromEnvironment, Dial: (&net.Dialer{ @@ -44,8 +51,10 @@ func New(ua string, conns int, skipSSL bool) (h *HTTP) { } return &HTTP{ - c: client, - ua: ua, + ua: ua, + c: client, + headers: prepareHeaders(headers), + cookies: prepareCookies(cookies), } } @@ -53,7 +62,12 @@ func New(ua string, conns int, skipSSL bool) (h *HTTP) { func (h *HTTP) Get(ctx context.Context, url string) (body io.ReadCloser, hdrs http.Header, err error) { var req *http.Request - if req, err = http.NewRequestWithContext(ctx, http.MethodGet, url, http.NoBody); err != nil { + if req, err = http.NewRequestWithContext( + ctx, + http.MethodGet, + url, + http.NoBody, + ); err != nil { return } @@ -68,7 +82,12 @@ func (h *HTTP) Get(ctx context.Context, url string) (body io.ReadCloser, hdrs ht func (h *HTTP) Head(ctx context.Context, url string) (hdrs http.Header, err error) { var req *http.Request - if req, err = http.NewRequestWithContext(ctx, http.MethodHead, url, http.NoBody); err != nil { + if req, err = http.NewRequestWithContext( + ctx, + http.MethodHead, + url, + http.NoBody, + ); err != nil { return } @@ -83,12 +102,20 @@ func (h *HTTP) Head(ctx context.Context, url string) (hdrs http.Header, err erro return hdrs, nil } +// Discard read all contents from ReaderCloser, closing it afterwards. 
+func Discard(rc io.ReadCloser) { + _, _ = io.Copy(io.Discard, rc) + _ = rc.Close() +} + func (h *HTTP) request(req *http.Request) (body io.ReadCloser, hdrs http.Header, err error) { req.Header.Set("Accept", "text/html,application/xhtml+xml;q=0.9,*/*;q=0.5") req.Header.Set("Accept-Language", "en-US,en;q=0.8") req.Header.Set("Cache-Control", "no-cache") req.Header.Set("User-Agent", h.ua) + h.enrich(req) + var resp *http.Response if resp, err = h.c.Do(req); err != nil { @@ -102,8 +129,12 @@ func (h *HTTP) request(req *http.Request) (body io.ReadCloser, hdrs http.Header, return resp.Body, resp.Header, err } -// Discard read all contents from ReaderCloser, closing it afterwards. -func Discard(rc io.ReadCloser) { - _, _ = io.Copy(io.Discard, rc) - _ = rc.Close() +func (h *HTTP) enrich(req *http.Request) { + for _, hdr := range h.headers { + req.Header.Set(hdr.Key, hdr.Val) + } + + for _, cck := range h.cookies { + req.AddCookie(cck) + } } diff --git a/pkg/client/http_test.go b/pkg/client/http_test.go index 4ed797e..218eefb 100644 --- a/pkg/client/http_test.go +++ b/pkg/client/http_test.go @@ -15,7 +15,7 @@ const ( func TestHTTPGetOK(t *testing.T) { t.Parallel() - c := New(ua, 1, false) + c := New(ua, 1, false, []string{"FOO: BAR"}, []string{"NAME=VAL"}) const body = "test-body" @@ -28,6 +28,19 @@ func TestHTTPGetOK(t *testing.T) { t.Error("agent") } + if r.Header.Get("FOO") != "BAR" { + t.Error("extra headers") + } + + c, err := r.Cookie("NAME") + if err != nil { + t.Error("extra cookies - retrieve") + } + + if c.Value != "VAL" { + t.Error("extra cookies - value") + } + _, _ = io.WriteString(w, body) })) @@ -55,7 +68,7 @@ func TestHTTPGetOK(t *testing.T) { func TestHTTPGetERR(t *testing.T) { t.Parallel() - c := New("", 1, false) + c := New("", 1, false, []string{}, []string{}) ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotFound) @@ -81,7 +94,7 @@ func TestHTTPGetERR(t *testing.T) { func 
TestHTTPHeadOK(t *testing.T) { t.Parallel() - c := New(ua, 1, false) + c := New(ua, 1, false, []string{}, []string{}) const ( key = "x-some-key" @@ -116,7 +129,7 @@ func TestHTTPHeadOK(t *testing.T) { func TestHTTPHeadERR(t *testing.T) { t.Parallel() - c := New("", 1, false) + c := New("", 1, false, []string{}, []string{}) ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusInternalServerError) diff --git a/pkg/crawler/config.go b/pkg/crawler/config.go index 501ba08..baa9e87 100644 --- a/pkg/crawler/config.go +++ b/pkg/crawler/config.go @@ -13,6 +13,8 @@ const ( ) type config struct { + Headers []string + Cookies []string UserAgent string Delay time.Duration Workers int diff --git a/pkg/crawler/config_test.go b/pkg/crawler/config_test.go index ea86f2f..5e0288a 100644 --- a/pkg/crawler/config_test.go +++ b/pkg/crawler/config_test.go @@ -1,6 +1,7 @@ package crawler import ( + "reflect" "strings" "testing" "time" @@ -46,6 +47,11 @@ func TestOptions(t *testing.T) { fbool = true ) + var ( + extHeaders = []string{"foo: bar"} + extCookies = []string{"name=val"} + ) + t.Parallel() opts := []Option{ @@ -58,6 +64,8 @@ func TestOptions(t *testing.T) { WithBruteMode(fbool), WithSkipSSL(fbool), WithoutHeads(fbool), + WithExtraHeaders(extHeaders), + WithExtraCookies(extCookies), } c := &config{} @@ -103,6 +111,14 @@ func TestOptions(t *testing.T) { if c.Dirs != dp { t.Error("bad dirs policy") } + + if !reflect.DeepEqual(c.Headers, extHeaders) { + t.Error("bad extra headers") + } + + if !reflect.DeepEqual(c.Cookies, extCookies) { + t.Error("bad extra cookies") + } } func TestString(t *testing.T) { diff --git a/pkg/crawler/crawl.go b/pkg/crawler/crawl.go index 2fe7061..73fc567 100644 --- a/pkg/crawler/crawl.go +++ b/pkg/crawler/crawl.go @@ -93,7 +93,13 @@ func (c *Crawler) Run(uri string, fn func(string)) (err error) { seen := make(set.U64) seen.Add(urlHash(uri)) - web := client.New(c.cfg.UserAgent, c.cfg.Workers, 
c.cfg.SkipSSL) + web := client.New( + c.cfg.UserAgent, + c.cfg.Workers, + c.cfg.SkipSSL, + c.cfg.Headers, + c.cfg.Cookies, + ) c.initRobots(base, web) for i := 0; i < c.cfg.Workers; i++ { diff --git a/pkg/crawler/options.go b/pkg/crawler/options.go index 7f31bf4..1e42b2a 100644 --- a/pkg/crawler/options.go +++ b/pkg/crawler/options.go @@ -67,3 +67,17 @@ func WithoutHeads(v bool) Option { c.NoHEAD = v } } + +// WithExtraHeaders add extra HTTP headers to requests. +func WithExtraHeaders(v []string) Option { + return func(c *config) { + c.Headers = v + } +} + +// WithExtraCookies add cookies to requests. +func WithExtraCookies(v []string) Option { + return func(c *config) { + c.Cookies = v + } +} diff --git a/pkg/values/loader.go b/pkg/values/loader.go new file mode 100644 index 0000000..89f2ebb --- /dev/null +++ b/pkg/values/loader.go @@ -0,0 +1,67 @@ +package values + +import ( + "bufio" + "bytes" + "fmt" + "io/fs" +) + +const fileMarker = '@' // curl-compatible + +type List struct { + values []string +} + +func (l *List) String() (rv string) { + return +} + +func (l *List) Set(val string) (err error) { + l.values = append(l.values, val) + + return +} + +func (l *List) Load( + target fs.FS, +) (rv []string, err error) { + rv = make([]string, 0, len(l.values)) + + var vals []string + + for _, v := range l.values { + if v[0] == fileMarker { + if vals, err = l.loadFile(target, v[1:]); err != nil { + return + } + + rv = append(rv, vals...) 
+ } else { + rv = append(rv, v) + } + } + + return +} + +func (l *List) loadFile( + target fs.FS, + name string, +) (rv []string, err error) { + var body []byte + + if body, err = fs.ReadFile(target, name); err != nil { + err = fmt.Errorf("read: %w", err) + + return + } + + s := bufio.NewScanner(bytes.NewReader(body)) + + for s.Scan() { + rv = append(rv, s.Text()) + } + + return rv, nil +} diff --git a/pkg/values/loader_test.go b/pkg/values/loader_test.go new file mode 100644 index 0000000..e5b932a --- /dev/null +++ b/pkg/values/loader_test.go @@ -0,0 +1,112 @@ +package values + +import ( + "io/fs" + "testing" + "testing/fstest" +) + +func TestListSet(t *testing.T) { + t.Parallel() + + var ( + l List + err error + res []string + ) + + if err = l.Set("a"); err != nil { + t.Fatalf("set a - unexpected error: %v", err) + } + + if res, err = l.Load(nil); err != nil { + t.Fatalf("load a - unexpected error: %v", err) + } + + if len(res) != 1 { + t.Fatalf("len a - unexpected length: %d", len(res)) + } + + if res[0] != "a" { + t.Fatalf("res a - unexpected value: %v", res[0]) + } + + if err = l.Set("b"); err != nil { + t.Fatalf("set b - unexpected error: %v", err) + } + + if res, err = l.Load(nil); err != nil { + t.Fatalf("load b - unexpected error: %v", err) + } + + if len(res) != 2 { + t.Fatalf("len b - unexpected length: %d", len(res)) + } + + if res[1] != "b" { + t.Fatalf("res b - unexpected value: %v", res[1]) + } +} + +func TestListString(t *testing.T) { + t.Parallel() + + var l List + + if l.String() != "" { + t.Fatal("non-empty result") + } +} + +func TestLoadFS(t *testing.T) { + t.Parallel() + + fsys := fstest.MapFS{ + "foo": {Data: []byte("foo1\nfoo2")}, + "bar": {Data: []byte("bar1\nbar2")}, + } + + var l List + + l.Set("foo0") + l.Set("@foo") + + res, err := l.Load(fsys) + if err != nil { + t.Fatal(err) + } + + if len(res) != 3 { + t.Fatal("unexpected length", len(res)) + } + + if res[0] != "foo0" { + t.Fatal("unexpected value 0") + } + + if res[1] != "foo1" { + 
t.Fatal("unexpected value 1") + } + + if res[2] != "foo2" { + t.Fatal("unexpected value 2") + } +} + +func TestLoadFSErrorDir(t *testing.T) { + t.Parallel() + + fsys := fstest.MapFS{ + "foo": {Mode: 0o777 | fs.ModeDir}, + } + + var l List + + l.Set("foo0") + l.Set("@foo") + + _, err := l.Load(fsys) + if err == nil { + t.Fatal("unexpected nil-error") + } +}