From aec1ee3ba05cd23362c266c297bb57f602775f9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aleksa=20Siri=C5=A1ki?= <31509435+aleksasiriski@users.noreply.github.com> Date: Wed, 19 Jun 2024 12:09:25 +0200 Subject: [PATCH 1/3] fix: switch to consts instead of runtime structs and comments --- src/search/engines/bing/info.go | 12 ++++++++ src/search/engines/bing/infoparams.go | 20 ------------- src/search/engines/bing/params.go | 9 +++++- src/search/engines/bing/search.go | 20 ++++++------- src/search/engines/bing/search_test.go | 7 ++--- src/search/engines/bingimages/info.go | 12 ++++++++ src/search/engines/bingimages/infoparams.go | 23 -------------- src/search/engines/bingimages/params.go | 12 +++++++- src/search/engines/bingimages/search.go | 18 +++++------ src/search/engines/bingimages/search_test.go | 7 ++--- src/search/engines/brave/cookies.go | 21 ------------- src/search/engines/brave/info.go | 12 ++++++++ src/search/engines/brave/infoparams.go | 22 -------------- src/search/engines/brave/params.go | 30 +++++++++++++++++++ src/search/engines/brave/search.go | 20 ++++++------- src/search/engines/brave/search_test.go | 7 ++--- src/search/engines/duckduckgo/cookies.go | 13 -------- src/search/engines/duckduckgo/info.go | 12 ++++++++ src/search/engines/duckduckgo/infoparams.go | 19 ------------ src/search/engines/duckduckgo/params.go | 19 ++++++++++++ src/search/engines/duckduckgo/search.go | 20 ++++++------- src/search/engines/duckduckgo/search_test.go | 7 ++--- src/search/engines/etools/info.go | 13 ++++++++ src/search/engines/etools/infoparams.go | 23 -------------- src/search/engines/etools/params.go | 12 ++++++-- src/search/engines/etools/search.go | 20 ++++++------- src/search/engines/etools/search_test.go | 7 ++--- src/search/engines/google/info.go | 12 ++++++++ src/search/engines/google/infoparams.go | 22 -------------- src/search/engines/google/params.go | 15 ++++++++-- src/search/engines/google/search.go | 22 +++++++------- 
src/search/engines/google/search_test.go | 7 ++--- src/search/engines/googleimages/info.go | 12 ++++++++ src/search/engines/googleimages/infoparams.go | 24 --------------- src/search/engines/googleimages/params.go | 17 +++++++++-- src/search/engines/googleimages/search.go | 22 +++++++------- .../engines/googleimages/search_test.go | 7 ++--- src/search/engines/googlescholar/info.go | 12 ++++++++ .../engines/googlescholar/infoparams.go | 22 -------------- src/search/engines/googlescholar/params.go | 15 ++++++++-- src/search/engines/googlescholar/search.go | 22 +++++++------- .../engines/googlescholar/search_test.go | 7 ++--- src/search/engines/mojeek/info.go | 12 ++++++++ src/search/engines/mojeek/infoparams.go | 20 ------------- src/search/engines/mojeek/params.go | 13 ++++++-- src/search/engines/mojeek/search.go | 22 +++++++------- src/search/engines/mojeek/search_test.go | 7 ++--- src/search/engines/presearch/cookies.go | 13 -------- src/search/engines/presearch/info.go | 12 ++++++++ src/search/engines/presearch/infoparams.go | 18 ----------- src/search/engines/presearch/params.go | 18 +++++++++++ src/search/engines/presearch/search.go | 18 +++++------ src/search/engines/presearch/search_test.go | 7 ++--- src/search/engines/qwant/info.go | 12 ++++++++ src/search/engines/qwant/infoparams.go | 21 ------------- src/search/engines/qwant/params.go | 16 +++++++--- src/search/engines/qwant/search.go | 20 ++++++------- src/search/engines/qwant/search_test.go | 7 ++--- src/search/engines/startpage/info.go | 12 ++++++++ src/search/engines/startpage/infoparams.go | 18 ----------- src/search/engines/startpage/params.go | 7 ++++- src/search/engines/startpage/search.go | 20 ++++++------- src/search/engines/startpage/search_test.go | 7 ++--- src/search/engines/swisscows/info.go | 12 ++++++++ src/search/engines/swisscows/infoparams.go | 21 ------------- src/search/engines/swisscows/params.go | 10 ++++++- src/search/engines/swisscows/search.go | 19 ++++++------ 
src/search/engines/swisscows/search_test.go | 7 ++--- src/search/engines/yahoo/cookies.go | 13 -------- src/search/engines/yahoo/info.go | 12 ++++++++ src/search/engines/yahoo/infoparams.go | 20 ------------- src/search/engines/yahoo/params.go | 20 +++++++++++++ src/search/engines/yahoo/search.go | 20 ++++++------- src/search/engines/yahoo/search_test.go | 7 ++--- src/search/engines/yep/info.go | 12 ++++++++ src/search/engines/yep/infoparams.go | 23 -------------- src/search/engines/yep/params.go | 16 ++++++++-- src/search/engines/yep/search.go | 29 ++++++++++-------- src/search/engines/yep/search_test.go | 7 ++--- src/search/result/rank/score.go | 13 ++++---- src/search/scraper/infoparams.go | 17 ----------- 81 files changed, 580 insertions(+), 653 deletions(-) create mode 100644 src/search/engines/bing/info.go delete mode 100644 src/search/engines/bing/infoparams.go create mode 100644 src/search/engines/bingimages/info.go delete mode 100644 src/search/engines/bingimages/infoparams.go delete mode 100644 src/search/engines/brave/cookies.go create mode 100644 src/search/engines/brave/info.go delete mode 100644 src/search/engines/brave/infoparams.go create mode 100644 src/search/engines/brave/params.go delete mode 100644 src/search/engines/duckduckgo/cookies.go create mode 100644 src/search/engines/duckduckgo/info.go delete mode 100644 src/search/engines/duckduckgo/infoparams.go create mode 100644 src/search/engines/duckduckgo/params.go create mode 100644 src/search/engines/etools/info.go delete mode 100644 src/search/engines/etools/infoparams.go create mode 100644 src/search/engines/google/info.go delete mode 100644 src/search/engines/google/infoparams.go create mode 100644 src/search/engines/googleimages/info.go delete mode 100644 src/search/engines/googleimages/infoparams.go create mode 100644 src/search/engines/googlescholar/info.go delete mode 100644 src/search/engines/googlescholar/infoparams.go create mode 100644 src/search/engines/mojeek/info.go delete mode 
100644 src/search/engines/mojeek/infoparams.go delete mode 100644 src/search/engines/presearch/cookies.go create mode 100644 src/search/engines/presearch/info.go delete mode 100644 src/search/engines/presearch/infoparams.go create mode 100644 src/search/engines/presearch/params.go create mode 100644 src/search/engines/qwant/info.go delete mode 100644 src/search/engines/qwant/infoparams.go create mode 100644 src/search/engines/startpage/info.go delete mode 100644 src/search/engines/startpage/infoparams.go create mode 100644 src/search/engines/swisscows/info.go delete mode 100644 src/search/engines/swisscows/infoparams.go delete mode 100644 src/search/engines/yahoo/cookies.go create mode 100644 src/search/engines/yahoo/info.go delete mode 100644 src/search/engines/yahoo/infoparams.go create mode 100644 src/search/engines/yahoo/params.go create mode 100644 src/search/engines/yep/info.go delete mode 100644 src/search/engines/yep/infoparams.go delete mode 100644 src/search/scraper/infoparams.go diff --git a/src/search/engines/bing/info.go b/src/search/engines/bing/info.go new file mode 100644 index 00000000..53ccd1e9 --- /dev/null +++ b/src/search/engines/bing/info.go @@ -0,0 +1,12 @@ +package bing + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.BING + searchURL = "https://www.bing.com/search" +) + +var origins = [...]engines.Name{engines.BING} diff --git a/src/search/engines/bing/infoparams.go b/src/search/engines/bing/infoparams.go deleted file mode 100644 index 01a2134c..00000000 --- a/src/search/engines/bing/infoparams.go +++ /dev/null @@ -1,20 +0,0 @@ -package bing - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.BING, - Domain: "www.bing.com", - URL: "https://www.bing.com/search", - Origins: []engines.Name{engines.BING}, -} - -var params = scraper.Params{ - Page: "first", - Locale: "setlang", // Should be first 2 
characters of Locale. - LocaleSec: "cc", // Should be last 2 characters of Locale. - SafeSearch: "", // Always enabled. -} diff --git a/src/search/engines/bing/params.go b/src/search/engines/bing/params.go index bf143df3..c3a98dfc 100644 --- a/src/search/engines/bing/params.go +++ b/src/search/engines/bing/params.go @@ -7,7 +7,14 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "first" + paramKeyLocale = "setlang" // Should be first 2 characters of Locale. + paramKeyLocaleSec = "cc" // Should be last 2 characters of Locale. + paramKeySafeSearch = "" // Always enabled. +) + func localeParamString(locale options.Locale) string { spl := strings.SplitN(strings.ToLower(locale.String()), "_", 2) - return fmt.Sprintf("%v=%v&%v=%v", params.Locale, spl[0], params.LocaleSec, spl[1]) + return fmt.Sprintf("%v=%v&%v=%v", paramKeyLocale, spl[0], paramKeyLocaleSec, spl[1]) } diff --git a/src/search/engines/bing/search.go b/src/search/engines/bing/search.go index bfcda5df..f726d981 100644 --- a/src/search/engines/bing/search.go +++ b/src/search/engines/bing/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -84,7 +84,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) + paramLocale := localeParamString(opts.Locale) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -92,15 +92,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0*10+1) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*10+1) } - combinedParams := morestrings.JoinNonEmpty([]string{pageParam, localeParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramPage, paramLocale}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/bing/search_test.go b/src/search/engines/bing/search_test.go index be434de1..a50b07ec 100644 --- a/src/search/engines/bing/search_test.go +++ b/src/search/engines/bing/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/bingimages/info.go b/src/search/engines/bingimages/info.go new file mode 100644 index 00000000..45f4b956 --- /dev/null +++ b/src/search/engines/bingimages/info.go @@ -0,0 +1,12 @@ +package bingimages + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.BINGIMAGES + searchURL = "https://www.bing.com/images/async" +) + +var origins = [...]engines.Name{engines.BINGIMAGES} diff --git a/src/search/engines/bingimages/infoparams.go b/src/search/engines/bingimages/infoparams.go deleted file mode 100644 index fd3f52d1..00000000 --- a/src/search/engines/bingimages/infoparams.go +++ /dev/null @@ -1,23 +0,0 @@ -package bingimages - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.BINGIMAGES, - Domain: "www.bing.com", - URL: "https://www.bing.com/images/async", - Origins: []engines.Name{engines.BINGIMAGES}, -} - -var params = scraper.Params{ - Page: "first", - Locale: "setlang", // Should be first 2 characters of Locale. - LocaleSec: "cc", // Should be last 2 characters of Locale. - SafeSearch: "", // Always enabled. -} - -const asyncParam = "async=1" -const countParam = "count=35" diff --git a/src/search/engines/bingimages/params.go b/src/search/engines/bingimages/params.go index 885f1b45..b905690b 100644 --- a/src/search/engines/bingimages/params.go +++ b/src/search/engines/bingimages/params.go @@ -7,7 +7,17 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "first" + paramKeyLocale = "setlang" // Should be first 2 characters of Locale. + paramKeyLocaleSec = "cc" // Should be last 2 characters of Locale. + paramKeySafeSearch = "" // Always enabled. 
+ + paramAsync = "async=1" + paramCount = "count=35" +) + func localeParamString(locale options.Locale) string { spl := strings.SplitN(strings.ToLower(locale.String()), "_", 2) - return fmt.Sprintf("%v=%v&%v=%v", params.Locale, spl[0], params.LocaleSec, spl[1]) + return fmt.Sprintf("%v=%v&%v=%v", paramKeyLocale, spl[0], paramKeyLocaleSec, spl[1]) } diff --git a/src/search/engines/bingimages/search.go b/src/search/engines/bingimages/search.go index 32d904c9..afcfcde7 100644 --- a/src/search/engines/bingimages/search.go +++ b/src/search/engines/bingimages/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -222,7 +222,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) + paramLocale := localeParamString(opts.Locale) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -230,12 +230,12 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := fmt.Sprintf("%v=%v", params.Page, pageNum0*35+1) + paramPage := fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*35+1) - combinedParams := morestrings.JoinNonEmpty([]string{asyncParam, pageParam, countParam, localeParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramAsync, paramPage, paramCount, paramLocale}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/bingimages/search_test.go b/src/search/engines/bingimages/search_test.go index 5e595c18..dfed1785 100644 --- a/src/search/engines/bingimages/search_test.go +++ b/src/search/engines/bingimages/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/brave/cookies.go b/src/search/engines/brave/cookies.go deleted file mode 100644 index da2bc510..00000000 --- a/src/search/engines/brave/cookies.go +++ /dev/null @@ -1,21 +0,0 @@ -package brave - -import ( - "fmt" - "strings" - - "github.com/hearchco/agent/src/search/engines/options" -) - -func localeCookieString(locale options.Locale) string { - region := strings.SplitN(strings.ToLower(locale.String()), "_", 2)[1] - return fmt.Sprintf("%v=%v", params.Locale, region) -} - -func safeSearchCookieString(safesearch bool) string { - if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "strict") - } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "off") - } -} diff --git a/src/search/engines/brave/info.go b/src/search/engines/brave/info.go new file mode 100644 index 00000000..64da1987 --- /dev/null +++ b/src/search/engines/brave/info.go @@ -0,0 +1,12 @@ +package brave + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.BRAVE + searchURL = "https://search.brave.com/search" +) + +var origins = [...]engines.Name{engines.BRAVE, engines.GOOGLE} diff --git a/src/search/engines/brave/infoparams.go b/src/search/engines/brave/infoparams.go deleted file mode 100644 index 018b9bca..00000000 --- a/src/search/engines/brave/infoparams.go +++ /dev/null @@ -1,22 +0,0 @@ -package brave - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.BRAVE, - Domain: "search.brave.com", - URL: "https://search.brave.com/search", - Origins: []engines.Name{engines.BRAVE, engines.GOOGLE}, -} - -var params = scraper.Params{ - Page: "offset", - Locale: "country", // Should be last 2 characters of Locale. - SafeSearch: "safesearch", // Can be "off" or "strict". 
-} - -const sourceParam = "source=web" -const spellcheckParam = "spellcheck=0" diff --git a/src/search/engines/brave/params.go b/src/search/engines/brave/params.go new file mode 100644 index 00000000..ca21f8e2 --- /dev/null +++ b/src/search/engines/brave/params.go @@ -0,0 +1,30 @@ +package brave + +import ( + "fmt" + "strings" + + "github.com/hearchco/agent/src/search/engines/options" +) + +const ( + paramKeyPage = "offset" + paramKeyLocale = "country" // Should be last 2 characters of Locale. + paramKeySafeSearch = "safesearch" // Can be "off" or "strict". + + paramSource = "source=web" + paramSpellcheck = "spellcheck=0" +) + +func localeCookieString(locale options.Locale) string { + region := strings.SplitN(strings.ToLower(locale.String()), "_", 2)[1] + return fmt.Sprintf("%v=%v", paramKeyLocale, region) +} + +func safeSearchCookieString(safesearch bool) string { + if safesearch { + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "strict") + } else { + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "off") + } +} diff --git a/src/search/engines/brave/search.go b/src/search/engines/brave/search.go index 22c59e78..f0f6aec3 100644 --- a/src/search/engines/brave/search.go +++ b/src/search/engines/brave/search.go @@ -20,10 +20,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -80,15 +80,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" - combinedParams := morestrings.JoinNonEmpty([]string{sourceParam, pageParam}, "&", "&") + paramPage := "" + combinedParams := morestrings.JoinNonEmpty([]string{paramSource, paramPage}, "&", "&") if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0) - combinedParams = morestrings.JoinNonEmpty([]string{spellcheckParam, pageParam}, "&", "&") + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0) + combinedParams = morestrings.JoinNonEmpty([]string{paramSpellcheck, paramPage}, "&", "&") } - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/brave/search_test.go b/src/search/engines/brave/search_test.go index 422add79..e1597813 100644 --- a/src/search/engines/brave/search_test.go +++ b/src/search/engines/brave/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/duckduckgo/cookies.go b/src/search/engines/duckduckgo/cookies.go deleted file mode 100644 index f2daad25..00000000 --- a/src/search/engines/duckduckgo/cookies.go +++ /dev/null @@ -1,13 +0,0 @@ -package duckduckgo - -import ( - "fmt" - "strings" - - "github.com/hearchco/agent/src/search/engines/options" -) - -func localeCookieString(locale options.Locale) string { - spl := strings.SplitN(strings.ToLower(locale.String()), "_", 2) - return fmt.Sprintf("%v=%v-%v", params.Locale, spl[1], spl[0]) -} diff --git a/src/search/engines/duckduckgo/info.go b/src/search/engines/duckduckgo/info.go new file mode 100644 index 00000000..5b6ef099 --- /dev/null +++ b/src/search/engines/duckduckgo/info.go @@ -0,0 +1,12 @@ +package duckduckgo + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.DUCKDUCKGO + searchURL = "https://lite.duckduckgo.com/lite/" +) + +var origins = [...]engines.Name{engines.DUCKDUCKGO, engines.BING} diff --git a/src/search/engines/duckduckgo/infoparams.go b/src/search/engines/duckduckgo/infoparams.go deleted file mode 100644 index d6158f2b..00000000 --- a/src/search/engines/duckduckgo/infoparams.go +++ /dev/null @@ -1,19 +0,0 @@ -package duckduckgo - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.DUCKDUCKGO, - Domain: "lite.duckduckgo.com", - URL: "https://lite.duckduckgo.com/lite/", - Origins: []engines.Name{engines.DUCKDUCKGO, engines.BING}, -} - -var params = scraper.Params{ - Page: "dc", - Locale: "kl", // Should be Locale with _ replaced by - and first 2 letters as last and vice versa. - SafeSearch: "", // Always enabled. 
-} diff --git a/src/search/engines/duckduckgo/params.go b/src/search/engines/duckduckgo/params.go new file mode 100644 index 00000000..a1037334 --- /dev/null +++ b/src/search/engines/duckduckgo/params.go @@ -0,0 +1,19 @@ +package duckduckgo + +import ( + "fmt" + "strings" + + "github.com/hearchco/agent/src/search/engines/options" +) + +const ( + paramKeyPage = "dc" + paramKeyLocale = "kl" // Should be Locale with _ replaced by - and first 2 letters as last and vice versa. + paramKeySafeSearch = "" // Always enabled. +) + +func localeCookieString(locale options.Locale) string { + spl := strings.SplitN(strings.ToLower(locale.String()), "_", 2) + return fmt.Sprintf("%v=%v-%v", paramKeyLocale, spl[1], spl[0]) +} diff --git a/src/search/engines/duckduckgo/search.go b/src/search/engines/duckduckgo/search.go index ce544340..0047587b 100644 --- a/src/search/engines/duckduckgo/search.go +++ b/src/search/engines/duckduckgo/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -105,15 +105,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. var err error if pageNum0 == 0 { - urll := fmt.Sprintf("%v?q=%v", info.URL, query) - anonUrll := fmt.Sprintf("%v?q=%v", info.URL, anonymize.String(query)) + urll := fmt.Sprintf("%v?q=%v", searchURL, query) + anonUrll := fmt.Sprintf("%v?q=%v", searchURL, anonymize.String(query)) err = se.Get(ctx, urll, anonUrll) } else { // This value changes depending on how many results were returned on the first page, so it's set to the lowest seen value. 
- pageParam := fmt.Sprintf("%v=%v", params.Page, pageNum0*20) - body := strings.NewReader(fmt.Sprintf("q=%v&%v", query, pageParam)) - anonBody := fmt.Sprintf("q=%v&%v", anonymize.String(query), pageParam) - err = se.Post(ctx, info.URL, body, anonBody) + paramPage := fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*20) + body := strings.NewReader(fmt.Sprintf("q=%v&%v", query, paramPage)) + anonBody := fmt.Sprintf("q=%v&%v", anonymize.String(query), paramPage) + err = se.Post(ctx, searchURL, body, anonBody) } if err != nil { diff --git a/src/search/engines/duckduckgo/search_test.go b/src/search/engines/duckduckgo/search_test.go index 8b559bf3..e1c33ac5 100644 --- a/src/search/engines/duckduckgo/search_test.go +++ b/src/search/engines/duckduckgo/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/etools/info.go b/src/search/engines/etools/info.go new file mode 100644 index 00000000..d58bbea9 --- /dev/null +++ b/src/search/engines/etools/info.go @@ -0,0 +1,13 @@ +package etools + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.ETOOLS + searchURL = "https://www.etools.ch/searchSubmit.do" + pageURL = "https://www.etools.ch/search.do" +) + +var origins = [...]engines.Name{engines.ETOOLS, engines.BING, engines.BRAVE, engines.DUCKDUCKGO, engines.GOOGLE, engines.MOJEEK, engines.QWANT, engines.YAHOO} diff --git a/src/search/engines/etools/infoparams.go b/src/search/engines/etools/infoparams.go deleted file mode 100644 index 222ff5df..00000000 --- a/src/search/engines/etools/infoparams.go +++ /dev/null @@ -1,23 +0,0 @@ -package etools - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.ETOOLS, - Domain: "www.etools.ch", - URL: "https://www.etools.ch/searchSubmit.do", - Origins: []engines.Name{engines.ETOOLS}, // Disabled because ETOOLS has issues most of the time: []engines.Name{engines.BING, engines.BRAVE, engines.DUCKDUCKGO, engines.GOOGLE, engines.MOJEEK, engines.QWANT, engines.YAHOO}, -} - -const pageURL = "https://www.etools.ch/search.do" - -var params = scraper.Params{ - Page: "page", - SafeSearch: "safeSearch", // Can be "true" or "false". -} - -const countryParam = "country=web" -const languageParam = "language=all" diff --git a/src/search/engines/etools/params.go b/src/search/engines/etools/params.go index 0681f964..5f833596 100644 --- a/src/search/engines/etools/params.go +++ b/src/search/engines/etools/params.go @@ -4,10 +4,18 @@ import ( "fmt" ) +const ( + paramKeyPage = "page" + paramKeySafeSearch = "safeSearch" // Can be "true" or "false". 
+ + paramCountry = "country=web" + paramLanguage = "language=all" +) + func safeSearchParamString(safesearch bool) string { if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "true") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "true") } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "false") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "false") } } diff --git a/src/search/engines/etools/search.go b/src/search/engines/etools/search.go index bfd4bce5..c5e6d090 100644 --- a/src/search/engines/etools/search.go +++ b/src/search/engines/etools/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -95,7 +95,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. firstRequest := true // Static params. - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -105,7 +105,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. var err error // eTools requires a request for the first page. if pageNum0 == 0 || firstRequest { - combinedParams := morestrings.JoinNonEmpty([]string{countryParam, languageParam, safeSearchParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramCountry, paramLanguage, paramSafeSearch}, "&", "&") body := strings.NewReader(fmt.Sprintf("query=%v%v", query, combinedParams)) anonBody := fmt.Sprintf("query=%v%v", anonymize.String(query), combinedParams) @@ -113,9 +113,9 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. 
if firstRequest { firstCtx := colly.NewContext() firstCtx.Put("ignore", strconv.FormatBool(true)) - err = se.Post(firstCtx, info.URL, body, anonBody) + err = se.Post(firstCtx, searchURL, body, anonBody) } else { - err = se.Post(ctx, info.URL, body, anonBody) + err = se.Post(ctx, searchURL, body, anonBody) } firstRequest = false @@ -125,8 +125,8 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. // Since the above can happen for the first request and then we need to request the wanted page. if pageNum0 > 0 { // Query isn't needed as it's saved in the JSESSION cookie. - pageParam := fmt.Sprintf("%v=%v", params.Page, pageNum0+1) - urll := fmt.Sprintf("%v?%v", pageURL, pageParam) + paramPage := fmt.Sprintf("%v=%v", paramKeyPage, pageNum0+1) + urll := fmt.Sprintf("%v?%v", pageURL, paramPage) err = se.Get(ctx, urll, urll) } diff --git a/src/search/engines/etools/search_test.go b/src/search/engines/etools/search_test.go index 974c37dc..69547c77 100644 --- a/src/search/engines/etools/search_test.go +++ b/src/search/engines/etools/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/google/info.go b/src/search/engines/google/info.go new file mode 100644 index 00000000..5963d0f6 --- /dev/null +++ b/src/search/engines/google/info.go @@ -0,0 +1,12 @@ +package google + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.GOOGLE + searchURL = "https://www.google.com/search" +) + +var origins = [...]engines.Name{engines.GOOGLE} diff --git a/src/search/engines/google/infoparams.go b/src/search/engines/google/infoparams.go deleted file mode 100644 index e0db00f2..00000000 --- a/src/search/engines/google/infoparams.go +++ /dev/null @@ -1,22 +0,0 @@ -package google - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.GOOGLE, - Domain: "www.google.com", - URL: "https://www.google.com/search", - Origins: []engines.Name{engines.GOOGLE}, -} - -var params = scraper.Params{ - Page: "start", - Locale: "hl", // Should be first 2 characters of Locale. - LocaleSec: "lr", // Should be first 2 characters of Locale with prefixed "lang_". - SafeSearch: "safe", // Can be "off", "medium or "high". -} - -const filterParam = "filter=0" diff --git a/src/search/engines/google/params.go b/src/search/engines/google/params.go index 762b1a11..4d5b0e11 100644 --- a/src/search/engines/google/params.go +++ b/src/search/engines/google/params.go @@ -7,15 +7,24 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "start" + paramKeyLocale = "hl" // Should be first 2 characters of Locale. + paramKeyLocaleSec = "lr" // Should be first 2 characters of Locale with prefixed "lang_". + paramKeySafeSearch = "safe" // Can be "off", "medium or "high". 
+ + paramFilter = "filter=0" +) + func localeParamString(locale options.Locale) string { lang := strings.SplitN(strings.ToLower(locale.String()), "_", 2)[0] - return fmt.Sprintf("%v=%v&%v=lang_%v", params.Locale, lang, params.LocaleSec, lang) + return fmt.Sprintf("%v=%v&%v=lang_%v", paramKeyLocale, lang, paramKeyLocaleSec, lang) } func safeSearchParamString(safesearch bool) string { if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "high") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "high") } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "off") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "off") } } diff --git a/src/search/engines/google/search.go b/src/search/engines/google/search.go index 67d88d02..a7552585 100644 --- a/src/search/engines/google/search.go +++ b/src/search/engines/google/search.go @@ -20,10 +20,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -61,8 +61,8 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramLocale := localeParamString(opts.Locale) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -70,15 +70,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0*10) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*10) } - combinedParams := morestrings.JoinNonEmpty([]string{filterParam, pageParam, localeParam, safeSearchParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramFilter, paramPage, paramLocale, paramSafeSearch}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/google/search_test.go b/src/search/engines/google/search_test.go index af3288cb..c66a69a5 100644 --- a/src/search/engines/google/search_test.go +++ b/src/search/engines/google/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/googleimages/info.go b/src/search/engines/googleimages/info.go new file mode 100644 index 00000000..8b95b5ab --- /dev/null +++ b/src/search/engines/googleimages/info.go @@ -0,0 +1,12 @@ +package googleimages + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.GOOGLEIMAGES + searchURL = "https://www.google.com/search" +) + +var origins = [...]engines.Name{engines.GOOGLEIMAGES} diff --git a/src/search/engines/googleimages/infoparams.go b/src/search/engines/googleimages/infoparams.go deleted file mode 100644 index 00ff658c..00000000 --- a/src/search/engines/googleimages/infoparams.go +++ /dev/null @@ -1,24 +0,0 @@ -package googleimages - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.GOOGLEIMAGES, - Domain: "images.google.com", - URL: "https://www.google.com/search", - Origins: []engines.Name{engines.GOOGLEIMAGES}, -} - -var params = scraper.Params{ - Page: "async=_fmt:json,p:1,ijn", - Locale: "hl", // Should be first 2 characters of Locale. - LocaleSec: "lr", // Should be first 2 characters of Locale with prefixed "lang_". - SafeSearch: "safe", // Can be "off", "medium or "high". -} - -const tbmParam = "tbm=isch" -const asearchParam = "asearch=isch" -const filterParam = "filter=0" diff --git a/src/search/engines/googleimages/params.go b/src/search/engines/googleimages/params.go index c422e205..fc7de016 100644 --- a/src/search/engines/googleimages/params.go +++ b/src/search/engines/googleimages/params.go @@ -7,15 +7,26 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "async=_fmt:json,p:1,ijn" + paramKeyLocale = "hl" // Should be first 2 characters of Locale. + paramKeyLocaleSec = "lr" // Should be first 2 characters of Locale with prefixed "lang_". 
+ paramKeySafeSearch = "safe" // Can be "off", "medium" or "high". + + paramTbm = "tbm=isch" + paramAsearch = "asearch=isch" + paramFilter = "filter=0" +) + func localeParamString(locale options.Locale) string { lang := strings.SplitN(strings.ToLower(locale.String()), "_", 2)[0] - return fmt.Sprintf("%v=%v&%v=lang_%v", params.Locale, lang, params.LocaleSec, lang) + return fmt.Sprintf("%v=%v&%v=lang_%v", paramKeyLocale, lang, paramKeyLocaleSec, lang) } func safeSearchParamString(safesearch bool) string { if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "high") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "high") } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "off") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "off") } } diff --git a/src/search/engines/googleimages/search.go b/src/search/engines/googleimages/search.go index 38c0cd6a..c9bf30e3 100644 --- a/src/search/engines/googleimages/search.go +++ b/src/search/engines/googleimages/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -107,8 +107,8 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramLocale := localeParamString(opts.Locale) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -116,15 +116,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := fmt.Sprintf("%v:1", params.Page) + paramPage := fmt.Sprintf("%v:1", paramKeyPage) if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v:%v", params.Page, pageNum0*10) + paramPage = fmt.Sprintf("%v:%v", paramKeyPage, pageNum0*10) } - combinedParams := morestrings.JoinNonEmpty([]string{tbmParam, asearchParam, filterParam, pageParam, localeParam, safeSearchParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramTbm, paramAsearch, paramFilter, paramPage, paramLocale, paramSafeSearch}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/googleimages/search_test.go b/src/search/engines/googleimages/search_test.go index 02eb6488..08a58464 100644 --- a/src/search/engines/googleimages/search_test.go +++ b/src/search/engines/googleimages/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/googlescholar/info.go b/src/search/engines/googlescholar/info.go new file mode 100644 index 00000000..c77ea12c --- /dev/null +++ b/src/search/engines/googlescholar/info.go @@ -0,0 +1,12 @@ +package googlescholar + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.GOOGLESCHOLAR + searchURL = "https://scholar.google.com/scholar" +) + +var origins = [...]engines.Name{engines.GOOGLESCHOLAR} diff --git a/src/search/engines/googlescholar/infoparams.go b/src/search/engines/googlescholar/infoparams.go deleted file mode 100644 index 67b5972a..00000000 --- a/src/search/engines/googlescholar/infoparams.go +++ /dev/null @@ -1,22 +0,0 @@ -package googlescholar - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.GOOGLESCHOLAR, - Domain: "scholar.google.com", - URL: "https://scholar.google.com/scholar", - Origins: []engines.Name{engines.GOOGLESCHOLAR}, -} - -var params = scraper.Params{ - Page: "start", - Locale: "hl", // Should be first 2 characters of Locale. - LocaleSec: "lr", // Should be first 2 characters of Locale with prefixed "lang_". - SafeSearch: "safe", // Can be "off", "medium or "high". -} - -const filterParam = "filter=0" diff --git a/src/search/engines/googlescholar/params.go b/src/search/engines/googlescholar/params.go index 8578cb67..4c11c41f 100644 --- a/src/search/engines/googlescholar/params.go +++ b/src/search/engines/googlescholar/params.go @@ -7,15 +7,24 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "start" + paramKeyLocale = "hl" // Should be first 2 characters of Locale. + paramKeyLocaleSec = "lr" // Should be first 2 characters of Locale with prefixed "lang_". + paramKeySafeSearch = "safe" // Can be "off", "medium" or "high". 
+ + paramFilter = "filter=0" +) + func localeParamString(locale options.Locale) string { lang := strings.SplitN(strings.ToLower(locale.String()), "_", 2)[0] - return fmt.Sprintf("%v=%v&%v=lang_%v", params.Locale, lang, params.LocaleSec, lang) + return fmt.Sprintf("%v=%v&%v=lang_%v", paramKeyLocale, lang, paramKeyLocaleSec, lang) } func safeSearchParamString(safesearch bool) string { if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "high") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "high") } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "off") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "off") } } diff --git a/src/search/engines/googlescholar/search.go b/src/search/engines/googlescholar/search.go index 85e30e3e..208db913 100644 --- a/src/search/engines/googlescholar/search.go +++ b/src/search/engines/googlescholar/search.go @@ -20,10 +20,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -74,8 +74,8 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramLocale := localeParamString(opts.Locale) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -83,15 +83,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0*10) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*10) } - combinedParams := morestrings.JoinNonEmpty([]string{filterParam, pageParam, localeParam, safeSearchParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramFilter, paramPage, paramLocale, paramSafeSearch}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/googlescholar/search_test.go b/src/search/engines/googlescholar/search_test.go index ab3687af..df494362 100644 --- a/src/search/engines/googlescholar/search_test.go +++ b/src/search/engines/googlescholar/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/mojeek/info.go b/src/search/engines/mojeek/info.go new file mode 100644 index 00000000..10b0e08e --- /dev/null +++ b/src/search/engines/mojeek/info.go @@ -0,0 +1,12 @@ +package mojeek + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.MOJEEK + searchURL = "https://www.mojeek.com/search" +) + +var origins = [...]engines.Name{engines.MOJEEK} diff --git a/src/search/engines/mojeek/infoparams.go b/src/search/engines/mojeek/infoparams.go deleted file mode 100644 index 9563becb..00000000 --- a/src/search/engines/mojeek/infoparams.go +++ /dev/null @@ -1,20 +0,0 @@ -package mojeek - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.MOJEEK, - Domain: "www.mojeek.com", - URL: "https://www.mojeek.com/search", - Origins: []engines.Name{engines.MOJEEK}, -} - -var params = scraper.Params{ - Page: "s", - Locale: "lb", // Should be first 2 characters of Locale. - LocaleSec: "arc", // Should be last 2 characters of Locale. - SafeSearch: "safe", // Can be "0" or "1". -} diff --git a/src/search/engines/mojeek/params.go b/src/search/engines/mojeek/params.go index fa3aa064..20910dee 100644 --- a/src/search/engines/mojeek/params.go +++ b/src/search/engines/mojeek/params.go @@ -7,15 +7,22 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "s" + paramKeyLocale = "lb" // Should be first 2 characters of Locale. + paramKeyLocaleSec = "arc" // Should be last 2 characters of Locale. + paramKeySafeSearch = "safe" // Can be "0" or "1". 
+) + func localeParamString(locale options.Locale) string { spl := strings.SplitN(strings.ToLower(locale.String()), "_", 2) - return fmt.Sprintf("%v=%v&%v=%v", params.Locale, spl[0], params.LocaleSec, spl[1]) + return fmt.Sprintf("%v=%v&%v=%v", paramKeyLocale, spl[0], paramKeyLocaleSec, spl[1]) } func safeSearchParamString(safesearch bool) string { if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "1") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "1") } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "0") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "0") } } diff --git a/src/search/engines/mojeek/search.go b/src/search/engines/mojeek/search.go index a2bf0744..375793fe 100644 --- a/src/search/engines/mojeek/search.go +++ b/src/search/engines/mojeek/search.go @@ -20,10 +20,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -61,8 +61,8 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramLocale := localeParamString(opts.Locale) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -70,15 +70,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0*10+1) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*10+1) } - combinedParams := morestrings.JoinNonEmpty([]string{pageParam, localeParam, safeSearchParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramPage, paramLocale, paramSafeSearch}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/mojeek/search_test.go b/src/search/engines/mojeek/search_test.go index a707a04c..2db46a9a 100644 --- a/src/search/engines/mojeek/search_test.go +++ b/src/search/engines/mojeek/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/presearch/cookies.go b/src/search/engines/presearch/cookies.go deleted file mode 100644 index b707cdd0..00000000 --- a/src/search/engines/presearch/cookies.go +++ /dev/null @@ -1,13 +0,0 @@ -package presearch - -import ( - "fmt" -) - -func safeSearchCookieString(safesearch bool) string { - if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "true") - } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "false") - } -} diff --git a/src/search/engines/presearch/info.go b/src/search/engines/presearch/info.go new file mode 100644 index 00000000..a02fbd8b --- /dev/null +++ b/src/search/engines/presearch/info.go @@ -0,0 +1,12 @@ +package presearch + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.PRESEARCH + searchURL = "https://presearch.com/search" +) + +var origins = [...]engines.Name{engines.PRESEARCH, engines.GOOGLE} diff --git a/src/search/engines/presearch/infoparams.go b/src/search/engines/presearch/infoparams.go deleted file mode 100644 index 484a0d11..00000000 --- a/src/search/engines/presearch/infoparams.go +++ /dev/null @@ -1,18 +0,0 @@ -package presearch - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.PRESEARCH, - Domain: "presearch.com", - URL: "https://presearch.com/search", - Origins: []engines.Name{engines.PRESEARCH, engines.GOOGLE}, -} - -var params = scraper.Params{ - Page: "page", - SafeSearch: "use_safe_search", // // Can be "true" or "false". 
-} diff --git a/src/search/engines/presearch/params.go b/src/search/engines/presearch/params.go new file mode 100644 index 00000000..04f411d1 --- /dev/null +++ b/src/search/engines/presearch/params.go @@ -0,0 +1,18 @@ +package presearch + +import ( + "fmt" +) + +const ( + paramKeyPage = "page" + paramKeySafeSearch = "use_safe_search" // Can be "true" or "false". +) + +func safeSearchCookieString(safesearch bool) string { + if safesearch { + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "true") + } else { + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "false") + } +} diff --git a/src/search/engines/presearch/search.go b/src/search/engines/presearch/search.go index d0b17d1f..c093cc42 100644 --- a/src/search/engines/presearch/search.go +++ b/src/search/engines/presearch/search.go @@ -22,10 +22,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -113,15 +113,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0+1) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0+1) } - combinedParams := morestrings.JoinNonEmpty([]string{pageParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramPage}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/presearch/search_test.go b/src/search/engines/presearch/search_test.go index 8f73bf3c..2cea63d2 100644 --- a/src/search/engines/presearch/search_test.go +++ b/src/search/engines/presearch/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/qwant/info.go b/src/search/engines/qwant/info.go new file mode 100644 index 00000000..02515f62 --- /dev/null +++ b/src/search/engines/qwant/info.go @@ -0,0 +1,12 @@ +package qwant + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.QWANT + searchURL = "https://api.qwant.com/v3/search/web" +) + +var origins = [...]engines.Name{engines.QWANT, engines.BING} diff --git a/src/search/engines/qwant/infoparams.go b/src/search/engines/qwant/infoparams.go deleted file mode 100644 index 59c4c8cb..00000000 --- a/src/search/engines/qwant/infoparams.go +++ /dev/null @@ -1,21 +0,0 @@ -package qwant - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.QWANT, - Domain: "www.qwant.com", - URL: "https://api.qwant.com/v3/search/web", - Origins: []engines.Name{engines.QWANT, engines.BING}, -} - -var params = scraper.Params{ - Page: "offset", - Locale: "locale", // Same as Locale, only the last two characters are lowered and not everything is supported. - SafeSearch: "safesearch", // Can be "0" or "1". -} - -const countParam = "count=10" diff --git a/src/search/engines/qwant/params.go b/src/search/engines/qwant/params.go index dfa638de..c2caf4b5 100644 --- a/src/search/engines/qwant/params.go +++ b/src/search/engines/qwant/params.go @@ -8,13 +8,21 @@ import ( "github.com/rs/zerolog/log" ) +const ( + paramKeyPage = "offset" + paramKeyLocale = "locale" // Same as Locale, only the last two characters are lowered and not everything is supported. + paramKeySafeSearch = "safesearch" // Can be "1" (on) or "2" (off). 
+ + paramCount = "count=10" +) + var validLocales = [...]string{"bg_bg", "br_fr", "ca_ad", "ca_es", "ca_fr", "co_fr", "cs_cz", "cy_gb", "da_dk", "de_at", "de_ch", "de_de", "ec_ca", "el_gr", "en_au", "en_ca", "en_gb", "en_ie", "en_my", "en_nz", "en_us", "es_ad", "es_ar", "es_cl", "es_co", "es_es", "es_mx", "es_pe", "et_ee", "eu_es", "eu_fr", "fc_ca", "fi_fi", "fr_ad", "fr_be", "fr_ca", "fr_ch", "fr_fr", "gd_gb", "he_il", "hu_hu", "it_ch", "it_it", "ko_kr", "nb_no", "nl_be", "nl_nl", "pl_pl", "pt_ad", "pt_pt", "ro_ro", "sv_se", "th_th", "zh_cn", "zh_hk"} func localeParamString(locale options.Locale) string { l := strings.ToLower(locale.String()) for _, vl := range validLocales { if l == vl { - return fmt.Sprintf("%v=%v", params.Locale, l) + return fmt.Sprintf("%v=%v", paramKeyLocale, l) } } @@ -23,13 +31,13 @@ func localeParamString(locale options.Locale) string { Str("locale", locale.String()). Strs("validLocales", validLocales[:]). Msg("Unsupported locale supplied for this engine, falling back to default") - return fmt.Sprintf("%v=%v", params.Locale, strings.ToLower(options.LocaleDefault.String())) + return fmt.Sprintf("%v=%v", paramKeyLocale, strings.ToLower(options.LocaleDefault.String())) } func safeSearchParamString(safesearch bool) string { if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "1") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "1") } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "2") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "2") } } diff --git a/src/search/engines/qwant/search.go b/src/search/engines/qwant/search.go index 403c9d00..36e29b3f 100644 --- a/src/search/engines/qwant/search.go +++ b/src/search/engines/qwant/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } 
@@ -83,8 +83,8 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramLocale := localeParamString(opts.Locale) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -92,11 +92,11 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. - pageParam := fmt.Sprintf("%v=%v", params.Page, pageNum0*10) - combinedParams := morestrings.JoinNonEmpty([]string{countParam, localeParam, pageParam, safeSearchParam}, "&", "&") + paramPage := fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*10) + combinedParams := morestrings.JoinNonEmpty([]string{paramCount, paramLocale, paramPage, paramSafeSearch}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/qwant/search_test.go b/src/search/engines/qwant/search_test.go index d4ca0c4e..6c0888f7 100644 --- a/src/search/engines/qwant/search_test.go +++ b/src/search/engines/qwant/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/startpage/info.go b/src/search/engines/startpage/info.go new file mode 100644 index 00000000..f9598899 --- /dev/null +++ b/src/search/engines/startpage/info.go @@ -0,0 +1,12 @@ +package startpage + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.STARTPAGE + searchURL = "https://www.startpage.com/sp/search" +) + +var origins = [...]engines.Name{engines.STARTPAGE, engines.GOOGLE} diff --git a/src/search/engines/startpage/infoparams.go b/src/search/engines/startpage/infoparams.go deleted file mode 100644 index c17ab9fe..00000000 --- a/src/search/engines/startpage/infoparams.go +++ /dev/null @@ -1,18 +0,0 @@ -package startpage - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.STARTPAGE, - Domain: "www.startpage.com", - URL: "https://www.startpage.com/sp/search", - Origins: []engines.Name{engines.STARTPAGE, engines.GOOGLE}, -} - -var params = scraper.Params{ - Page: "page", - SafeSearch: "qadf", // Can be "none" or empty param (empty means it's enabled). -} diff --git a/src/search/engines/startpage/params.go b/src/search/engines/startpage/params.go index 5d300319..c149ffd5 100644 --- a/src/search/engines/startpage/params.go +++ b/src/search/engines/startpage/params.go @@ -4,10 +4,15 @@ import ( "fmt" ) +const ( + paramKeyPage = "page" + paramKeySafeSearch = "qadf" // Can be "none" or empty param (empty means it's enabled). 
+) + func safeSearchParamString(safesearch bool) string { if safesearch { return "" } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "none") + return fmt.Sprintf("%v=%v", paramKeySafeSearch, "none") } } diff --git a/src/search/engines/startpage/search.go b/src/search/engines/startpage/search.go index 9a01eaac..9753c47c 100644 --- a/src/search/engines/startpage/search.go +++ b/src/search/engines/startpage/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -74,7 +74,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - safeSearchParam := safeSearchParamString(opts.SafeSearch) + paramSafeSearch := safeSearchParamString(opts.SafeSearch) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -82,15 +82,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, pageNum0+1) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, pageNum0+1) } - combinedParams := morestrings.JoinNonEmpty([]string{pageParam, safeSearchParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramPage, paramSafeSearch}, "&", "&") - urll := fmt.Sprintf("%v?q=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?q=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?q=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?q=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/startpage/search_test.go b/src/search/engines/startpage/search_test.go index 746935d8..ff9393a7 100644 --- a/src/search/engines/startpage/search_test.go +++ b/src/search/engines/startpage/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/swisscows/info.go b/src/search/engines/swisscows/info.go new file mode 100644 index 00000000..ea28df63 --- /dev/null +++ b/src/search/engines/swisscows/info.go @@ -0,0 +1,12 @@ +package swisscows + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.SWISSCOWS + searchURL = "https://api.swisscows.com/web/search" +) + +var origins = [...]engines.Name{engines.SWISSCOWS, engines.BING} diff --git a/src/search/engines/swisscows/infoparams.go b/src/search/engines/swisscows/infoparams.go deleted file mode 100644 index 32bdfdf1..00000000 --- a/src/search/engines/swisscows/infoparams.go +++ /dev/null @@ -1,21 +0,0 @@ -package swisscows - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.SWISSCOWS, - Domain: "swisscows.com", - URL: "https://api.swisscows.com/web/search", - Origins: []engines.Name{engines.SWISSCOWS, engines.BING}, -} - -var params = scraper.Params{ - Page: "offset", - Locale: "region", // Should be the same as Locale, only with "_" replaced by "-". -} - -const freshnessParam = "freshness=All" -const itemsParam = "itemsCount=10" diff --git a/src/search/engines/swisscows/params.go b/src/search/engines/swisscows/params.go index 88005194..cb034db8 100644 --- a/src/search/engines/swisscows/params.go +++ b/src/search/engines/swisscows/params.go @@ -7,7 +7,15 @@ import ( "github.com/hearchco/agent/src/search/engines/options" ) +const ( + paramKeyPage = "offset" + paramKeyLocale = "region" // Should be the same as Locale, only with "_" replaced by "-". 
+ + paramFreshness = "freshness=All" + paramItems = "itemsCount=10" +) + func localeParamString(locale options.Locale) string { region := strings.Replace(locale.String(), "_", "-", 1) - return fmt.Sprintf("%v=%v", params.Locale, region) + return fmt.Sprintf("%v=%v", paramKeyLocale, region) } diff --git a/src/search/engines/swisscows/search.go b/src/search/engines/swisscows/search.go index 94101e27..3094feeb 100644 --- a/src/search/engines/swisscows/search.go +++ b/src/search/engines/swisscows/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -101,7 +101,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. }) // Static params. - localeParam := localeParamString(opts.Locale) + paramLocale := localeParamString(opts.Locale) for i := range opts.Pages.Max { pageNum0 := i + opts.Pages.Start @@ -109,13 +109,14 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := fmt.Sprintf("%v=%v", params.Page, pageNum0*10) + paramPage := fmt.Sprintf("%v=%v", paramKeyPage, pageNum0*10) - combinedParams := morestrings.JoinNonEmpty([]string{freshnessParam, itemsParam, pageParam}, "?", "&") + combinedParamsLeft := morestrings.JoinNonEmpty([]string{paramFreshness, paramItems, paramPage}, "?", "&") + combinedParamsRight := morestrings.JoinNonEmpty([]string{paramLocale}, "&", "&") // Non standard order of parameters required - urll := fmt.Sprintf("%v%v&query=%v&%v", info.URL, combinedParams, query, localeParam) - anonUrll := fmt.Sprintf("%v%v&query=%v&%v", info.URL, combinedParams, anonymize.String(query), localeParam) + urll := fmt.Sprintf("%v%v&query=%v%v", searchURL, combinedParamsLeft, query, combinedParamsRight) + anonUrll := fmt.Sprintf("%v%v&query=%v%v", searchURL, combinedParamsLeft, anonymize.String(query), combinedParamsRight) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/swisscows/search_test.go b/src/search/engines/swisscows/search_test.go index f38afecb..6944b180 100644 --- a/src/search/engines/swisscows/search_test.go +++ b/src/search/engines/swisscows/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/yahoo/cookies.go b/src/search/engines/yahoo/cookies.go deleted file mode 100644 index ca1ac657..00000000 --- a/src/search/engines/yahoo/cookies.go +++ /dev/null @@ -1,13 +0,0 @@ -package yahoo - -import ( - "fmt" -) - -func safeSearchCookieString(safesearch bool) string { - if safesearch { - return fmt.Sprintf("%v=%v", params.SafeSearch, "r") - } else { - return fmt.Sprintf("%v=%v", params.SafeSearch, "p") - } -} diff --git a/src/search/engines/yahoo/info.go b/src/search/engines/yahoo/info.go new file mode 100644 index 00000000..fb1996f9 --- /dev/null +++ b/src/search/engines/yahoo/info.go @@ -0,0 +1,12 @@ +package yahoo + +import ( + "github.com/hearchco/agent/src/search/engines" +) + +const ( + seName = engines.YAHOO + searchURL = "https://search.yahoo.com/search" +) + +var origins = [...]engines.Name{engines.YAHOO, engines.BING} diff --git a/src/search/engines/yahoo/infoparams.go b/src/search/engines/yahoo/infoparams.go deleted file mode 100644 index f16444af..00000000 --- a/src/search/engines/yahoo/infoparams.go +++ /dev/null @@ -1,20 +0,0 @@ -package yahoo - -import ( - "github.com/hearchco/agent/src/search/engines" - "github.com/hearchco/agent/src/search/scraper" -) - -var info = scraper.Info{ - Name: engines.YAHOO, - Domain: "search.yahoo.com", - URL: "https://search.yahoo.com/search", - Origins: []engines.Name{engines.YAHOO, engines.BING}, -} - -var params = scraper.Params{ - Page: "b", - SafeSearch: "vm", // Can be "p" (disabled) or "r" (enabled). 
-} - -const safeSearchCookiePrefix = "sB=v=1&pn=10&rw=new&userset=0" diff --git a/src/search/engines/yahoo/params.go b/src/search/engines/yahoo/params.go new file mode 100644 index 00000000..2082452e --- /dev/null +++ b/src/search/engines/yahoo/params.go @@ -0,0 +1,20 @@ +package yahoo + +import ( + "fmt" +) + +const ( + paramKeyPage = "b" + paramKeySafeSearch = "vm" // Can be "p" (disabled) or "r" (enabled). + + paramSafeSearchPrefix = "sB=v=1&pn=10&rw=new&userset=0" +) + +func safeSearchCookieString(safesearch bool) string { + if safesearch { + return fmt.Sprintf("%v&%v=%v", paramSafeSearchPrefix, paramKeySafeSearch, "r") + } else { + return fmt.Sprintf("%v&%v=%v", paramSafeSearchPrefix, paramKeySafeSearch, "p") + } +} diff --git a/src/search/engines/yahoo/search.go b/src/search/engines/yahoo/search.go index dd51b9ee..8d0dd1d4 100644 --- a/src/search/engines/yahoo/search.go +++ b/src/search/engines/yahoo/search.go @@ -21,10 +21,10 @@ type Engine struct { scraper.EngineBase } -func New() *Engine { - return &Engine{EngineBase: scraper.EngineBase{ - Name: info.Name, - Origins: info.Origins, +func New() scraper.Enginer { + return &Engine{scraper.EngineBase{ + Name: seName, + Origins: origins[:], }} } @@ -34,7 +34,7 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. pageRankCounter := scraper.NewPageRankCounter(opts.Pages.Max) se.OnRequest(func(r *colly.Request) { - r.Headers.Add("Cookie", fmt.Sprintf("%v&%v", safeSearchCookiePrefix, safeSearchCookieString(opts.SafeSearch))) + r.Headers.Add("Cookie", safeSearchCookieString(opts.SafeSearch)) }) se.OnHTML(dompaths.Result, func(e *colly.HTMLElement) { @@ -108,15 +108,15 @@ func (se Engine) Search(query string, opts options.Options, resChan chan result. ctx.Put("page", strconv.Itoa(i)) // Dynamic params. 
- pageParam := "" + paramPage := "" if pageNum0 > 0 { - pageParam = fmt.Sprintf("%v=%v", params.Page, (pageNum0-1)*7+8) + paramPage = fmt.Sprintf("%v=%v", paramKeyPage, (pageNum0-1)*7+8) } - combinedParams := morestrings.JoinNonEmpty([]string{pageParam}, "&", "&") + combinedParams := morestrings.JoinNonEmpty([]string{paramPage}, "&", "&") - urll := fmt.Sprintf("%v?p=%v%v", info.URL, query, combinedParams) - anonUrll := fmt.Sprintf("%v?p=%v%v", info.URL, anonymize.String(query), combinedParams) + urll := fmt.Sprintf("%v?p=%v%v", searchURL, query, combinedParams) + anonUrll := fmt.Sprintf("%v?p=%v%v", searchURL, anonymize.String(query), combinedParams) if err := se.Get(ctx, urll, anonUrll); err != nil { retErrors = append(retErrors, err) diff --git a/src/search/engines/yahoo/search_test.go b/src/search/engines/yahoo/search_test.go index e1367dfc..d3900b87 100644 --- a/src/search/engines/yahoo/search_test.go +++ b/src/search/engines/yahoo/search_test.go @@ -9,14 +9,11 @@ import ( ) func TestSearch(t *testing.T) { - // Search engine name - seName := info.Name - - // testing options + // Testing options. conf := _engines_test.NewConfig(seName) opt := _engines_test.NewOpts() - // test cases + // Test cases. 
tchar := []_engines_test.TestCaseHasAnyResults{{ Query: "ping", Options: opt, diff --git a/src/search/engines/yep/info.go b/src/search/engines/yep/info.go new file mode 100644 index 00000000..4da49475 --- /dev/null +++ b/src/search/engines/yep/info.go @@ -0,0 +1,12 @@ +package yep + +// import ( +// "github.com/hearchco/agent/src/search/engines" +// ) + +// const ( +// seName = engines.YEP +// searchURL = "https://api.yep.com/fs/2/search" +// ) + +// var origins = [...]engines.Name{engines.YEP} diff --git a/src/search/engines/yep/infoparams.go b/src/search/engines/yep/infoparams.go deleted file mode 100644 index 75994f58..00000000 --- a/src/search/engines/yep/infoparams.go +++ /dev/null @@ -1,23 +0,0 @@ -package yep - -// import ( -// "github.com/hearchco/agent/src/search/engines" -// "github.com/hearchco/agent/src/search/scraper" -// ) - -// var info = scraper.Info{ -// Name: engines.YEP, -// Domain: "yep.com", -// URL: "https://api.yep.com/fs/2/search", -// Origins: []engines.Name{engines.YEP}, -// } - -// var params = scraper.Params{ -// Page: "limit", -// Locale: "gl", // Should be last 2 characters of Locale. -// SafeSearch: "safeSearch", // Can be "off" or "strict". -// } - -// const clientParam = "client=web" -// const no_correctParam = "no_correct=false" -// const typeParam = "type=web" diff --git a/src/search/engines/yep/params.go b/src/search/engines/yep/params.go index 59d992e2..65987c9a 100644 --- a/src/search/engines/yep/params.go +++ b/src/search/engines/yep/params.go @@ -7,15 +7,25 @@ package yep // "github.com/hearchco/agent/src/search/engines/options" // ) +// const ( +// paramKeyPage = "limit" +// paramKeyLocale = "gl" // Should be last 2 characters of Locale. +// paramKeySafeSearch = "safeSearch" // Can be "off" or "strict". 
+ +// paramClient = "client=web" +// paramNo_correct = "no_correct=false" +// paramType = "type=web" +// ) + // func localeParamString(locale options.Locale) string { // country := strings.Split(locale.String(), "_")[1] -// return fmt.Sprintf("%v=%v", params.Locale, country) +// return fmt.Sprintf("%v=%v", paramKeyLocale, country) // } // func safeSearchParamString(safesearch bool) string { // if safesearch { -// return fmt.Sprintf("%v=%v", params.SafeSearch, "strict") +// return fmt.Sprintf("%v=%v", paramKeySafeSearch, "strict") // } else { -// return fmt.Sprintf("%v=%v", params.SafeSearch, "off") +// return fmt.Sprintf("%v=%v", paramKeySafeSearch, "off") // } // } diff --git a/src/search/engines/yep/search.go b/src/search/engines/yep/search.go index 0410bced..c08f33e2 100644 --- a/src/search/engines/yep/search.go +++ b/src/search/engines/yep/search.go @@ -5,6 +5,7 @@ package yep // "fmt" // "strconv" // "strings" +// "sync/atomic" // "github.com/gocolly/colly/v2" // "github.com/rs/zerolog/log" @@ -21,14 +22,15 @@ package yep // scraper.EngineBase // } -// func New() *Engine { -// return &Engine{EngineBase: scraper.EngineBase{ -// Name: info.Name, -// Origins: info.Origins, +// func New() scraper.Enginer { +// return &Engine{scraper.EngineBase{ +// Name: seName, +// Origins: origins[:], // }} // } // func (se Engine) Search(query string, opts options.Options, resChan chan result.ResultScraped) ([]error, bool) { +// foundResults := atomic.Bool{} // retErrors := make([]error, 0, opts.Pages.Max) // pageRankCounter := scraper.NewPageRankCounter(opts.Pages.Max) @@ -94,13 +96,16 @@ package yep // Msg("Sending result to channel") // resChan <- r // pageRankCounter.Increment(pageIndex) +// if !foundResults.Load() { +// foundResults.Store(true) +// } // } // } // }) // // Static params. 
-// localeParam := localeParamString(opts.Locale) -// safeSearchParam := safeSearchParamString(opts.SafeSearch) +// paramLocale := localeParamString(opts.Locale) +// paramSafeSearch := safeSearchParamString(opts.SafeSearch) // for i := range opts.Pages.Max { // pageNum := i + opts.Pages.Start @@ -108,17 +113,17 @@ package yep // ctx.Put("page", strconv.Itoa(i)) // // Dynamic params. -// pageParam := "" +// paramPage := "" // if pageNum > 0 { -// pageParam = fmt.Sprintf("%v=%v", params.Page, (pageNum+2)*10+1) +// paramPage = fmt.Sprintf("%v=%v", paramKeyPage, (pageNum+2)*10+1) // } -// combinedParamsLeft := morestrings.JoinNonEmpty([]string{clientParam, localeParam, pageParam, no_correctParam}, "&", "&") -// combinedParamsRight := morestrings.JoinNonEmpty([]string{safeSearchParam, typeParam}, "&", "&") +// combinedParamsLeft := morestrings.JoinNonEmpty([]string{paramClient, paramLocale, paramPage, paramNo_correct}, "?", "&") +// combinedParamsRight := morestrings.JoinNonEmpty([]string{paramSafeSearch, paramType}, "&", "&") // // Non standard order of params required -// urll := fmt.Sprintf("%v?%v&q=%v&%v", info.URL, combinedParamsLeft, query, combinedParamsRight) -// anonUrll := fmt.Sprintf("%v?%v&q=%v&%v", info.URL, combinedParamsLeft, anonymize.String(query), combinedParamsRight) +// urll := fmt.Sprintf("%v%v&q=%v%v", searchURL, combinedParamsLeft, query, combinedParamsRight) +// anonUrll := fmt.Sprintf("%v%v&q=%v%v", searchURL, combinedParamsLeft, anonymize.String(query), combinedParamsRight) // if err := se.Get(ctx, urll, anonUrll); err != nil { // retErrors = append(retErrors, err) diff --git a/src/search/engines/yep/search_test.go b/src/search/engines/yep/search_test.go index db85c794..e0d62b1a 100644 --- a/src/search/engines/yep/search_test.go +++ b/src/search/engines/yep/search_test.go @@ -9,14 +9,11 @@ package yep // ) // func TestSearch(t *testing.T) { -// // Search engine name -// seName := info.Name - -// // testing options +// // Testing options. 
// conf := _engines_test.NewConfig(seName) // opt := _engines_test.NewOpts() -// // test cases +// // Test cases. // tchar := []_engines_test.TestCaseHasAnyResults{{ // Query: "ping", // Options: opt, diff --git a/src/search/result/rank/score.go b/src/search/result/rank/score.go index 1799e5bb..a30c15d2 100644 --- a/src/search/result/rank/score.go +++ b/src/search/result/rank/score.go @@ -14,18 +14,17 @@ func (r Results) calculateScores(rconf config.CategoryRanking) { } } -// Only calculates the score for one result. +// Calculates the score for one result. func calculateScore(res result.Result, rconf config.CategoryRanking) float64 { - retRankScore := float64(0) + var retRankScore float64 = 0 for _, er := range res.EngineRanks() { - seMul := rconf.Engines[er.SearchEngine().ToLower()].Mul - seConst := rconf.Engines[er.SearchEngine().ToLower()].Const //these 2 could be preproced into array - retRankScore += (100.0/math.Pow(float64(er.Rank())*rconf.A+rconf.B, rconf.REXP)*rconf.C+rconf.D)*seMul + seConst + eng := rconf.Engines[er.SearchEngine().ToLower()] + retRankScore += (100.0/math.Pow(float64(er.Rank())*rconf.A+rconf.B, rconf.REXP)*rconf.C+rconf.D)*eng.Mul + eng.Const } - retRankScore /= float64(len(res.EngineRanks())) + retRankScore /= float64(len(res.EngineRanks())) timesReturnedScore := math.Log(float64(len(res.EngineRanks()))*rconf.TRA+rconf.TRB)*10*rconf.TRC + rconf.TRD - score := retRankScore + timesReturnedScore + return retRankScore + timesReturnedScore } diff --git a/src/search/scraper/infoparams.go b/src/search/scraper/infoparams.go deleted file mode 100644 index 3f221ff7..00000000 --- a/src/search/scraper/infoparams.go +++ /dev/null @@ -1,17 +0,0 @@ -package scraper - -import "github.com/hearchco/agent/src/search/engines" - -type Info struct { - Name engines.Name - Domain string - URL string - Origins []engines.Name -} - -type Params struct { - Page string - Locale string - LocaleSec string - SafeSearch string -} From 744724478c7d242d7812fa74700236e13139f7cb Mon Sep 17 00:00:00
2001 From: =?UTF-8?q?Aleksa=20Siri=C5=A1ki?= <31509435+aleksasiriski@users.noreply.github.com> Date: Wed, 19 Jun 2024 12:16:21 +0200 Subject: [PATCH 2/3] fix: switch to consts for first origin --- src/search/engines/bing/info.go | 2 +- src/search/engines/bingimages/info.go | 2 +- src/search/engines/brave/info.go | 2 +- src/search/engines/duckduckgo/info.go | 2 +- src/search/engines/etools/info.go | 2 +- src/search/engines/google/info.go | 2 +- src/search/engines/googleimages/info.go | 2 +- src/search/engines/googlescholar/info.go | 2 +- src/search/engines/mojeek/info.go | 2 +- src/search/engines/presearch/info.go | 2 +- src/search/engines/qwant/info.go | 2 +- src/search/engines/startpage/info.go | 2 +- src/search/engines/swisscows/info.go | 2 +- src/search/engines/yahoo/info.go | 2 +- src/search/engines/yep/info.go | 2 +- 15 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/search/engines/bing/info.go b/src/search/engines/bing/info.go index 53ccd1e9..0085d2bc 100644 --- a/src/search/engines/bing/info.go +++ b/src/search/engines/bing/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://www.bing.com/search" ) -var origins = [...]engines.Name{engines.BING} +var origins = [...]engines.Name{seName} diff --git a/src/search/engines/bingimages/info.go b/src/search/engines/bingimages/info.go index 45f4b956..49eccff5 100644 --- a/src/search/engines/bingimages/info.go +++ b/src/search/engines/bingimages/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://www.bing.com/images/async" ) -var origins = [...]engines.Name{engines.BINGIMAGES} +var origins = [...]engines.Name{seName} diff --git a/src/search/engines/brave/info.go b/src/search/engines/brave/info.go index 64da1987..e6d7a328 100644 --- a/src/search/engines/brave/info.go +++ b/src/search/engines/brave/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://search.brave.com/search" ) -var origins = [...]engines.Name{engines.BRAVE, engines.GOOGLE} +var origins = [...]engines.Name{seName, engines.GOOGLE} diff 
--git a/src/search/engines/duckduckgo/info.go b/src/search/engines/duckduckgo/info.go index 5b6ef099..8bdd74f2 100644 --- a/src/search/engines/duckduckgo/info.go +++ b/src/search/engines/duckduckgo/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://lite.duckduckgo.com/lite/" ) -var origins = [...]engines.Name{engines.DUCKDUCKGO, engines.BING} +var origins = [...]engines.Name{seName, engines.BING} diff --git a/src/search/engines/etools/info.go b/src/search/engines/etools/info.go index d58bbea9..8df61110 100644 --- a/src/search/engines/etools/info.go +++ b/src/search/engines/etools/info.go @@ -10,4 +10,4 @@ const ( pageURL = "https://www.etools.ch/search.do" ) -var origins = [...]engines.Name{engines.ETOOLS, engines.BING, engines.BRAVE, engines.DUCKDUCKGO, engines.GOOGLE, engines.MOJEEK, engines.QWANT, engines.YAHOO} +var origins = [...]engines.Name{seName, engines.BING, engines.BRAVE, engines.DUCKDUCKGO, engines.GOOGLE, engines.MOJEEK, engines.QWANT, engines.YAHOO} diff --git a/src/search/engines/google/info.go b/src/search/engines/google/info.go index 5963d0f6..2d89ce70 100644 --- a/src/search/engines/google/info.go +++ b/src/search/engines/google/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://www.google.com/search" ) -var origins = [...]engines.Name{engines.GOOGLE} +var origins = [...]engines.Name{seName} diff --git a/src/search/engines/googleimages/info.go b/src/search/engines/googleimages/info.go index 8b95b5ab..92d4e29e 100644 --- a/src/search/engines/googleimages/info.go +++ b/src/search/engines/googleimages/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://www.google.com/search" ) -var origins = [...]engines.Name{engines.GOOGLEIMAGES} +var origins = [...]engines.Name{seName} diff --git a/src/search/engines/googlescholar/info.go b/src/search/engines/googlescholar/info.go index c77ea12c..8c0f1701 100644 --- a/src/search/engines/googlescholar/info.go +++ b/src/search/engines/googlescholar/info.go @@ -9,4 +9,4 @@ const ( searchURL = 
"https://scholar.google.com/scholar" ) -var origins = [...]engines.Name{engines.GOOGLESCHOLAR} +var origins = [...]engines.Name{seName} diff --git a/src/search/engines/mojeek/info.go b/src/search/engines/mojeek/info.go index 10b0e08e..247c2feb 100644 --- a/src/search/engines/mojeek/info.go +++ b/src/search/engines/mojeek/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://www.mojeek.com/search" ) -var origins = [...]engines.Name{engines.MOJEEK} +var origins = [...]engines.Name{seName} diff --git a/src/search/engines/presearch/info.go b/src/search/engines/presearch/info.go index a02fbd8b..32bd06ce 100644 --- a/src/search/engines/presearch/info.go +++ b/src/search/engines/presearch/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://presearch.com/search" ) -var origins = [...]engines.Name{engines.PRESEARCH, engines.GOOGLE} +var origins = [...]engines.Name{seName, engines.GOOGLE} diff --git a/src/search/engines/qwant/info.go b/src/search/engines/qwant/info.go index 02515f62..37481349 100644 --- a/src/search/engines/qwant/info.go +++ b/src/search/engines/qwant/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://api.qwant.com/v3/search/web" ) -var origins = [...]engines.Name{engines.QWANT, engines.BING} +var origins = [...]engines.Name{seName, engines.BING} diff --git a/src/search/engines/startpage/info.go b/src/search/engines/startpage/info.go index f9598899..9332a3ef 100644 --- a/src/search/engines/startpage/info.go +++ b/src/search/engines/startpage/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://www.startpage.com/sp/search" ) -var origins = [...]engines.Name{engines.STARTPAGE, engines.GOOGLE} +var origins = [...]engines.Name{seName, engines.GOOGLE} diff --git a/src/search/engines/swisscows/info.go b/src/search/engines/swisscows/info.go index ea28df63..2690022c 100644 --- a/src/search/engines/swisscows/info.go +++ b/src/search/engines/swisscows/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://api.swisscows.com/web/search" ) -var origins = 
[...]engines.Name{engines.SWISSCOWS, engines.BING} +var origins = [...]engines.Name{seName, engines.BING} diff --git a/src/search/engines/yahoo/info.go b/src/search/engines/yahoo/info.go index fb1996f9..9df89469 100644 --- a/src/search/engines/yahoo/info.go +++ b/src/search/engines/yahoo/info.go @@ -9,4 +9,4 @@ const ( searchURL = "https://search.yahoo.com/search" ) -var origins = [...]engines.Name{engines.YAHOO, engines.BING} +var origins = [...]engines.Name{seName, engines.BING} diff --git a/src/search/engines/yep/info.go b/src/search/engines/yep/info.go index 4da49475..57de5781 100644 --- a/src/search/engines/yep/info.go +++ b/src/search/engines/yep/info.go @@ -9,4 +9,4 @@ package yep // searchURL = "https://api.yep.com/fs/2/search" // ) -// var origins = [...]engines.Name{engines.YEP} +// var origins = [...]engines.Name{seName} From 295ce278cffac7058b1f1529c13fc4e8886a47f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aleksa=20Siri=C5=A1ki?= <31509435+aleksasiriski@users.noreply.github.com> Date: Wed, 19 Jun 2024 12:17:56 +0200 Subject: [PATCH 3/3] chore: general category etools comment --- src/config/defaults_cat_general.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/config/defaults_cat_general.go b/src/config/defaults_cat_general.go index 3a2d22dc..7c293196 100644 --- a/src/config/defaults_cat_general.go +++ b/src/config/defaults_cat_general.go @@ -29,7 +29,7 @@ var generalRequiredByOriginEngines = []engines.Name{ } var generalPreferredEngines = []engines.Name{ - engines.ETOOLS, + engines.ETOOLS, // Not in ByOrigin because it only gives 10 results across a lot of engines that it scrapes from. } var generalPreferredByOriginEngines = []engines.Name{