From dbbfd0e6705a47ad9b8e97858af478e2ba939e1a Mon Sep 17 00:00:00 2001
From: Silvio Moioli
Date: Sat, 12 May 2018 00:13:58 +0200
Subject: [PATCH] Ability to download repos from SCC added

---
 README.md          |  30 +++++++-------
 cmd/sync.go        |  60 +++++++++++++++++++--------
 get/scc.go         | 100 +++++++++++++++++++++++++++++++++++++++++++++
 get/scc_test.go    |  33 +++++++++++++++
 get/syncer.go      |  13 +++---
 get/syncer_test.go |   7 +++-
 6 files changed, 206 insertions(+), 37 deletions(-)
 create mode 100644 get/scc.go
 create mode 100644 get/scc_test.go

diff --git a/README.md b/README.md
index 237362f..5b80bde 100644
--- a/README.md
+++ b/README.md
@@ -12,27 +12,29 @@ Currently, the only implemented functionality is the smart downloading of RPM re
 You can specify configuration in YAML either in a file (by default `minima.yaml`) or the `MINIMA_CONFIG` environment variable.
 
-A directory-based example `minima.yaml` is below:
+An example `minima.yaml` is below:
 
 ```yaml
 storage:
   type: file
   path: /srv/mirror
+  # uncomment to save to an AWS S3 bucket instead of the filesystem
+  # type: s3
+  # access_key_id: ACCESS_KEY_ID
+  # secret_access_key: SECRET_ACCESS_KEY
+  # region: us-east-1
+  # bucket: minima-bucket-key
 
 http:
 - url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-```
-
-An s3-based example `minima.yaml` is below:
-```yaml
-storage:
-  type: s3
-  access_key_id: ACCESS_KEY_ID
-  secret_access_key: SECRET_ACCESS_KEY
-  region: us-east-1
-  bucket: minima-bucket-key
-
-- url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-  archs: [x86_64]
+  archs: [x86_64]
+
+# optional section to download repos from SCC
+# scc:
+#   username: UC7
+#   password: ***REMOVED***
+#   repo_names:
+#   - SLES12-SP2-LTSS-Updates
+#   archs: [x86_64]
 ```
 
 To sync repositories, use `minima sync`.
diff --git a/cmd/sync.go b/cmd/sync.go
index 34c9e0c..537e40a 100644
--- a/cmd/sync.go
+++ b/cmd/sync.go
@@ -19,24 +19,29 @@ var syncCmd = &cobra.Command{
 
 	You can specify configuration in YAML either in a file or the MINIMA_CONFIG environment variable.
 
-	A directory-based example minima.yaml is below:
+	An example minima.yaml is below:
+
 	storage:
 	  type: file
 	  path: /srv/mirror
+	  # uncomment to save to an AWS S3 bucket instead of the filesystem
+	  # type: s3
+	  # access_key_id: ACCESS_KEY_ID
+	  # secret_access_key: SECRET_ACCESS_KEY
+	  # region: us-east-1
+	  # bucket: minima-bucket-key
 
 	http:
 	- url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-
-	An s3-based example minima.yaml is below:
-	storage:
-	  type: s3
-	  access_key_id: ACCESS_KEY_ID
-	  secret_access_key: SECRET_ACCESS_KEY
-	  region: us-east-1
-	  bucket: minima-bucket-key
-
-	- url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-	  archs: [x86_64]
+	  archs: [x86_64]
+
+	# optional section to download repos from SCC
+	# scc:
+	#   username: UC7
+	#   password: ***REMOVED***
+	#   repo_names:
+	#   - SLES12-SP2-LTSS-Updates
+	#   archs: [x86_64]
 	`,
 	Run: func(cmd *cobra.Command, args []string) {
 		syncers, err := syncersFromConfig(cfgString)
@@ -44,7 +49,7 @@ var syncCmd = &cobra.Command{
 			log.Fatal(err)
 		}
 		for _, syncer := range syncers {
-			log.Printf("Processing repo: %s", syncer.Url)
+			log.Printf("Processing repo: %s", syncer.URL.String())
 			err := syncer.StoreRepo()
 			if err != nil {
 				log.Println(err)
@@ -67,10 +72,19 @@ type Config struct {
 		Region string
 		Bucket string
 	}
-	HTTP []struct {
-		URL   string
-		Archs []string
+	SCC struct {
+		Username  string
+		Password  string
+		RepoNames []string `yaml:"repo_names"`
+		Archs     []string
 	}
+	HTTP []HTTPRepoConfig
+}
+
+// HTTPRepoConfig defines the configuration of an HTTP repo
+type HTTPRepoConfig struct {
+	URL   string
+	Archs []string
 }
 
 func syncersFromConfig(configString string) (result []*get.Syncer, err error) {
@@ -82,6 +96,17 @@ func syncersFromConfig(configString string) (result []*get.Syncer, err error) {
 		return nil, fmt.Errorf("Configuration parse error: unrecognised storage type")
 	}
 
+	if config.SCC.Username != "" {
+		httpURLs, err := get.SCCURLs("https://scc.suse.com", config.SCC.Username, config.SCC.Password, config.SCC.RepoNames, config.SCC.Archs)
+		if err != nil {
+			return nil, err
+		}
+
+		for _, httpURL := range httpURLs {
+			config.HTTP = append(config.HTTP, HTTPRepoConfig{httpURL, config.SCC.Archs})
+		}
+	}
+
 	for _, httpRepo := range config.HTTP {
 		repoURL, err := url.Parse(httpRepo.URL)
 		if err != nil {
@@ -103,7 +128,8 @@ func syncersFromConfig(configString string) (result []*get.Syncer, err error) {
 				return nil, err
 			}
 		}
-		result = append(result, get.NewSyncer(httpRepo.URL, archs, storage))
+
+		result = append(result, get.NewSyncer(*repoURL, archs, storage))
 	}
 
 	return
diff --git a/get/scc.go b/get/scc.go
new file mode 100644
index 0000000..57d50ab
--- /dev/null
+++ b/get/scc.go
@@ -0,0 +1,100 @@
+package get
+
+import (
+	"encoding/base64"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"regexp"
+	"strings"
+)
+
+// SCCURLs returns URLs for repos in SCC
+func SCCURLs(baseURL string, username string, password string, nameFilters []string, descriptionFilters []string) (urls []string, err error) {
+	urls = []string{}
+
+	token := base64.URLEncoding.EncodeToString([]byte(username + ":" + password))
+
+	fmt.Println("Repos available in SCC follow:")
+	next := baseURL + "/connect/organizations/repositories"
+	for {
+		var page []byte
+		page, next, err = _downloadPaged(next, token)
+		if err != nil {
+			return nil, err
+		}
+
+		type Repo struct {
+			URL          string
+			Name         string
+			Description  string
+			DistroTarget string `json:"distro_target"`
+		}
+
+		var repos []Repo
+		err := json.Unmarshal(page, &repos)
+		if err != nil {
+			return nil, err
+		}
+
+		for _, repo := range repos {
+			fmt.Printf(" %s: %s\n", repo.Name, repo.Description)
+			if _matches(repo.Name, repo.Description, nameFilters, descriptionFilters) {
+				urls = append(urls, repo.URL)
+			}
+		}
+
+		if next == "" {
+			break
+		}
+	}
+	fmt.Println(urls)
+
+	return
+}
+
+func _matches(name string, description string, nameFilters []string, descriptionFilters []string) bool {
+	for _, nameFilter := range nameFilters {
+		if strings.Contains(name, nameFilter) {
+			for _, descriptionFilter := range descriptionFilters {
+				if strings.Contains(description, descriptionFilter) {
+					return true
+				}
+			}
+		}
+	}
+	return false
+}
+
+func _downloadPaged(url string, token string) (page []byte, next string, err error) {
+	req, err := http.NewRequest("GET", url, nil)
+	if err != nil {
+		return
+	}
+
+	req.Header.Add("Authorization", fmt.Sprintf("Basic %s", token))
+	req.Header.Add("Accept", "application/vnd.scc.suse.com.v4+json")
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		return
+	}
+
+	if resp.StatusCode != 200 {
+		err = &UnexpectedStatusCodeError{url, resp.StatusCode}
+		return
+	}
+
+	page, err = ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return
+	}
+
+	re := regexp.MustCompile("<([^>]+)>; rel=\"next\"")
+	matches := re.FindStringSubmatch(resp.Header["Link"][0])
+	if matches != nil {
+		next = matches[1]
+	}
+
+	return
+}
diff --git a/get/scc_test.go b/get/scc_test.go
new file mode 100644
index 0000000..f5acd9c
--- /dev/null
+++ b/get/scc_test.go
@@ -0,0 +1,33 @@
+package get
+
+import (
+	"fmt"
+	"net/http"
+	"testing"
+)
+
+func TestSCCURLs(t *testing.T) {
+	// Serve two pages of SCC repository data, chained via the Link response header
+	http.HandleFunc("/connect/organizations/repositories", func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Link", "<http://localhost:8080/connect/organizations/repositories2>; rel=\"next\"")
+		fmt.Fprintf(w, "[{\"url\" : \"test\", \"name\" : \"test\", \"description\" : \"test\"}]")
+	})
+
+	http.HandleFunc("/connect/organizations/repositories2", func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Link", "")
+		fmt.Fprintf(w, "[{\"url\" : \"test2\", \"name\" : \"test2\", \"description\" : \"test2\"}]")
+	})
+
+	urls, err := SCCURLs("http://localhost:8080", "user", "pass", []string{"test2"}, []string{""})
+	if err != nil {
+		t.Error(err)
+	}
+
+	if len(urls) != 1 {
+		t.Error("expected 1 url")
+	}
+
+	if urls[0] != "test2" {
+		t.Error("expected test2, got " + urls[0])
+	}
+}
diff --git a/get/syncer.go b/get/syncer.go
index 2b99e99..832f919 100644
--- a/get/syncer.go
+++ b/get/syncer.go
@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"io"
 	"log"
+	"net/url"
 	"path"
 
 	"github.com/moio/minima/util"
@@ -64,13 +65,13 @@ const repomdPath = "repodata/repomd.xml"
 
 // Syncer syncs repos from an HTTP source to a Storage
 type Syncer struct {
 	// URL of the repo this syncer syncs
-	Url     string
+	URL     url.URL
 	archs   map[string]bool
 	storage Storage
 }
 
 // NewSyncer creates a new Syncer
-func NewSyncer(url string, archs map[string]bool, storage Storage) *Syncer {
+func NewSyncer(url url.URL, archs map[string]bool, storage Storage) *Syncer {
 	return &Syncer{url, archs, storage}
 }
@@ -145,14 +146,16 @@ func (r *Syncer) downloadStore(path string, description string) error {
 }
 
 // downloadStoreApply downloads a repo-relative path into a file, while applying a ReaderConsumer
-func (r *Syncer) downloadStoreApply(path string, checksum string, description string, hash crypto.Hash, f util.ReaderConsumer) error {
+func (r *Syncer) downloadStoreApply(relativePath string, checksum string, description string, hash crypto.Hash, f util.ReaderConsumer) error {
 	log.Printf("Downloading %v...", description)
-	body, err := ReadURL(r.Url + "/" + path)
+	url := r.URL
+	url.Path = path.Join(r.URL.Path, relativePath)
+	body, err := ReadURL(url.String())
 	if err != nil {
 		return err
 	}
 
-	return util.Compose(r.storage.StoringMapper(path, checksum, hash), f)(body)
+	return util.Compose(r.storage.StoringMapper(relativePath, checksum, hash), f)(body)
 }
 
 // processMetadata stores the repo metadata and returns a list of package file
diff --git a/get/syncer_test.go b/get/syncer_test.go
index 5847902..7ea305a 100644
--- a/get/syncer_test.go
+++ b/get/syncer_test.go
@@ -2,6 +2,7 @@ package get
 
 import (
 	"net/http"
+	"net/url"
 	"os"
 	"path/filepath"
 	"testing"
@@ -21,7 +22,11 @@ func TestStoreRepo(t *testing.T) {
 		"x86_64": true,
 	}
 	storage := NewFileStorage(directory)
-	syncer := NewSyncer("http://localhost:8080/repo", archs, storage)
+	url, err := url.Parse("http://localhost:8080/repo")
+	if err != nil {
+		t.Error(err)
+	}
+	syncer := NewSyncer(*url, archs, storage)
 
 	// first sync
 	err = syncer.StoreRepo()
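The new `get.SCCURLs` helper introduced by this patch can also be exercised on its own, outside of `minima sync`, to check which SCC repositories a given set of filters would select. Below is a minimal, hypothetical sketch: it assumes the patch is applied and built under the module path `github.com/moio/minima` used elsewhere in the diff, and the credentials and repo name filter are placeholders, not real values.

```go
package main

import (
	"fmt"
	"log"

	"github.com/moio/minima/get"
)

func main() {
	// Placeholder organization credentials; in minima they come from the
	// scc: section of minima.yaml (username/password).
	urls, err := get.SCCURLs(
		"https://scc.suse.com", // same base URL used by cmd/sync.go
		"ORG_USERNAME",
		"ORG_PASSWORD",
		[]string{"SLES12-SP2-LTSS-Updates"}, // name filters (repo_names in the config)
		[]string{"x86_64"},                  // matched against repo descriptions (archs in the config)
	)
	if err != nil {
		log.Fatal(err)
	}

	// Print the repository URLs that matched both filters.
	for _, u := range urls {
		fmt.Println(u)
	}
}
```

Running it prints the same repository URLs that `minima sync` would append to the HTTP repo list for identical `scc:` settings.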