Ability to download repos from SCC added
moio committed May 11, 2018
1 parent 4892f57 commit dbbfd0e
Showing 6 changed files with 206 additions and 37 deletions.
30 changes: 16 additions & 14 deletions README.md
@@ -12,27 +12,29 @@ Currently, the only implemented functionality is the smart downloading of RPM re

You can specify configuration in YAML either in a file (by default `minima.yaml`) or the `MINIMA_CONFIG` environment variable.

-A directory-based example `minima.yaml` is below:
+An example `minima.yaml` is below:
```yaml
storage:
  type: file
  path: /srv/mirror
+  # uncomment to save to an AWS S3 bucket instead of the filesystem
+  # type: s3
+  # access_key_id: ACCESS_KEY_ID
+  # secret_access_key: SECRET_ACCESS_KEY
+  # region: us-east-1
+  # bucket: minima-bucket-key

http:
  - url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-```
-An s3-based example `minima.yaml` is below:
-```yaml
-storage:
-  type: s3
-  access_key_id: ACCESS_KEY_ID
-  secret_access_key: SECRET_ACCESS_KEY
-  region: us-east-1
-  bucket: minima-bucket-key
-  - url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-    archs: [x86_64]
+    archs: [x86_64]
+
+# optional section to download repos from SCC
+# scc:
+#   username: UC7
+#   password: ***REMOVED***
+#   repo_names:
+#     - SLES12-SP2-LTSS-Updates
+#   archs: [x86_64]
```

To sync repositories, use `minima sync`.
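As a side note on the lookup order described above (the `MINIMA_CONFIG` environment variable, otherwise a `minima.yaml` file), here is a minimal Go sketch of that behaviour. `loadConfig` is a hypothetical helper shown purely for illustration; it is not minima's actual implementation.

```go
package main

import (
	"io/ioutil"
	"log"
	"os"
)

// loadConfig illustrates a possible lookup order: prefer the MINIMA_CONFIG
// environment variable, then fall back to a minima.yaml file in the
// current directory. Hypothetical helper, not minima's own code.
func loadConfig() (string, error) {
	if cfg := os.Getenv("MINIMA_CONFIG"); cfg != "" {
		return cfg, nil
	}
	bytes, err := ioutil.ReadFile("minima.yaml")
	if err != nil {
		return "", err
	}
	return string(bytes), nil
}

func main() {
	cfg, err := loadConfig()
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("loaded %d bytes of configuration", len(cfg))
}
```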
60 changes: 43 additions & 17 deletions cmd/sync.go
@@ -19,32 +19,37 @@ var syncCmd = &cobra.Command{
You can specify configuration in YAML either in a file or the MINIMA_CONFIG environment variable.
-A directory-based example minima.yaml is below:
+An example minima.yaml is below:
storage:
  type: file
  path: /srv/mirror
+  # uncomment to save to an AWS S3 bucket instead of the filesystem
+  # type: s3
+  # access_key_id: ACCESS_KEY_ID
+  # secret_access_key: SECRET_ACCESS_KEY
+  # region: us-east-1
+  # bucket: minima-bucket-key
http:
  - url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-An s3-based example minima.yaml is below:
-storage:
-  type: s3
-  access_key_id: ACCESS_KEY_ID
-  secret_access_key: SECRET_ACCESS_KEY
-  region: us-east-1
-  bucket: minima-bucket-key
-  - url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
-    archs: [x86_64]
+    archs: [x86_64]
+# optional section to download repos from SCC
+# scc:
+#   username: UC7
+#   password: ***REMOVED***
+#   repo_names:
+#     - SLES12-SP2-LTSS-Updates
+#   archs: [x86_64]
`,
	Run: func(cmd *cobra.Command, args []string) {
		syncers, err := syncersFromConfig(cfgString)
		if err != nil {
			log.Fatal(err)
		}
		for _, syncer := range syncers {
-			log.Printf("Processing repo: %s", syncer.Url)
+			log.Printf("Processing repo: %s", syncer.URL.String())
			err := syncer.StoreRepo()
			if err != nil {
				log.Println(err)
@@ -67,10 +72,19 @@ type Config struct {
		Region string
		Bucket string
	}
-	HTTP []struct {
-		URL   string
-		Archs []string
+	SCC struct {
+		Username  string
+		Password  string
+		RepoNames []string `yaml:"repo_names"`
+		Archs     []string
	}
+	HTTP []HTTPRepoConfig
}

+// HTTPRepoConfig defines the configuration of an HTTP repo
+type HTTPRepoConfig struct {
+	URL   string
+	Archs []string
+}

func syncersFromConfig(configString string) (result []*get.Syncer, err error) {
@@ -82,6 +96,17 @@ func syncersFromConfig(configString string) (result []*get.Syncer, err error) {
		return nil, fmt.Errorf("Configuration parse error: unrecognised storage type")
	}

	if config.SCC.Username != "" {
		httpURLs, err := get.SCCURLs("https://scc.suse.com", config.SCC.Username, config.SCC.Password, config.SCC.RepoNames, config.SCC.Archs)
		if err != nil {
			return nil, err
		}

		for _, httpURL := range httpURLs {
			config.HTTP = append(config.HTTP, HTTPRepoConfig{httpURL, config.SCC.Archs})
		}
	}

	for _, httpRepo := range config.HTTP {
		repoURL, err := url.Parse(httpRepo.URL)
		if err != nil {
@@ -103,7 +128,8 @@ func syncersFromConfig(configString string) (result []*get.Syncer, err error) {
				return nil, err
			}
		}
-		result = append(result, get.NewSyncer(httpRepo.URL, archs, storage))

+		result = append(result, get.NewSyncer(*repoURL, archs, storage))
	}

	return
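To make the new configuration shape concrete, here is a small, self-contained sketch of how the example YAML maps onto the `Config`/`HTTPRepoConfig` structs above. It assumes the `gopkg.in/yaml.v2` package and uses condensed copies of the structs with placeholder values (the `storage` section is omitted); this is an illustration, not the project's own code.

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v2"
)

// Condensed copies of the structs from cmd/sync.go, for illustration only.
type HTTPRepoConfig struct {
	URL   string
	Archs []string
}

type Config struct {
	SCC struct {
		Username  string
		Password  string
		RepoNames []string `yaml:"repo_names"`
		Archs     []string
	}
	HTTP []HTTPRepoConfig
}

const example = `
http:
  - url: http://download.opensuse.org/repositories/myrepo1/openSUSE_Leap_42.3/
    archs: [x86_64]

scc:
  username: UC7
  password: secret
  repo_names:
    - SLES12-SP2-LTSS-Updates
  archs: [x86_64]
`

func main() {
	var config Config
	if err := yaml.Unmarshal([]byte(example), &config); err != nil {
		log.Fatal(err)
	}
	fmt.Println(config.HTTP[0].URL)      // plain HTTP repo, synced directly
	fmt.Println(config.SCC.RepoNames[0]) // repo name to be resolved via SCC
}
```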
100 changes: 100 additions & 0 deletions get/scc.go
@@ -0,0 +1,100 @@
package get

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"regexp"
	"strings"
)

// SCCURLs returns URLs for repos in SCC
func SCCURLs(baseURL string, username string, password string, nameFilters []string, descriptionFilters []string) (urls []string, err error) {
	urls = []string{}

	token := base64.URLEncoding.EncodeToString([]byte(username + ":" + password))

	fmt.Println("Repos available in SCC follow:")
	next := baseURL + "/connect/organizations/repositories"
	for {
		var page []byte
		page, next, err = _downloadPaged(next, token)
		if err != nil {
			return nil, err
		}

		type Repo struct {
			URL          string
			Name         string
			Description  string
			DistroTarget string `json:"distro_target"`
		}

		var repos []Repo
		err := json.Unmarshal(page, &repos)
		if err != nil {
			return nil, err
		}

		for _, repo := range repos {
			fmt.Printf(" %s: %s\n", repo.Name, repo.Description)
			if _matches(repo.Name, repo.Description, nameFilters, descriptionFilters) {
				urls = append(urls, repo.URL)
			}
		}

		if next == "" {
			break
		}
	}
	fmt.Println(urls)

	return
}

// _matches reports whether name contains any of nameFilters and description
// contains any of descriptionFilters
func _matches(name string, description string, nameFilters []string, descriptionFilters []string) bool {
	for _, nameFilter := range nameFilters {
		if strings.Contains(name, nameFilter) {
			for _, descriptionFilter := range descriptionFilters {
				if strings.Contains(description, descriptionFilter) {
					return true
				}
			}
		}
	}
	return false
}

// _downloadPaged GETs one page from url with SCC authentication headers and
// returns the body plus the rel="next" link, if any
func _downloadPaged(url string, token string) (page []byte, next string, err error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return
	}

	req.Header.Add("Authorization", fmt.Sprintf("Basic %s", token))
	req.Header.Add("Accept", "application/vnd.scc.suse.com.v4+json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return
	}

	if resp.StatusCode != 200 {
		err = &UnexpectedStatusCodeError{url, resp.StatusCode}
		return
	}

	page, err = ioutil.ReadAll(resp.Body)
	if err != nil {
		return
	}

	re := regexp.MustCompile("<([^>]+)>; rel=\"next\"")
	matches := re.FindStringSubmatch(resp.Header["Link"][0])
	if matches != nil {
		next = matches[1]
	}

	return
}
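`_downloadPaged` follows SCC's pagination by extracting the `rel="next"` target from the `Link` response header. The standalone sketch below demonstrates that extraction with the same regular expression; the header value is made up for illustration.

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Example of the kind of Link header a paginated API returns
	// (RFC 5988 web linking); only the rel="next" target is of interest.
	link := `<https://scc.suse.com/connect/organizations/repositories?page=2>; rel="next", ` +
		`<https://scc.suse.com/connect/organizations/repositories?page=5>; rel="last"`

	re := regexp.MustCompile(`<([^>]+)>; rel="next"`)
	if matches := re.FindStringSubmatch(link); matches != nil {
		fmt.Println("next page:", matches[1])
	} else {
		fmt.Println("no further pages")
	}
}
```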
33 changes: 33 additions & 0 deletions get/scc_test.go
@@ -0,0 +1,33 @@
package get

import (
	"fmt"
	"net/http"
	"testing"
)

func TestSCCURLs(t *testing.T) {
	// Serve two pages of fake SCC repository JSON, linked via the rel="next" header
	http.HandleFunc("/connect/organizations/repositories", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Link", "<http://localhost:8080/connect/organizations/repositories2>; rel=\"next\"")
		fmt.Fprintf(w, "[{\"url\" : \"test\", \"name\" : \"test\", \"description\" : \"test\"}]")
	})

	http.HandleFunc("/connect/organizations/repositories2", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Link", "")
		fmt.Fprintf(w, "[{\"url\" : \"test2\", \"name\" : \"test2\", \"description\" : \"test2\"}]")
	})

	urls, err := SCCURLs("http://localhost:8080", "user", "pass", []string{"test2"}, []string{""})
	if err != nil {
		t.Error(err)
	}

	if len(urls) != 1 {
		t.Error("expected 1 url")
	}

	if urls[0] != "test2" {
		t.Error("expected test2, got " + urls[0])
	}
}
13 changes: 8 additions & 5 deletions get/syncer.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"log"
"net/url"
"path"

"github.com/moio/minima/util"
@@ -64,13 +65,13 @@ const repomdPath = "repodata/repomd.xml"
// Syncer syncs repos from an HTTP source to a Storage
type Syncer struct {
	// URL of the repo this syncer syncs
-	Url string
+	URL     url.URL
	archs   map[string]bool
	storage Storage
}

// NewSyncer creates a new Syncer
-func NewSyncer(url string, archs map[string]bool, storage Storage) *Syncer {
+func NewSyncer(url url.URL, archs map[string]bool, storage Storage) *Syncer {
	return &Syncer{url, archs, storage}
}

@@ -145,14 +146,16 @@ func (r *Syncer) downloadStore(path string, description string) error {
}

// downloadStoreApply downloads a repo-relative path into a file, while applying a ReaderConsumer
-func (r *Syncer) downloadStoreApply(path string, checksum string, description string, hash crypto.Hash, f util.ReaderConsumer) error {
+func (r *Syncer) downloadStoreApply(relativePath string, checksum string, description string, hash crypto.Hash, f util.ReaderConsumer) error {
	log.Printf("Downloading %v...", description)
-	body, err := ReadURL(r.Url + "/" + path)
+	url := r.URL
+	url.Path = path.Join(r.URL.Path, relativePath)
+	body, err := ReadURL(url.String())
	if err != nil {
		return err
	}

-	return util.Compose(r.storage.StoringMapper(path, checksum, hash), f)(body)
+	return util.Compose(r.storage.StoringMapper(relativePath, checksum, hash), f)(body)
}

// processMetadata stores the repo metadata and returns a list of package file
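One apparent motivation for switching `Syncer` from a plain string URL to a parsed `url.URL` plus `path.Join` is avoiding malformed paths when fragments are concatenated as strings. A minimal standalone demonstration (the repo URL here is just an example):

```go
package main

import (
	"fmt"
	"net/url"
	"path"
)

func main() {
	// String concatenation produces a double slash when the base URL
	// already ends in "/".
	base := "http://localhost:8080/repo/"
	fmt.Println(base + "/" + "repodata/repomd.xml")
	// http://localhost:8080/repo//repodata/repomd.xml

	// Joining on the parsed URL's path keeps the result clean.
	u, err := url.Parse(base)
	if err != nil {
		panic(err)
	}
	u.Path = path.Join(u.Path, "repodata/repomd.xml")
	fmt.Println(u.String())
	// http://localhost:8080/repo/repodata/repomd.xml
}
```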
7 changes: 6 additions & 1 deletion get/syncer_test.go
@@ -2,6 +2,7 @@ package get

import (
	"net/http"
+	"net/url"
	"os"
	"path/filepath"
	"testing"
@@ -21,7 +22,11 @@ func TestStoreRepo(t *testing.T) {
"x86_64": true,
}
storage := NewFileStorage(directory)
syncer := NewSyncer("http://localhost:8080/repo", archs, storage)
url, err := url.Parse("http://localhost:8080/repo")
if err != nil {
t.Error(err)
}
syncer := NewSyncer(*url, archs, storage)

	// first sync
	err = syncer.StoreRepo()
