Skip to content

Commit

Permalink
new release
Browse files Browse the repository at this point in the history
  • Loading branch information
tamerh committed Oct 29, 2019
1 parent 8734c56 commit 31aee89
Show file tree
Hide file tree
Showing 16 changed files with 65 additions and 52 deletions.
22 changes: 20 additions & 2 deletions biobtree.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ import (
"github.com/urfave/cli"
)

const version = "1.1.4"
const versionTag = "v1.1.4"
const version = "1.2.0"
const versionTag = "v1.2.0"

var config *conf.Conf

Expand Down Expand Up @@ -148,6 +148,13 @@ func main() {
return runWebCommand(c)
},
},
{
Name: "install",
Usage: "Install configuration files. Used for genomes and datasets listing",
Action: func(c *cli.Context) error {
return runInstallCommand(c)
},
},
{
Name: "alias",
Usage: "Recreates alias db this is used if new aliases wants to added while keeping existing state",
Expand Down Expand Up @@ -350,6 +357,17 @@ func runWebCommand(c *cli.Context) error {

}

// runInstallCommand implements the "install" CLI subcommand. It reads the
// configuration and output directories from the global CLI flags and runs
// conf.Conf.Init with the install flag set to true (per the subcommand's
// usage text, this sets up the configuration files used for genome and
// dataset listings — confirm against conf.Conf.Init).
func runInstallCommand(c *cli.Context) error {

	// Tightly-scoped flag lookups: both are global string flags.
	var (
		configDir = c.GlobalString("confdir")
		outputDir = c.GlobalString("out_dir")
	)

	// Reset the package-level config and initialize it in install mode,
	// pinned to this binary's release tag.
	config = &conf.Conf{}
	config.Init(configDir, versionTag, true, outputDir)

	return nil
}

func runProfileCommand(c *cli.Context) error {

confdir := c.GlobalString("confdir")
Expand Down
12 changes: 0 additions & 12 deletions conf/optional.dataset.json
Original file line number Diff line number Diff line change
Expand Up @@ -269,12 +269,6 @@
"id": "544",
"url": "http://mammoth.bcm.tmc.edu/cgi-bin/report_maker_ls/uniprotTraceServerResults.pl?identifier=£{id}"
},
"flybase": {
"aliases": "FlyBase",
"name": "FlyBase",
"id": "545",
"url": "http://flybase.org/reports/£{id}.html"
},
"foodb": {
"aliases": "FooDB,FooDB accession",
"name": "FooDB",
Expand Down Expand Up @@ -755,12 +749,6 @@
"id": "625",
"url": ""
},
"supfam": {
"aliases": "SUPFAM",
"name": "SUPFAM",
"id": "626",
"url": "http://supfam.org/SUPERFAMILY/cgi-bin/scop.cgi?ipid=£{id}"
},
"swiss-2dpage": {
"aliases": "SWISS-2DPAGE",
"name": "SWISS-2DPAGE",
Expand Down
2 changes: 1 addition & 1 deletion ensembl/ensembl.paths.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion service/mapfilter.go
Original file line number Diff line number Diff line change
Expand Up @@ -213,7 +213,7 @@ func (s *service) inputXrefs(ids []string, idsDomain uint32, filterq *query.Quer

if pages == nil {

res, err := s.search(ids, idsDomain, rootPage, filterq, false, false)
res, err := s.search(ids, idsDomain, rootPage, filterq, true, false)

if err != nil {
return nil, "", err
Expand Down
3 changes: 2 additions & 1 deletion service/service.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"io/ioutil"
"log"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
Expand Down Expand Up @@ -45,7 +46,7 @@ type service struct {
func (s *service) init() {

meta := make(map[string]interface{})
f, err := ioutil.ReadFile(config.Appconf["dbDir"] + "/db.meta.json")
f, err := ioutil.ReadFile(filepath.FromSlash(config.Appconf["dbDir"] + "/db.meta.json"))
if err != nil {
log.Fatalln("Error while reading meta information file which should be produced with generate command. Please make sure you did previous steps correctly.")
fmt.Printf("Error: %v", err)
Expand Down
6 changes: 3 additions & 3 deletions service/web.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ func (web *Web) Start(c *conf.Conf) {

web.service = s

// start grpc
rpc := biobtreegrpc{
service: s,
}
Expand All @@ -46,17 +45,18 @@ func (web *Web) Start(c *conf.Conf) {
web.metaRes = []byte(s.metajson())

searchGz := gziphandler.GzipHandler(http.HandlerFunc(web.search))
metaGz := gziphandler.GzipHandler(http.HandlerFunc(web.meta))
searchEntryGz := gziphandler.GzipHandler(http.HandlerFunc(web.entry))
mapFilterGz := gziphandler.GzipHandler(http.HandlerFunc(web.mapFilter))
searchPageGz := gziphandler.GzipHandler(http.HandlerFunc(web.searchPage))
searchFilterGz := gziphandler.GzipHandler(http.HandlerFunc(web.searchFilter))
metaGz := gziphandler.GzipHandler(http.HandlerFunc(web.meta))

http.Handle("/ws/", searchGz)
http.Handle("/ws/meta/", metaGz)
http.Handle("/ws/entry/", searchEntryGz)
http.Handle("/ws/map/", mapFilterGz)
http.Handle("/ws/page/", searchPageGz)
http.Handle("/ws/filter/", searchFilterGz)
http.Handle("/ws/meta/", metaGz)

//web ui
fs := http.FileServer(http.Dir("website"))
Expand Down
3 changes: 1 addition & 2 deletions update/ensembl.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,14 +63,13 @@ func (e *ensembl) getEnsemblPaths() (*ensemblPaths, string) {

}

func (e *ensembl) updateEnsemblPaths() (*ensemblPaths, string) {
func (e *ensembl) updateEnsemblPaths(version int) (*ensemblPaths, string) {

var branch string
var ftpAddress string
var ftpJSONPath string
var ftpMysqlPath string
var ftpBiomartFolder string
var version int
var err error

switch e.source {
Expand Down
23 changes: 15 additions & 8 deletions update/update.go
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,8 @@ func (d *DataUpdate) setEnsemblPaths() {

if _, ok := config.Appconf["disableEnsemblReleaseCheck"]; !ok {

if d.hasEnsemblNewRelease() {
hasNewRelease, version := d.hasEnsemblNewRelease()
if hasNewRelease {

ensembls := [6]ensembl{}
ensembls[0] = ensembl{source: "ensembl", d: d, branch: pbuf.Ensemblbranch_ENSEMBL}
Expand All @@ -445,7 +446,7 @@ func (d *DataUpdate) setEnsemblPaths() {
ensembls[5] = ensembl{source: "ensembl_protists", d: d, branch: pbuf.Ensemblbranch_PROTIST}

for _, ens := range ensembls {
ens.updateEnsemblPaths()
ens.updateEnsemblPaths(version)
time.Sleep(time.Duration(2) * time.Second) // just for not to kicked out from ensembl ftp
}

Expand All @@ -454,12 +455,13 @@ func (d *DataUpdate) setEnsemblPaths() {

}

func (d *DataUpdate) hasEnsemblNewRelease() bool {
func (d *DataUpdate) hasEnsemblNewRelease() (bool, int) {

epaths := ensemblPaths{}
pathFile := filepath.FromSlash(config.Appconf["ensemblDir"] + "/ensembl_metazoa.paths.json")
if !fileExists(pathFile) {
return true

return true, d.getLatestEnsemblVersion()
}
f, err := os.Open(pathFile)
check(err)
Expand All @@ -472,6 +474,14 @@ func (d *DataUpdate) hasEnsemblNewRelease() bool {
log.Fatal("Missing ensembl_version_url param")
}

latestVersion := d.getLatestEnsemblVersion()

return latestVersion != epaths.Version, latestVersion

}

func (d *DataUpdate) getLatestEnsemblVersion() int {

egversion := ensemblGLatestVersion{}
res, err := http.Get(config.Appconf["ensembl_version_url"])
if err != nil {
Expand All @@ -482,10 +492,7 @@ func (d *DataUpdate) hasEnsemblNewRelease() bool {
log.Fatal("Error while getting ensembl release info from its rest service. This error could be temporary try again later or use param disableEnsemblReleaseCheck", err)
}
err = json.Unmarshal(body, &egversion)

//fmt.Println(egversion.Version)
//fmt.Println(epaths.Version)
return egversion.Version != epaths.Version
return egversion.Version
}

func (d *DataUpdate) showProgres() {
Expand Down
30 changes: 15 additions & 15 deletions update/update_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ var loadConf = initConf()
func initConf() bool {

c := conf.Conf{}
c.Init("../", "", []string{}, []string{}, true)
c.Init("../", "", true, "")
config = &c
return true

Expand Down Expand Up @@ -55,7 +55,7 @@ func TestHgnc(t *testing.T) {
config.Appconf["kvgenCount"] = "4"
config.Appconf["kvgenChunkSize"] = "13"

d := NewDataUpdate([]string{"hgnc"}, []string{}, []string{}, config, "1")
d := NewDataUpdate([]string{"hgnc"}, []string{}, []string{}, []string{}, config, "1")

parsed, kvs := d.Update()

Expand All @@ -68,7 +68,7 @@ func TestHgnc(t *testing.T) {
}

var m = generate.Merge{}
j, k, _ := m.Merge(config)
j, k, _ := m.Merge(config, false)

if j != 18 {
panic("merge write key value is invalid")
Expand Down Expand Up @@ -110,7 +110,7 @@ func TestKeyLink(t *testing.T) {
config.Appconf["kvgenChunkSize"] = "13"
config.Appconf["pageSize"] = "2"

d := NewDataUpdate([]string{"uniprot"}, []string{}, []string{}, config, "1")
d := NewDataUpdate([]string{"uniprot"}, []string{}, []string{}, []string{}, config, "1")

parsed, kvs := d.Update()

Expand All @@ -123,7 +123,7 @@ func TestKeyLink(t *testing.T) {
}

var m = generate.Merge{}
j, k, l := m.Merge(config)
j, k, l := m.Merge(config, false)

if j != 8 {
panic("merge write key value is invalid")
Expand Down Expand Up @@ -160,7 +160,7 @@ func TestPaging(t *testing.T) {
//c.Appconf["kvgenChunkSize"] = "13"
config.Appconf["pageSize"] = "2"

d := NewDataUpdate([]string{"hgnc"}, []string{}, []string{}, config, "1")
d := NewDataUpdate([]string{"hgnc"}, []string{}, []string{}, []string{}, config, "1")

parsed, kvs := d.Update()

Expand All @@ -173,7 +173,7 @@ func TestPaging(t *testing.T) {
}

var m = generate.Merge{}
j, k, _ := m.Merge(config)
j, k, _ := m.Merge(config, false)

if j != 19 { // todo empty xref key hgnc:2 is not written??
panic("merge write key value is invalid")
Expand Down Expand Up @@ -207,7 +207,7 @@ func TestTargetDbs(t *testing.T) {
config.Appconf["kvgenCount"] = "4"
config.Appconf["kvgenChunkSize"] = "13"

d := NewDataUpdate([]string{"hgnc"}, []string{"VEGA"}, []string{}, config, "1")
d := NewDataUpdate([]string{"hgnc"}, []string{"VEGA"}, []string{}, []string{}, config, "1")

parsed, kvs := d.Update()

Expand All @@ -220,7 +220,7 @@ func TestTargetDbs(t *testing.T) {
}

var m = generate.Merge{}
j, k, _ := m.Merge(config)
j, k, _ := m.Merge(config, false)

if j != 10 {
panic("merge write key value is invalid")
Expand Down Expand Up @@ -253,7 +253,7 @@ func TestDuplicateValue(t *testing.T) {
config.Appconf["kvgenCount"] = "1"
config.Appconf["kvgenChunkSize"] = "20"

d := NewDataUpdate([]string{"hgnc"}, []string{}, []string{}, config, "1")
d := NewDataUpdate([]string{"hgnc"}, []string{}, []string{}, []string{}, config, "1")

parsed, kvs := d.Update()

Expand All @@ -266,7 +266,7 @@ func TestDuplicateValue(t *testing.T) {
}

var m = generate.Merge{}
j, k, _ := m.Merge(config)
j, k, _ := m.Merge(config, false)

if j != 3 {
panic("merge write key value is invalid")
Expand Down Expand Up @@ -345,7 +345,7 @@ func TestEnsembl(t *testing.T) {
config.Appconf["kvgenCount"] = "4"
config.Appconf["kvgenChunkSize"] = "13"

d := NewDataUpdate([]string{"ensembl"}, []string{}, []string{}, config, "1")
d := NewDataUpdate([]string{"ensembl"}, []string{}, []string{}, []string{}, config, "1")

parsed, kvs := d.Update()

Expand All @@ -358,7 +358,7 @@ func TestEnsembl(t *testing.T) {
}

var m = generate.Merge{}
j, k, l := m.Merge(config)
j, k, l := m.Merge(config, false)

if j != 11 {
panic("merge write key value is invalid")
Expand Down Expand Up @@ -402,12 +402,12 @@ func TestSamples(t *testing.T) {
config.Appconf["kvgenCount"] = "4"
config.Appconf["kvgenChunkSize"] = "1000000"

d := NewDataUpdate([]string{"uniprot", "uniref100", "uniref90", "uniref50", "uniparc", "taxonomy", "interpro"}, []string{}, []string{}, config, "1")
d := NewDataUpdate([]string{"uniprot", "uniref100", "uniref90", "uniref50", "uniparc", "taxonomy", "interpro"}, []string{}, []string{}, []string{}, config, "1")

d.Update()

var m = generate.Merge{}
i, j, _ := m.Merge(config)
i, j, _ := m.Merge(config, false)

fmt.Println("lmdb key value size", i)
fmt.Println("max uid", j)
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading

0 comments on commit 31aee89

Please sign in to comment.