Migration to GitHub Actions (#53)
* Create LICENSE.md

* WormJam v0.1.0 - Prerelease before curation round #3 (#48)

* Relocated Website/Documentation to WormJam Consortium Organisation

* Added Requirements.txt

* Improved annotations

* Added continuous integration pipeline, targeted at the devel branch that builds, tests and reports on the model

* Merge

* Drafting GitHub Actions Workflow

* Removed old check function from tsv_to_sbml.py

* Testing a build

* Separated Linting and Testing

* Added MEMOTE

* Test of reporting

* Test of env var

* Test of reporting

* updated secrets

* Moving CI to actions

* Updated Config File

* Updated settings path

* Formatted GitHub Actions pipeline with Black

* Added doc to send reports and failure_reporter - added intentional break to test fail handling

* Removed failure reporter trigger

* Updated workflow file

* Added Status Badge to readme and updated workflow name

* Updated Readme

* Added Dependabot
JakeHattwell authored Feb 24, 2021
1 parent 2fb4729 commit 79eda41
Showing 24 changed files with 50,214 additions and 49,467 deletions.
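The workflow file itself is among the 24 changed files but is not expanded in this excerpt. Going by the scripts added under .github/tests/ and the commit message above, a pipeline of roughly the following shape would fit; the trigger, job layout, action versions, requirements file name, and the tsv_to_sbml.py build step are assumptions, not the committed configuration.

# Hypothetical sketch only -- the committed workflow file is not shown in this excerpt.
name: WormJam CI

on:
  push:
    branches: [devel]

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"
      - name: Install dependencies
        run: pip install -r requirements.txt
      - name: Build the SBML model from the SBtab tables
        run: python tsv_to_sbml.py  # assumed entry point
      - name: Run FBA smoke tests
        run: |
          python .github/tests/basic_fba.py
          python .github/tests/restricted_fba.py
      - name: Run MEMOTE and build the report
        run: |
          python .github/tests/run_memote.py
          python .github/tests/result_web_gen.py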
12 changes: 12 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,12 @@
version: 2
updates:
  # Enable version updates for python
  - package-ecosystem: pip
    # Look for files in the `root` directory
    directory: "/"
    # check weekly
    schedule:
      interval: "weekly"
      # Check for updates on Sundays
      day: "sunday"
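Since the CI itself now runs on GitHub Actions, the same Dependabot file could also watch the action versions used by the workflow. The entry below is an optional extension, not part of this commit:

  # Optional (not in this commit): keep the workflow's actions up to date as well.
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "sunday"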

8 changes: 4 additions & 4 deletions travis/basic_fba.py → .github/tests/basic_fba.py
@@ -2,9 +2,9 @@

model = cobra.io.read_sbml_model("WormJam.xml")
print("Model:")
-print(len(model.reactions),"reactions")
-print(len(model.metabolites),"metabolites")
-print(len(model.genes),"genes")
+print(len(model.reactions), "reactions")
+print(len(model.metabolites), "metabolites")
+print(len(model.genes), "genes")

biomass_rxn = model.reactions.get_by_id("BIO0100")
model.objective = biomass_rxn
@@ -19,4 +19,4 @@
print(solution.objective_value)
print(solution.status)

-assert solution.objective_value!= 0, "Flux not carried in normal growth"
+assert solution.objective_value != 0, "Flux not carried in normal growth"
16 changes: 16 additions & 0 deletions .github/tests/config.py
@@ -0,0 +1,16 @@
import json

pipeline = {}

pipeline["name"] = input(
    "What should the SBML file be named? Do not include the .xml extension: "
)
pipeline["organism"] = input(
    "What is the name of the system? For example, Human Epithelial Cell or Caenorhabditis elegans: "
)
pipeline["short name"] = input("What is the abbreviated name? ")
pipeline["dbtable"] = input(
    "Are you using a databases table (Database-SBtab.tsv)? True/False: "
)
with open(r"travis/settings.json", "w+") as f:
    json.dump({"pipeline": pipeline}, f, indent=4)
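For reference, the settings file this prompt script writes has the shape below; the values shown are purely illustrative.

{
    "pipeline": {
        "name": "WormJam",
        "organism": "Caenorhabditis elegans",
        "short name": "WJ",
        "dbtable": "True"
    }
}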
39 changes: 39 additions & 0 deletions .github/tests/failure_reporter.py
@@ -0,0 +1,39 @@
import requests
import json
import sys
import datetime

DISCORD_ENDPOINT = sys.argv[1]  # GitHub Actions channel webhook
GITHUB_BUILD_NUMBER = sys.argv[2]  # GitHub build counter
GITHUB_BUILD_WEB_URL = sys.argv[3]  # GitHub unique run ID, used for link construction
GITHUB_REPO_SLUG = sys.argv[4]  # user/repo
GITHUB_REPO_BRANCH = sys.argv[5].split("/")[-1]  # branch - process the string and grab the last term

payload_json = {
    "embeds": [
        {
            "title": "WormJam CI Report",
            "color": 10027008,  # red
            "description": "A build has failed from [%s](%s) on branch %s"
            % (GITHUB_REPO_SLUG, "https://github.com/" + GITHUB_REPO_SLUG, GITHUB_REPO_BRANCH),
            "fields": [
                {"name": "Build Number", "value": str(GITHUB_BUILD_NUMBER)},
                {
                    "name": "Build logs",
                    "value": "Logs can be found [here](https://github.com/%s/actions/runs/%s)"
                    % (GITHUB_REPO_SLUG, GITHUB_BUILD_WEB_URL),
                },
            ],
            "thumbnail": {
                "url": "https://avatars1.githubusercontent.com/u/44036562?s=280&v=4"  # GitHub Actions logo
            },
            "timestamp": str(datetime.datetime.now().isoformat()),
        }
    ]
}

# send failure message
r = requests.post(
    DISCORD_ENDPOINT,
    data=json.dumps(payload_json),
    headers={"Content-Type": "application/json"},
)
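A failure-only workflow step along these lines would wire the reporter in; the secret name is an assumption, while the github.* context values are standard Actions fields matching the five positional arguments above.

# Hypothetical usage sketch -- the actual workflow step is not shown in this excerpt.
- name: Report failure to Discord
  if: failure()
  run: |
    python .github/tests/failure_reporter.py \
      "${{ secrets.DISCORD_WEBHOOK }}" \
      "${{ github.run_number }}" \
      "${{ github.run_id }}" \
      "${{ github.repository }}" \
      "${{ github.ref }}"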
147 changes: 147 additions & 0 deletions .github/tests/helper_classes.py
@@ -0,0 +1,147 @@
import os
import csv


class ModelSystem:
    """Class for reading SBtab files"""

    def __init__(self):
        """Initialization function"""
        self.tables = {}
        self.size = (
            {}
        )  # potentially worth removing this - It logs number of entries in every table, but no longer needed.

    def _load_table(self, name, filename):
        """Function to import a SBtab file into the ModelSystem, using the SBtable class"""
        self.tables[name] = SBtable(filename)
        self.size[name] = self.tables[name].rows - 2

    def load_folder(self, name):
        """Function to bulk import multiple SBtab files using a folder and _load_table"""
        success = False
        if os.path.isdir(name) == False:
            print(
                "The curation folder cannot be found. Unable to build the model. Aborting."
            )
            exit(1)
        else:
            print("Folder loaded")
        paths = []
        for f in os.listdir(name):
            if "SBtab.tsv" in f:
                filename = f.replace("-SBtab.tsv", "")
                paths.append(filename)
        try:
            assert paths != [], "There were no SBtab files found in " + name
        except AssertionError as error:
            print(error)
            exit(1)
        else:
            print("SBtab files found! Loading now!")
            self.count = 1
            for sbfile in paths:
                print(" ".join(["Loading file:", sbfile]))
                self._load_table(sbfile, name + "/" + sbfile + "-SBtab.tsv")

            print(" ".join([str(len(paths)), "files loaded into the model"]))
            success = True

    def validate_rxn_mets(self):
        """Function to check that all metabolites included in reactions are in the compounds table"""
        met_list = self.tables.get("Compound").data.keys()
        rxn_met_list = {}
        for key, val in self.tables.get("Reaction").data.items():
            r, p = self._process_reaction_string(val["!ReactionFormula"])
            sub_mets = []
            sub_mets.extend(r.keys())
            sub_mets.extend(p.keys())
            rxn_met_list[key] = sub_mets
        missing = {
            key: [met for met in val if met not in met_list]
            for key, val in rxn_met_list.items()
            if any(met not in met_list for met in val)
            and [met for met in val if met not in met_list] != [""]
        }
        return missing

    def _process_reaction_string(self, rxn):
        """Helper function to parse reaction strings"""

        r, p = rxn.split("<=>")

        def quick(frag):
            """splitting function"""
            frag = frag.split("+")
            frag = [
                i.rstrip().lstrip() for i in frag
            ]  # remove leading and trailing whitespace.
            frag = [i.split(" ") for i in frag]  # split into each compound
            return frag

        r = quick(r)
        p = quick(p)
        # packaging
        reactants = {
            (i[1] if len(i) == 2 else i[0]): (i[0] if len(i) == 2 else "1") for i in r
        }
        products = {
            (i[1] if len(i) == 2 else i[0]): (i[0] if len(i) == 2 else "1") for i in p
        }
        for d in [reactants, products]:
            for key, val in d.items():
                try:
                    d[key] = str(float(val))
                except:
                    pass
        return (reactants, products)


class SBtable:
    """Importable class for loading SBtab files.

    Converts an SBtab file into a nested dictionary.
    instance.data is a dictionary of the entries in the SBtab; each entry is a
    dictionary of the data associated with it, keyed by column header.

    Arguments:
        filename {str} -- Path to the SBtab file of interest.
    Keyword Arguments:
        headerRow {int} -- Row of the header information (default: {2})
    """

    def __init__(self, filename, headerRow=2):
        """Loads the SBtab file"""
        self.name = filename
        with open(filename, encoding="latin-1") as tsvfile:
            tsv = csv.reader(tsvfile, delimiter="\t")
            entries = []
            for row in tsv:
                if tsv.line_num == 1:  # row 1 - SBtab DocString
                    self.sbString = row[0]
                elif tsv.line_num == 2:  # row 2 - headers of the table
                    self.headers = row
                else:
                    entries.append(row)
        # define size of data
        self.cols = len(self.headers)
        self.rows = len(entries) + 2
        # create the nested dict object
        try:
            self.data = {
                entry[0]: {
                    self.headers[i]: (
                        entry[i] if len(entry) >= len(self.headers) else ""
                    )
                    for i in range(1, len(self.headers))
                }
                for entry in entries
            }
            # remove blank entries
            while "" in self.data:
                self.data.pop("")
        except:
            print(self.name)
            print("tsv import failed. Aborting...")
            exit()
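As a quick illustration of the reaction-string parser above (the metabolite identifiers are made up, and the module is assumed to be importable from the working directory), the helper returns a pair of stoichiometry dictionaries keyed by metabolite:

from helper_classes import ModelSystem

ms = ModelSystem()
# Reaction strings are split on "<=>", then on "+"; a missing coefficient
# defaults to 1, and numeric coefficients are normalised to float strings.
reactants, products = ms._process_reaction_string("2 cpd_A[c] + cpd_B[c] <=> cpd_C[e]")
print(reactants)  # {'cpd_A[c]': '2.0', 'cpd_B[c]': '1.0'}
print(products)   # {'cpd_C[e]': '1.0'}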
8 changes: 4 additions & 4 deletions travis/restricted_fba.py → .github/tests/restricted_fba.py
@@ -2,9 +2,9 @@

model = cobra.io.read_sbml_model("WormJam.xml")
print("Model:")
-print(len(model.reactions),"reactions")
-print(len(model.metabolites),"metabolites")
-print(len(model.genes),"genes")
+print(len(model.reactions), "reactions")
+print(len(model.metabolites), "metabolites")
+print(len(model.genes), "genes")

biomass_rxn = model.reactions.get_by_id("BIO0100")
model.objective = biomass_rxn
@@ -19,4 +19,4 @@
print(solution.objective_value)
print(solution.status)

-assert solution.objective_value== 0, "Flux carried under restricted conditions"
+assert solution.objective_value == 0, "Flux carried under restricted conditions"
57 changes: 40 additions & 17 deletions travis/result_web_gen.py → .github/tests/result_web_gen.py
@@ -1,18 +1,19 @@
import json
+from pathlib import Path
from bs4 import BeautifulSoup

from json2html import *


-data = json.load(open("results.json","r"))
+data = json.load(open("results.json", "r"))
meta = data["meta"]
tests = data["tests"]
meta_html = json2html.convert(json=meta)
test_keys = list(tests.keys())
-settings = json.load(open("travis/settings.json","r"))["pipeline"]
+settings_path = Path(".github") / "tests" / "settings.json"
+settings = json.load(open(settings_path, "r"))["pipeline"]

###Templates
collapsible_template = """<div class="card">
<div class="card-header" id="{0}">
<h2 class="mb-0">
@@ -33,13 +34,32 @@
collapsible_insert = ""
toc_insert = ""
for test in test_keys:
-    toc_insert += toc_template.format(test,test.replace("test","").replace("_"," ").title().replace("Ids","IDs").replace("Id ","ID "))+"\n"
-    collapsible_insert += collapsible_template.format(test,test.replace("test","").replace("_"," ").title().replace("Ids","IDs").replace("Id ","ID "),json2html.convert(json=tests[test]))+"\n"
+    toc_insert += (
+        toc_template.format(
+            test,
+            test.replace("test", "")
+            .replace("_", " ")
+            .title()
+            .replace("Ids", "IDs")
+            .replace("Id ", "ID "),
+        )
+        + "\n"
+    )
+    collapsible_insert += (
+        collapsible_template.format(
+            test,
+            test.replace("test", "")
+            .replace("_", " ")
+            .title()
+            .replace("Ids", "IDs")
+            .replace("Id ", "ID "),
+            json2html.convert(json=tests[test]),
+        )
+        + "\n"
+    )


-#double curly brackets for escaping string formatting
+# double curly brackets for escaping string formatting
blob = f"""<html>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<head>
@@ -149,11 +169,14 @@
</body>
</html>"""

-blob = blob.replace("""<table border="1">""","""<table class="table table-sm table-bordered table-responsive">""")
-blob = blob.replace("""failed""","""failed <i class="fas fa-times-circle"></i>""")
-blob = blob.replace("""passed""","""passed <i class="fas fa-check-circle"></i>""")
+blob = blob.replace(
+    """<table border="1">""",
+    """<table class="table table-sm table-bordered table-responsive">""",
+)
+blob = blob.replace("""failed""", """failed <i class="fas fa-times-circle"></i>""")
+blob = blob.replace("""passed""", """passed <i class="fas fa-check-circle"></i>""")

-soup = BeautifulSoup(blob,features="lxml")
+soup = BeautifulSoup(blob, features="lxml")

-with open("Report.html","w+",encoding="utf-8") as f:
-    f.write(soup.prettify(formatter="html5"))
+with open("Report.html", "w+", encoding="utf-8") as f:
+    f.write(soup.prettify(formatter="html5"))
11 changes: 11 additions & 0 deletions .github/tests/run_memote.py
@@ -0,0 +1,11 @@
import json
from memote.suite.api import test_model
import cobra

model = cobra.io.read_sbml_model("WormJam.xml")
code, results = test_model(
    model, sbml_version=(3, 1), results=True
)  # ,skip=["test_consistency"]
with open("results.json", "w+") as f:
    f.write(json.dumps(results, indent=4))
print("Memote Done")