
Commit

Merge pull request #125 from NOAA-EDAB/ATLNTS-358_groupNames
Atlnts 358 group names
RGamble1 authored Oct 20, 2021
2 parents 9c7d8f5 + dd11195 commit e7fca3f
Showing 16 changed files with 744 additions and 21 deletions.
@@ -2,7 +2,7 @@
#'
#' Write the mapping to a file format (GitHub flavored markdown) that can be used on the wiki.
#'
#' Requires: andybeet/utilities package for "capitalize_first_letter" function
#' Requires: andybeet/abutils package for "capitalize_first_letter" function
#' andybeet/dbutils package for create_species_lookup.r and connect_to_database

# list packages and check to see if needed to be installed
@@ -24,7 +24,7 @@ for (apack in packages$pkgName) {
}


map_functional_group <- function(channel,writeToFile=F) {
create_map_functional_group <- function(channel,writeToFile=F) {

# read in functional group codes and name from Atlantis input file
fg <- atlantisom::load_fgs(here::here("currentVersion"),"neus_groups.csv") %>%
@@ -33,7 +33,7 @@ map_functional_group <- function(channel,writeToFile=F) {
#fg <- readr::read_csv(here::here("data-raw","initialFunctionalGroupNames.csv"))

# read in species membership to group, then join with functional group names
data <- readr::read_csv(here::here("data-raw","Atlantis_1_5_groups_svspp_nespp3.csv")) %>%
data <- readr::read_csv(here::here("data-raw/data","Atlantis_1_5_groups_svspp_nespp3.csv")) %>%
dplyr::mutate(NESPP3 = sprintf("%03d",NESPP3)) %>%
dplyr::left_join(.,fg,by="Code")

@@ -55,15 +55,15 @@ map_functional_group <- function(channel,writeToFile=F) {
dplyr::full_join(.,NESPP3Data, by="NESPP3") %>%
dplyr::arrange(Code) %>%
dplyr::rename(Species = Name,Functional_Group = LongName,Common_Name = COMNAME.y,Scientific_Name=SCIENTIFIC_NAME.y,Species_Itis=SPECIES_ITIS.y) %>%
dplyr::mutate(Common_Name = utilities::capitalize_first_letter(Common_Name),NESPP3=as.numeric(NESPP3),Species_Itis=as.numeric(Species_Itis)) %>%
dplyr::mutate(Common_Name = abutils::capitalize_first_letter(Common_Name),NESPP3=as.numeric(NESPP3),Species_Itis=as.numeric(Species_Itis)) %>%
dplyr::select(Code,Functional_Group,Species,Scientific_Name,SVSPP,NESPP3,Species_Itis,isFished) %>%
dplyr::mutate(isFishedSpecies = (Functional_Group==Species) & (isFished==T)) %>%
dplyr::select(-isFished)


# format to markdown table. Copy output to wiki
# open file and write
outputFile <- here::here("data-raw","functionalGroupNames.txt")
outputFile <- here::here("data","functionalGroupNames.txt")
fileConn<-file(outputFile,open="w")
header <- paste0("|",paste0(names(masterList),collapse = "|"),"|")
cat(header,file=fileConn,append=T)
@@ -81,7 +81,7 @@ map_functional_group <- function(channel,writeToFile=F) {
close(fileConn)

if(writeToFile){
readr::write_csv(masterList,here::here("data-raw","functionalGroupNames.csv"))
readr::write_csv(masterList,here::here("data","functionalGroupNames.csv"))
}

return(masterList)
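For orientation, a minimal usage sketch of the renamed function (not part of the diff); the server name and user id are placeholders, and the channel comes from the andybeet/dbutils package noted in the header comments:

# usage sketch only -- placeholder connection details
server <- "myServer"
uid <- "myUserID"
channel <- dbutils::connect_to_database(server, uid)
groupMap <- create_map_functional_group(channel, writeToFile = TRUE)  # writeToFile = TRUE also saves data/functionalGroupNames.csv
head(groupMap)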
30 changes: 30 additions & 0 deletions data-raw/create_mapv1groups_to_v2.r
@@ -0,0 +1,30 @@
#' Read and process changes in groups from V1.0 to V2.0
#'
#' Create a txt file in md format for use in the wiki
#'

codes <- readr::read_csv(here::here("data-raw/data","specieslist_v1_2.csv")) %>%
dplyr::select(GroupV1,Atcode,GroupV2,At2code) %>%
dplyr::distinct() %>%
dplyr::arrange(Atcode,At2code)

# format to markdown table. Copy output to wiki
# open file and write
outputFile <- here::here("data","v1v2GroupMap.txt")
fileConn<-file(outputFile,open="w")
header <- paste0("|",paste0(names(codes),collapse = "|"),"|")
cat(header,file=fileConn,append=T)
cat("\n",file=fileConn,append=T)
spacer <- paste0("|",paste0(rep("---",ncol(codes)),collapse = "|"),"|")
cat(spacer,file=fileConn,append=T)
cat("\n",file=fileConn,append=T)

for (irow in 1:nrow(codes)) {
rowData <- paste0("|",paste0(codes[irow,],collapse = "|"),"|")
cat(rowData,file=fileConn,append=T)
cat("\n",file=fileConn,append=T)
}

close(fileConn)
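For reference, the loop above writes v1v2GroupMap.txt as a GitHub-flavored markdown table; the rows below are a purely illustrative sketch of that format (column names come from specieslist_v1_2.csv, the code values are invented):

|GroupV1|Atcode|GroupV2|At2code|
|---|---|---|---|
|Atlantic herring|HER|Atlantic herring|HER|
|Demersal fish|DEM|Atlantic cod|COD|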


11 changes: 6 additions & 5 deletions data-raw/stockSMART_data.r → data-raw/create_stockSMART_data.r
@@ -1,14 +1,15 @@
#' Combines the stock SMART data with the Atlantis functional group codes
#'
#' Filters out all species that reside on the east coast from stock smart, and assign them with an atlantis functional group code
#'
#' Filters stock smart data by Atlantis species
#'
#' Needed for QC for fishing inputs and possibly for reasonability checks
#' Filters out all species that reside on the east coast from stock smart
#'
#' saves RDS file
#'
#' stockSMARTData.Rds"

library(magrittr)

stockSMART_data <- function() {
create_stockSMART_data <- function() {

# pulls in all of stockSMART data
saData <- dplyr::as_tibble(assessmentdata::stockAssessmentData)
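A usage sketch (not part of the diff), assuming the renamed function returns the filtered table in addition to saving the stockSMARTData.Rds file named in the header comments:

# hypothetical call after sourcing data-raw/create_stockSMART_data.r
stockSmart <- create_stockSMART_data()  # assumed to return the Atlantis-filtered stock SMART table
str(stockSmart)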
15 changes: 10 additions & 5 deletions data-raw/survdat_biomass.r → data-raw/create_survdat_biomass.r
@@ -1,6 +1,10 @@
#' Estimate swept area biomass over whole shelf for species in Atlantis present in survey
#' Creates biomass datafiles needed for reasonability checks
#'
#' Creats RDS files:
#'
#' Estimate swept area biomass (with uncertainty measures) over whole shelf for species in Atlantis
#' Bottom trawl Survey data is used (survdat) with 3 custom files for scallop, quahogs, surfclams
#'
#' Creates RDS files saved in data folder:
#' sweptAreaBiomassEPU.RDS
#' sweptAreaBiomassNEUS_Box.RDS
#' sweptAreaBiomassNEUS.RDS
@@ -17,12 +21,13 @@ if (pullFromDB) {
channel <- dbutils::connect_to_database(server,uid)
survey <- survdat::get_survdat_data(channel)
} else { # or read in previous pull
# eventually this will reside on Github in version controlled package
survey <- readRDS("C:/Users/andrew.beet/Documents/MyWork/gitHub_repos/survdat/testing/survdat2021.RDS")
}

### read in atlantic surfclam data. Poorly sampled in bottom trawl survey

clam <- readr::read_csv(file=here::here("data-raw","surfclam403Biomass.csv"),skip=8)
clam <- readr::read_csv(file=here::here("data-raw/data","surfclam403Biomass.csv"),skip=8)
# from Dan Hennen swept are biomass
clam <- clam %>%
dplyr::select(Yr,Value,StdDev) %>%
@@ -37,7 +42,7 @@ clam <- clam %>%

### read in ocean quahog data. Poorly sampled in bottom trawl survey

quahog <- readr::read_csv(file=here::here("data-raw","quahog754Biomass.csv"),skip=8)
quahog <- readr::read_csv(file=here::here("data-raw/data","quahog754Biomass.csv"),skip=8)
# from Dan Hennen swept are biomass
quahog <- quahog %>%
dplyr::select(Yr,Value,StdDev) %>%
@@ -56,7 +61,7 @@ quahog <- quahog %>%
# dplyr::select(Year,Value,Metric, Description, Units)
# # from 65 Stock assessment table A9.4 p80

scallop <- readr::read_csv(file=here::here("data-raw","scallop401Biomass.csv"),skip=9)
scallop <- readr::read_csv(file=here::here("data-raw/data","scallop401Biomass.csv"),skip=9)

scallops <- scallop %>%
dplyr::select(Year,Bms,CV_2) %>%
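Downstream reasonability checks would presumably load the saved estimates; a sketch under the assumption that the RDS files land in the data folder, as the header comments state:

# sketch only; file names taken from the header comments of create_survdat_biomass.r
sweptAreaEPU  <- readRDS(here::here("data", "sweptAreaBiomassEPU.RDS"))
sweptAreaBox  <- readRDS(here::here("data", "sweptAreaBiomassNEUS_Box.RDS"))
sweptAreaNEUS <- readRDS(here::here("data", "sweptAreaBiomassNEUS.RDS"))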
@@ -1,3 +1,10 @@
# V2.0 Atlantis Functional Group names and Codes. (First Cut)
#
# Manually created
#
# Temporary file used to compare with neus groups file to aid in creating main file
#
#
Group Code,Group Name
MAK,Atlantic mackerel
HER,Atlantic herring
File renamed without changes.
File renamed without changes.
