
Commit

fix all conflicts
wangzhao0217 committed Dec 2, 2024
2 parents 4bc4efd + d181f80 commit b00c6e6
Showing 8 changed files with 276 additions and 67 deletions.
22 changes: 13 additions & 9 deletions R/corenet_build_OS.R
@@ -239,19 +239,23 @@ corenet_build_OS = function(os_scotland, osm_scotland, la_names) {
)

# Define the file path for the combined GeoJSON
combined_CN_file = glue::glue("{output_folder}/combined_CN_{date_folder}_OS.geojson")
combined_CN_geojson_file = glue::glue("{output_folder}/combined_CN_{date_folder}_OS.geojson")
combined_CN_gpkg_file = glue::glue("{output_folder}/combined_CN_{date_folder}_OS.gpkg")
# Write the combined data to GeoJSON and GeoPackage files
sf::st_write(final_la_coherent, combined_CN_geojson_file, delete_dsn = TRUE)
sf::st_write(final_la_coherent, combined_CN_gpkg_file, delete_dsn = TRUE)

# Write the combined GeoJSON to a file
sf::st_write(combined_CN_geojson, combined_CN_file, delete_dsn = TRUE)
cat("Combined cohesive networks GeoJSON file for group has been saved to:", combined_CN_file, "\n")
# Print messages indicating where the files have been saved
cat("Combined cohesive networks GeoJSON file for group has been saved to:", combined_CN_geojson_file, "\n")
cat("Combined cohesive networks GeoPackage file for group has been saved to:", combined_CN_gpkg_file, "\n")

# Define the path for the PMtiles
combined_CN_pmtiles = glue::glue("{output_folder}/combined_CN_{number}_{date_folder}_OS.pmtiles")
combined_CN_pmtiles = glue::glue("{output_folder}/combined_CN_{date_folder}_OS.pmtiles")

# Construct the Tippecanoe command for the current group
command_tippecanoe = paste0(
'tippecanoe -o ', combined_CN_pmtiles,
' --name="', 'Scottish_Coherent_Networks_', number, '"',
' --name="', 'Scottish_Coherent_Networks',
' --layer=coherent_networks',
' --attribution="University of Leeds"',
' --minimum-zoom=6',
@@ -261,11 +265,11 @@ corenet_build_OS = function(os_scotland, osm_scotland, la_names) {
' --buffer=5',
' -rg',
' --force ',
combined_CN_file
combined_CN_geojson_file
)

# Execute the command and capture output
system_output = system(command_tippecanoe, intern = TRUE)
cat("Tippecanoe output for group", number, ":\n", system_output, "\n")
cat("Tippecanoe output for group :\n", system_output, "\n")

}
}
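For reference, a minimal standalone sketch of the export pattern used above (one sf layer written to both GeoJSON and GeoPackage, then tiled with tippecanoe), assuming the tippecanoe CLI is installed and using an illustrative `network` object and output folder:

library(sf)

# Illustrative inputs: `network` is any sf layer, `output_folder` any writable path
output_folder = "outputdata/example"
dir.create(output_folder, recursive = TRUE, showWarnings = FALSE)
geojson_file = file.path(output_folder, "combined_CN_example_OS.geojson")
gpkg_file = file.path(output_folder, "combined_CN_example_OS.gpkg")

# Write the same layer to both formats, overwriting any previous output
sf::st_write(network, geojson_file, delete_dsn = TRUE)
sf::st_write(network, gpkg_file, delete_dsn = TRUE)

# Tile the GeoJSON to PMTiles with tippecanoe (external CLI, assumed on PATH)
pmtiles_file = file.path(output_folder, "combined_CN_example_OS.pmtiles")
command = paste0(
  "tippecanoe -o ", pmtiles_file,
  " --name=Scottish_Coherent_Networks",
  " --layer=coherent_networks",
  " --minimum-zoom=6 --force ",
  geojson_file
)
system(command, intern = TRUE)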
3 changes: 2 additions & 1 deletion R/pkgs.R
@@ -21,6 +21,7 @@ get_pkgs = function() {
"tidyverse", # Includes dplyr, ggplot2, tidyr, stringr etc.
"zonebuilder", # For creating zones for spatial analysis
"iterators", # For creating iterators
"doParallel" # For parallel processing
"doParallel", # For parallel processing
"httr"
)
}
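The change above adds the missing comma after "doParallel" and appends httr to the package list. A small hedged sketch of one way the vector returned by get_pkgs() can be checked and attached:

pkgs = get_pkgs()

# Install anything that is missing, then attach everything
missing_pkgs = setdiff(pkgs, rownames(installed.packages()))
if (length(missing_pkgs) > 0) install.packages(missing_pkgs)
invisible(lapply(pkgs, library, character.only = TRUE))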
30 changes: 10 additions & 20 deletions R/simplify_network.R
@@ -8,12 +8,9 @@ simplify_network = function(rnet_y, region_name, region_boundary) {
region_snake_case = snakecase::to_snake_case(region_name)
base_name = paste0("OS_Scotland_Network_", region_snake_case, ".geojson")
rnet_x_f = file.path("inputdata", base_name)
rnet_x = sf::read_sf(rnet_x_f)
# rnet_x = geojsonsf::geojson_sf(rnet_x_f) # bit faster
rnet_x = sf::read_sf(rnet_x_f) |> sf::st_transform(crs = "EPSG:27700")

# Transform the spatial data to a different coordinate reference system (EPSG:27700)
# TODO: uncomment:
rnet_x = rnet_x[region_boundary, ] # TODO: is this needed? Can remove if not
rnet_x = rnet_x[region_boundary |> sf::st_transform(crs = "EPSG:27700") , ] # TODO: is this needed? Can remove if not
rnet_xp = sf::st_transform(rnet_x, "EPSG:27700")
rnet_yp = sf::st_transform(rnet_y, "EPSG:27700")
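The edit above transforms the boundary to EPSG:27700 before the bracket subset, since sf's `x[y, ]` filtering requires both layers to share a CRS. A self-contained toy sketch of that idiom:

library(sf)

# Toy data: two short lines and a bounding box, all defined in WGS 84
lines = sf::st_sf(
  id = 1:2,
  geometry = sf::st_sfc(
    sf::st_linestring(rbind(c(-3.2, 55.95), c(-3.1, 55.96))),
    sf::st_linestring(rbind(c(-4.5, 56.50), c(-4.4, 56.51))),
    crs = "EPSG:4326"
  )
)
boundary = sf::st_as_sfc(
  sf::st_bbox(c(xmin = -3.3, ymin = 55.9, xmax = -3.0, ymax = 56.0), crs = "EPSG:4326")
)

# Project both layers to the same CRS (British National Grid) before subsetting
lines_27700 = sf::st_transform(lines, "EPSG:27700")
boundary_27700 = sf::st_transform(boundary, "EPSG:27700")

# Bracket subsetting keeps features that intersect the boundary: only id 1 remains
lines_27700[boundary_27700, ]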

@@ -69,36 +66,29 @@ simplify_network = function(rnet_y, region_name, region_boundary) {
rnet_merged_all = rnet_merged_all |>
dplyr::filter_at(columns_to_check, any_vars(!is.na(.)))

# Selecting only the geometry column from the 'rnet_merged_all' dataset.
rnet_merged_all_only_geometry = rnet_merged_all |> dplyr::select(geometry)

# Merging all geometries into a single geometry using st_union from the sf package.
rnet_merged_all_union = sf::st_union(rnet_merged_all_only_geometry)

# Transforming the merged geometry to a specific coordinate reference system (CRS), EPSG:27700.
rnet_merged_all_projected = sf::st_transform(rnet_merged_all_union, "EPSG:27700")

# Converting the projected geometry into a GEOS geometry. GEOS is a library used for spatial operations.
rnet_merged_all_geos = geos::as_geos_geometry(rnet_merged_all_projected)
rnet_merged_all_geos = geos::as_geos_geometry(rnet_merged_all)

# Creating a buffer around the GEOS geometry. This expands the geometry by a specified distance (in meters).
rnet_merged_all_geos_buffer = geos::geos_buffer(rnet_merged_all_geos, distance = 30, params = geos::geos_buffer_params(quad_segs = 4))

# Converting the buffered GEOS geometry back to an sf object.
rnet_merged_all_projected_buffer = sf::st_as_sf(rnet_merged_all_geos_buffer)

# Transform the coordinate reference system of 'rnet_merged_all' to WGS 84 (EPSG:4326).
rnet_merged_all_buffer = sf::st_transform(rnet_merged_all_projected_buffer, "EPSG:4326")

# Subsetting another dataset 'rnet_y' based on the spatial relation with 'rnet_merged_all_buffer'.
# It selects features from 'rnet_y' that are within the boundaries of 'rnet_merged_all_buffer'.
rnet_y_subset = rnet_y[rnet_merged_all_buffer, , op = sf::st_within]
# rnet_y_subset = sf::st_intersection(rnet_yp, rnet_merged_all_projected_buffer)
# browser()
rnet_yp_points = sf::st_point_on_surface(rnet_yp)
rnet_yp_points_subset = rnet_yp_points[rnet_merged_all_projected_buffer, ]
rnet_y_subset = rnet_yp[rnet_yp_points_subset, ]

# Filter 'rnet_y' to exclude geometries within 'within_join'
rnet_y_rest = rnet_y[!rnet_y$geometry %in% rnet_y_subset$geometry, ]
rnet_y_rest = rnet_yp[!rnet_yp$geometry %in% rnet_y_subset$geometry, ]

# Transform the CRS of the 'rnet_merged_all' object to WGS 84 (EPSG:4326)
rnet_merged_all = sf::st_transform(rnet_merged_all, "EPSG:4326")
rnet_y_rest = sf::st_transform(rnet_y_rest, "EPSG:4326")

# Combine 'rnet_y_rest' and 'rnet_merged_all' into a single dataset
simplified_network = dplyr::bind_rows(rnet_y_rest, rnet_merged_all)
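The rewritten subsetting above replaces the st_within join with a buffer plus point-on-surface test: the merged network is buffered by 30 m and a detailed-network feature is kept if its representative point falls inside that buffer. A hedged sketch of the idiom, with `rnet_simplified` and `rnet_detailed` as illustrative inputs already projected to EPSG:27700:

library(sf)
library(geos)

# Buffer the simplified network by 30 m (quad_segs = 4 keeps the buffer geometry light)
rnet_buffer = sf::st_as_sf(
  geos::geos_buffer(
    geos::as_geos_geometry(rnet_simplified),
    distance = 30,
    params = geos::geos_buffer_params(quad_segs = 4)
  )
)

# Keep detailed features whose point-on-surface lies inside the buffer
rnet_points = sf::st_point_on_surface(rnet_detailed)
inside = lengths(sf::st_intersects(rnet_points, rnet_buffer)) > 0
rnet_detailed_subset = rnet_detailed[inside, ]

Relative to the earlier op = sf::st_within subset, the point-on-surface test also retains features whose ends stray slightly outside the 30 m buffer, as long as their representative point stays inside it.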
13 changes: 5 additions & 8 deletions _targets.R
@@ -20,6 +20,7 @@ library(targets) # Needed to make targets work
library(magrittr) # Light load of |>
library(sf) # Needed for sf support
set.seed(2023)
httr::set_config(httr::timeout(seconds = 6000))
tar_source()
pkgs = get_pkgs()
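The added httr::set_config() call sets a generous global timeout (6000 seconds) for every request httr makes during the pipeline run. A brief sketch of the same setting alongside the per-request form (URL illustrative):

library(httr)

# Raise the timeout for all subsequent httr requests to 6000 seconds
httr::set_config(httr::timeout(seconds = 6000))

# A per-request timeout can also be passed directly to individual calls
resp = httr::GET("https://example.com/large_file.zip", httr::timeout(600))
httr::status_code(resp)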

@@ -626,7 +627,6 @@ tar_target(rs_school, {
os_pois_subset[region_boundary_buffered, ] |>
sf::st_transform("EPSG:27700")
}),

tar_target(grid, {
grid = readRDS("./inputdata/grid_scot.Rds")
grid = sf::st_transform(grid, "EPSG:4326")
@@ -676,14 +676,13 @@ tar_target(rs_school, {
# Combined utility trip purposes --------------------------------------------

tar_target(od_utility_combined, {

od_utility_combined = rbind(od_shopping, od_visiting, od_leisure) |>
dplyr::slice_max(n = parameters$max_to_route, order_by = all, with_ties = FALSE)
sum(od_utility_combined$bicycle) / sum(od_utility_combined$all)

# Get % cycling for commuting per zone
# pcycle_regional = sum(commute_stats$comm_orig_bicycle, na.rm = TRUE) /
# sum(commute_stats$comm_orig_all, na.rm = TRUE)
# sum(commute_stats$comm_orig_all, na.rm = TRUE)
pcycle_national = 0.016

commute_stats_minimal = commute_stats |>
@@ -692,7 +691,7 @@ tar_target(rs_school, {
dplyr::transmute(
DataZone,
multiplier = (comm_orig_bicycle / comm_orig_all) /
pcycle_national
pcycle_national
) |>
# 0 to 0.1:
dplyr::mutate(multiplier = case_when(
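The transmute above scales each zone's commuter cycling share against the national baseline (pcycle_national = 0.016) to give a zone multiplier, which the following case_when then caps. A toy worked example of that calculation; the 0.1 and 10 bounds are illustrative placeholders, not values from the full file:

library(dplyr)

pcycle_national = 0.016

# Toy commute counts for two zones (values illustrative)
commute_stats_toy = data.frame(
  DataZone = c("zone_a", "zone_b"),
  comm_orig_bicycle = c(8, 1),
  comm_orig_all = c(200, 400)
)

commute_stats_toy |>
  transmute(
    DataZone,
    # 8/200 = 4% cycling vs 1.6% nationally gives a multiplier of 2.5
    multiplier = (comm_orig_bicycle / comm_orig_all) / pcycle_national
  ) |>
  mutate(multiplier = case_when(
    multiplier < 0.1 ~ 0.1,   # floor (illustrative bound)
    multiplier > 10 ~ 10,     # ceiling (illustrative bound)
    TRUE ~ multiplier
  ))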
@@ -829,7 +828,7 @@ tar_target(rs_school, {

tar_target(utility_stats_baseline, {
stats = sf::st_drop_geometry(od_utility_combined)
stats = stats[, c(
stats = stats[, c(
"startDZ", "endDZ", "purpose", "all", "car",
"foot", "bicycle", "public_transport", "taxi"
)]
@@ -1130,7 +1129,6 @@ tar_target(rs_school, {
responce = system(command_all, intern = TRUE)
responce
}),

tar_target(pmtiles_buildings, {
check = length(zones_dasymetric_tile)

@@ -1229,7 +1227,7 @@ tar_target(rs_school, {

message("Saving outputs for ", parameters$date_routing)

saveRDS(od_commute_subset, file.path(region_folder, "od_commute_subset.Rds"))
saveRDS(od_commute_subset, file.path(region_folder, "od_commute_subset.Rds"))
saveRDS(zones_stats, file.path(region_folder, "zones_stats.Rds"))
saveRDS(school_stats, file.path(region_folder, "school_stats.Rds"))

@@ -1241,7 +1239,6 @@ tar_target(rs_school, {
sf::write_sf(combined_network, file.path(region_folder, "combined_network.gpkg"), delete_dsn = TRUE)
as.character(Sys.Date())
}),

tar_target(metadata, {
# TODO: generate build summary
# metadata_all = tar_meta()
51 changes: 29 additions & 22 deletions code/build.R
@@ -11,11 +11,7 @@ library(doParallel)
tar_source()

parameters = jsonlite::read_json("parameters.json", simplifyVector = T)
lads = sf::read_sf("inputdata/boundaries/la_regions_2023.geojson")
mapview::mapview(lads)
# To test for a single local authority:
lads = lads |>
filter(LAD23NM %in% c("Clackmannanshire"))
lads = sf::read_sf("inputdata/boundaries/la_regions_scotland_bfe_simplified_2023.geojson")
date_folder = parameters$date_routing
la_names = lads$LAD23NM
output_folder = file.path("outputdata", date_folder)
@@ -44,21 +40,20 @@ if (GENERATE_CDB) {

library(osmactive)
# See https://github.com/nptscot/osmactive/blob/main/code/classify-roads.R and traffic-volumes.R
# TODO: Change to V6:
"final_estimates_Scotland.gpkg"
# https://github.com/nptscot/scottraffic/releases
f_traffic = "scottraffic/final_estimates_Scotland_higherror_discarded.gpkg"
f_traffic = "scottraffic/final_estimates_Scotland.gpkg"
if (!file.exists(f_traffic)) {
system("gh repo clone nptscot/scottraffic")
file.remove(f_traffic)
setwd("scottraffic")
system("gh release list")
system("gh release download v5 --clobber")
system("gh release download v6")
setwd("..")
}
traffic_volumes_scotland = sf::read_sf(f_traffic)

# Generate cycle_net - this is slow, we should save the file
osm_national = get_travel_network("Scotland", force_download = T)
# Generate cycle_net: forcing update:
# osm_national = get_travel_network("Scotland", force_download = TRUE)
osm_national = get_travel_network("Scotland")
# saveRDS(osm_national, "inputdata/osm_national_2024_05_23")

# Generate road segment midpoints
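The block above switches the traffic estimates file to final_estimates_Scotland.gpkg from the v6 release and fetches it with the GitHub CLI. A hedged sketch of the download-if-missing pattern, assuming gh is installed and authenticated:

f_traffic = "scottraffic/final_estimates_Scotland.gpkg"

if (!file.exists(f_traffic)) {
  # Clone the data repository once, then pull the tagged release assets into it
  if (!dir.exists("scottraffic")) {
    system("gh repo clone nptscot/scottraffic")
  }
  old_wd = setwd("scottraffic")
  system("gh release download v6")
  setwd(old_wd)
}
traffic_volumes_scotland = sf::read_sf(f_traffic)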
@@ -180,15 +175,20 @@ if (GENERATE_CDB) {

#
cycle_net_traffic = level_of_service(cycle_net_traffic)

cbd_layer = cycle_net_traffic |>
transmute(
osm_id,
highway,
`Traffic volume` = final_traffic,
`Speed limit` = final_speed,
`Infrastructure type` = cycle_segregation,
`Level of Service`
`Level of Service`,
`Traffic volume category` = case_when(
final_traffic >= 0 & final_traffic < 1999.5 ~ "0 to 1999",
final_traffic >= 1999.5 & final_traffic < 3999.5 ~ "2000 to 3999",
final_traffic >= 3999.5 ~ "4000+",
TRUE ~ NA_character_
)
)
# save file for individual district
district_name = district_geom$LAD23NM |>
@@ -202,16 +202,26 @@ if (GENERATE_CDB) {
}

# Combine all CBD files into a single file
# Remove combined file if it already exists:
if (file.exists(cbd_filename)) {
file.remove(cbd_filename)
}
cbd_files = list.files(output_folder, pattern = "cbd_layer_.*\\.geojson$", full.names = TRUE)
# Create an empty cbd_layers and cbd_layer
cbd_layers = sf::st_sf(geometry = st_sfc())
cbd_layer = sf::st_sf(geometry = st_sfc())
cbd_layers = lapply(cbd_files, sf::read_sf)
cbd_layer = do.call(rbind, cbd_layers)
cbd_filename = paste0(output_folder, "/cbd_layer_", date_folder, ".geojson")
if (file.exists(cbd_filename)) {
file.remove(cbd_filename)
}
# Update traffic volumes for off road cycleways
cbd_layer = cbd_layer |>
mutate(
`Traffic volume category` = case_when(
`Infrastructure type` == "Off Road Cycleway" ~ NA_character_,
highway %in% c("footway", "path", "pedestrian", "steps") ~ NA_character_,
TRUE ~ `Traffic volume category`
)
)
sf::write_sf(cbd_layer, cbd_filename)
fs::file_size(cbd_filename)
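The traffic bands introduced above (0 to 1999, 2000 to 3999, 4000+) are built with case_when on half-unit breakpoints; an equivalent hedged alternative is base R's cut(), which keeps the breaks and labels in one place:

# Illustrative traffic values; the breaks reproduce the bands defined above
final_traffic = c(500, 2500, 8000, NA)

cut(
  final_traffic,
  breaks = c(0, 1999.5, 3999.5, Inf),
  labels = c("0 to 1999", "2000 to 3999", "4000+"),
  include.lowest = TRUE
) |> as.character()
# "0 to 1999"  "2000 to 3999"  "4000+"  NA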

@@ -286,7 +296,7 @@ if (parameters$generate_CN_start) {
# mapview::mapview(cn_test, zcol = "road_function")

# Combine regional outputs ---------------------------------------------------

GENERATE_PMTILES = TRUE

if (GENERATE_PMTILES) {
@@ -666,9 +676,6 @@ if (PUSH_TO_GITHUB) {
parameters$max_to_route > 20e3
is_linux = Sys.info()[["sysname"]] == "Linux"
if (full_build) {
v = paste0("v", Sys.Date(), "_commit_", commit$commit)
v = gsub(pattern = " |:", replacement = "-", x = v)
# Or latest release:
setwd(glue::glue(getwd(),"/", output_folder))
system("gh release list")
v = glue::glue("v{date_folder}")
33 changes: 29 additions & 4 deletions code/prep_admin_bounds.R
@@ -18,7 +18,31 @@ library(sf)
# la = sf::read_sf("la.gpkg")
# plot(la)

la = sf::read_sf("https://services1.arcgis.com/ESMARspQHYMw9BZ9/arcgis/rest/services/Local_Authority_Districts_December_2023_Boundaries_UK_BSC/FeatureServer/0/query?outFields=*&where=1%3D1&f=geojson")

# Boundary options:
# https://www.ons.gov.uk/methodology/geography/geographicalproducts/digitalboundaries
# (BFC) Full resolution - clipped to the coastline (Mean High Water mark)
# (BFE) Full resolution - extent of the realm (usually this is the Mean Low Water mark but, in some cases, boundaries extend beyond this to include offshore islands)
# (BGC) Generalised (20m) - clipped to the coastline (Mean High Water mark)
# (BUC) Ultra Generalised (500m) - clipped to the coastline (Mean High Water mark)


la_bsc = sf::read_sf("https://services1.arcgis.com/ESMARspQHYMw9BZ9/arcgis/rest/services/Local_Authority_Districts_December_2023_Boundaries_UK_BSC/FeatureServer/0/query?outFields=*&where=1%3D1&f=geojson")
la_bfe = sf::read_sf("https://services1.arcgis.com/ESMARspQHYMw9BZ9/arcgis/rest/services/Local_Authority_Districts_December_2023_Boundaries_UK_BFE/FeatureServer/0/query?outFields=*&where=1%3D1&f=geojson")

# BFE version is 50x larger than BSC
object.size(la_bfe) |> as.numeric() /
object.size(la_bsc) |> as.numeric()

la_bfe_simplified = rmapshaper::ms_simplify(la_bfe, keep = 0.02)

# BFE version is now 1.4x larger than BSC
object.size(la_bfe_simplified) |> as.numeric() /
object.size(la_bsc) |> as.numeric()

mapview::mapview(la_bsc) + mapview::mapview(la_bfe_simplified)

la = la_bfe_simplified

la = la[, c("LAD23CD", "LAD23NM")]
# LAs in Scotland, CD starts with "S":
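The new boundary preparation above trades the generalised BSC layer for full-extent BFE boundaries simplified with rmapshaper, keeping coastal detail while controlling file size. A hedged recap of the simplify-and-filter steps, using the `la_bfe` object read above:

library(sf)

# Keep roughly 2% of vertices; rmapshaper preserves shared borders between polygons
la_bfe_simplified = rmapshaper::ms_simplify(la_bfe, keep = 0.02)

# Compare in-memory sizes before and after simplification
as.numeric(object.size(la_bfe)) / as.numeric(object.size(la_bfe_simplified))

# One way to apply the Scotland filter described in the comment above (codes starting "S")
la_scotland = la_bfe_simplified[grepl("^S", la_bfe_simplified$LAD23CD), ]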
@@ -75,10 +99,11 @@ la_regions |>
# check for NAs:
la_regions[is.na(la_regions$Region),]

sf::write_sf(la_regions, "la_regions_scotland_bfe_simplified_2023.geojson", delete_dsn = TRUE)
sf::write_sf(la, "la_uk_bfe_simplified_2023.geojson", delete_dsn = TRUE)

# system("gh release upload boundaries-2024 las_scotland_2023.geojson las_2023.geojson --clobber")
dir.create("inputdata/boundaries", showWarnings = FALSE)
sf::write_sf(la_regions, "inputdata/boundaries/la_regions_2023.geojson", delete_dsn = TRUE)
sf::write_sf(la, "inputdata/boundaries/las_2023.geojson", delete_dsn = TRUE)
system("gh release upload boundaries-2024 la_regions_scotland_bfe_simplified_2023.geojson la_uk_bfe_simplified_2023.geojson --clobber")

# https://github.com/nptscot/npt/releases/download/boundaries-2024/las_2023.geojson
