From 00a8c53b61bddf31238269dfa22296af88525605 Mon Sep 17 00:00:00 2001 From: Matteo Santamaria Date: Thu, 8 Jul 2021 20:45:42 -0400 Subject: [PATCH 01/41] Add .idea to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 4f6e730..4d2527a 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ #PyCharm *.iws *.workspace.xml +.idea # Prevent generated and test files from being committed requirements.txt From 4245fd4e883dc493430429a0cf6c4f95fc3cd344 Mon Sep 17 00:00:00 2001 From: Matteo Santamaria Date: Thu, 8 Jul 2021 20:46:21 -0400 Subject: [PATCH 02/41] Refactor DRY code in SubCommand::Install --- src/main.rs | 50 +++----------------------------------------------- src/util.rs | 32 +++++++++++++++++++++++++++++++- 2 files changed, 34 insertions(+), 48 deletions(-) diff --git a/src/main.rs b/src/main.rs index ce7d4b9..bde54ee 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,7 +5,7 @@ use crate::dep_resolution::res; use crate::dep_types::{ Constraint, Extras, Lock, LockPackage, Package, Rename, Req, ReqType, Version, }; -use crate::util::{abort, Os}; +use crate::util::{abort, process_reqs, Os}; use regex::Regex; use serde::Deserialize; @@ -1573,52 +1573,8 @@ fn main() { let dont_uninstall = util::find_dont_uninstall(&updated_reqs, &up_dev_reqs); - // git_reqs is used to store requirements from packages installed via git. - let mut git_reqs = vec![]; // For path reqs too. - let mut git_reqs_dev = vec![]; - for req in updated_reqs.iter().filter(|r| r.git.is_some()) { - // todo: as_ref() would be better than clone, if we can get it working. 
- let mut metadata = install::download_and_install_git( - &req.name, - // util::GitPath::Git(req.git.clone().unwrap()), - &req.git.clone().unwrap(), - &git_path, - &paths, - ); - - git_reqs.append(&mut metadata.requires_dist); - } - - // todo: lots of DRY between reqs and dev reqs - for req in up_dev_reqs.iter().filter(|r| r.git.is_some()) { - let mut metadata = install::download_and_install_git( - &req.name, - // util::GitPath::Git(req.git.clone().unwrap()), - &req.git.clone().unwrap(), - &git_path, - &paths, - ); - - git_reqs_dev.append(&mut metadata.requires_dist); - } - - // We don't pass the git requirement itself, since we've directly installed it, - // but we do pass its requirements. - let mut updated_reqs: Vec = updated_reqs - .into_iter() - .filter(|r| r.git.is_none() && r.path.is_none()) - .collect(); - let mut up_dev_reqs: Vec = up_dev_reqs - .into_iter() - .filter(|r| r.git.is_none() && r.path.is_none()) - .collect(); - - for r in git_reqs { - updated_reqs.push(r); - } - for r in git_reqs_dev { - up_dev_reqs.push(r); - } + let updated_reqs = process_reqs(updated_reqs, &git_path, &paths); + let up_dev_reqs = process_reqs(up_dev_reqs, &git_path, &paths); sync( &paths, diff --git a/src/util.rs b/src/util.rs index 747f7c6..3f1cb1f 100644 --- a/src/util.rs +++ b/src/util.rs @@ -8,7 +8,7 @@ use crate::{ dep_types::{Constraint, DependencyError, Req, ReqType, Version}, files, install::{self, PackageType}, - py_versions, CliConfig, + py_versions, util, CliConfig, }; use ini::Ini; use regex::Regex; @@ -956,6 +956,36 @@ pub fn canon_join(path: &Path, extend: &str) -> PathBuf { new_path } +/// Install git requirements and collect their downstream dependencies. +/// +/// The git requirements are removed from the `reqs` vector, and are replaced +/// by all their downstream requirements. +pub fn process_reqs(reqs: Vec, git_path: &Path, paths: &util::Paths) -> Vec { + // git_reqs is used to store requirements from packages installed via git. 
+ let mut git_reqs = vec![]; // For path reqs too. + for req in reqs.iter().filter(|r| r.git.is_some()) { + // todo: as_ref() would be better than clone, if we can get it working. + let mut metadata = install::download_and_install_git( + &req.name, + // util::GitPath::Git(req.git.clone().unwrap()), + &req.git.clone().unwrap(), + &git_path, + &paths, + ); + git_reqs.append(&mut metadata.requires_dist); + } + // We don't pass the git requirement itself, since we've directly installed it, + // but we do pass its requirements. + let mut updated_reqs: Vec = reqs + .into_iter() + .filter(|r| r.git.is_none() && r.path.is_none()) + .collect(); + for r in git_reqs { + updated_reqs.push(r); + } + updated_reqs +} + #[cfg(test)] mod tests { use rstest::rstest; From 8cf90989164996a1a222dc836f286e9e24332dd4 Mon Sep 17 00:00:00 2001 From: Matteo Santamaria Date: Fri, 9 Jul 2021 11:03:30 -0400 Subject: [PATCH 03/41] Clean up DRY when adding both deps and dev-deps to pyproject.toml --- src/files.rs | 212 +++++++++++++++++++++++++++------------------------ 1 file changed, 111 insertions(+), 101 deletions(-) diff --git a/src/files.rs b/src/files.rs index 4c64013..29fa733 100644 --- a/src/files.rs +++ b/src/files.rs @@ -123,116 +123,135 @@ pub struct Poetry { // pub extras: Option>, } -/// Split from `add_reqs_to_cfg` to accomodate testing -fn update_cfg(cfg_data: &str, added: &[Req], added_dev: &[Req]) -> String { - let mut result = String::new(); - let mut in_dep = false; - let mut in_dev_dep = false; - let sect_re = Regex::new(r"^\[.*\]$").unwrap(); - - // We collect lines here so we can start the index at a non-0 point. - let lines_vec: Vec<&str> = cfg_data.lines().collect(); - - // todo: Lots of DRY between dep and dev dep - let mut dep_start = 0; - let mut dev_dep_start = 0; - let mut dep_end = 0; - let mut dev_dep_end = 0; +/// Encapsulate one section of the `pyproject.toml`. 
+/// +/// # Attributes: +/// * lines: A vector containing each line of the section +/// * i_start: Zero-indexed indicating the line of the header. +/// * i_end: Zero-indexed indicating the line number of the next section header, +/// or the last line of the file. +struct Section { + lines: Vec, + i_start: usize, + i_end: usize, +} - for (i, line) in cfg_data.lines().enumerate() { - if &line.replace(" ", "") == "[tool.pyflow.dependencies]" { - dep_start = i + 1; - if in_dev_dep { - dev_dep_end = i - 1; - } - in_dep = true; - in_dev_dep = false; - continue; // Continue so this line doesn't trigger the section's end. +/// Identify the start index, end index, and lines of a particular section. +fn collect_section(cfg_lines: &[String], title: &str) -> Option
{ + // This will tell us when we've reached a new section + let section_re = Regex::new(r"^\[.*\]$").unwrap(); + + let mut existing_entries = Vec::new(); + let mut in_section = false; + let mut i_start = 0usize; + + for (i, line) in cfg_lines.iter().enumerate() { + if in_section && section_re.is_match(line) { + return Some(Section { + lines: existing_entries, + i_start, + i_end: i, + }); } - if &line.replace(" ", "") == "[tool.pyflow.dev-dependencies]" { - dev_dep_start = i + 1; - if in_dep { - dep_end = i - 1; - } - in_dep = false; - in_dev_dep = true; - continue; + if in_section { + existing_entries.push(line.parse().unwrap()) } - // We've found the end of the dependencies section. - if in_dep && (sect_re.is_match(line) || i == lines_vec.len() - 1) { - in_dep = false; - dep_end = i - 1; - } - - if in_dev_dep && (sect_re.is_match(line) || i == lines_vec.len() - 1) { - in_dev_dep = false; - dev_dep_end = i - 1; + // This must be the last step of the loop to work properly + if line.replace(" ", "") == title { + existing_entries.push(title.into()); + i_start = i; + in_section = true; } } - - let mut insertion_pt = dep_start; - if dep_start != 0 { - #[allow(clippy::needless_range_loop)] - for i in dep_start..=dep_end { - let line = lines_vec[i]; - if !line.is_empty() { - insertion_pt = i + 1 - } - } + // We've reached the end of the file without detecting a new section + if in_section { + Some(Section { + lines: existing_entries, + i_start, + i_end: cfg_lines.len(), + }) + } else { + None } +} - let mut dev_insertion_pt = dev_dep_start; - if dev_dep_start != 0 { - #[allow(clippy::needless_range_loop)] - for i in dev_dep_start..=dev_dep_end { - let line = lines_vec[i]; - if !line.is_empty() { - dev_insertion_pt = i + 1 +/// Main logic for adding dependencies to a particular section. +/// +/// If the section is detected, then the dependencies are appended to that section. Otherwise, +/// a new section is appended to the end of the file. 
+fn extend_or_insert(mut cfg_lines: Vec, section_header: &str, reqs: &[Req]) -> Vec { + let collected = collect_section(&cfg_lines, section_header); + + match collected { + // The section already exists, so we can just add the new reqs + Some(section) => { + + // To enforce proper spacing we first remove any empty lines, + // and later we append a trailing empty line + let mut all_deps: Vec = section + .lines + .to_owned() + .into_iter() + .filter(|x| !x.is_empty()) + .collect(); + + for req in reqs { + all_deps.push(req.to_cfg_string()) } - } - } + all_deps.push("".into()); - for (i, line) in cfg_data.lines().enumerate() { - if i == insertion_pt && dep_start != 0 { - for req in added { - result.push_str(&req.to_cfg_string()); - result.push('\n'); - } + // Replace the original lines with our new updated lines + cfg_lines.splice(section.i_start..section.i_end, all_deps); + cfg_lines } - if i == dev_insertion_pt && dev_dep_start != 0 { - for req in added_dev { - result.push_str(&req.to_cfg_string()); - result.push('\n'); + // The section did not alredy exist, so we must create it + None => { + // A section is composed of its header, followed by all the requirements + // and then an empty line + let mut section = vec![section_header.to_string()]; + section.extend(reqs.iter().map(|r| r.to_cfg_string())); + section.push("".into()); + + // We want an empty line before adding the new section + if let Some(last) = cfg_lines.last() { + if !last.is_empty() { + cfg_lines.push("".into()) + } } + cfg_lines.extend(section); + cfg_lines } - result.push_str(line); - result.push('\n'); } +} - // If the sections don't exist, create them. - // todo: Adjust start pad as needed so there's exactly two blank lines before adding the section. - if dep_start == 0 { - // todo: Should add dependencies section before dev deps section. 
- result.push_str("[tool.pyflow.dependencies]\n"); - for req in added { - result.push_str(&req.to_cfg_string()); - result.push('\n'); - } - result.push('\n'); - } +/// Add dependencies and dev-dependencies to `cfg-data`, creating the sections if necessary. +/// +/// The added sections are appended to the end of the file. Split from `add_reqs_to_cfg` +/// to accomodate testing. +fn update_cfg(cfg_data: &str, added: &[Req], added_dev: &[Req]) -> String { + let cfg_lines: Vec = cfg_data.lines().map(str::to_string).collect(); - if dev_dep_start == 0 { - result.push_str("[tool.pyflow.dev-dependencies]\n"); - for req in added_dev { - result.push_str(&req.to_cfg_string()); - result.push('\n'); - } - result.push('\n'); - } + // First we update the dependencies section + let cfg_lines_with_reqs = if !added.is_empty() { + extend_or_insert(cfg_lines, "[tool.pyflow.dependencies]", added) + } else { + cfg_lines + }; - result + // Then we move onto the dev-dependencies + let cfg_lines_with_all_reqs = if !added_dev.is_empty() { + extend_or_insert( + cfg_lines_with_reqs, + "[tool.pyflow.dev-dependencies]", + added_dev, + ) + } else { + cfg_lines_with_reqs + }; + + cfg_lines_with_all_reqs.join("\n") } /// Write dependencies to pyproject.toml. 
If an entry for that package already exists, ask if @@ -373,7 +392,6 @@ a = "^0.3.5" [tool.pyflow.dev-dependencies] dev_a = "^1.17.2" - "#; const _BASELINE_NO_DEPS: &str = r#" @@ -382,7 +400,6 @@ name = "" [tool.pyflow.dev-dependencies] dev_a = "^1.17.2" - "#; const BASELINE_NO_DEV_DEPS: &str = r#" @@ -391,13 +408,11 @@ name = "" [tool.pyflow.dependencies] a = "^0.3.5" - "#; const BASELINE_NO_DEPS_NO_DEV_DEPS: &str = r#" [tool.pyflow] name = "" - "#; const BASELINE_EMPTY_DEPS: &str = r#" @@ -408,7 +423,6 @@ name = "" [tool.pyflow.dev-dependencies] dev_a = "^1.17.2" - "#; #[test] @@ -434,7 +448,6 @@ c = "^0.0.1" [tool.pyflow.dev-dependencies] dev_a = "^1.17.2" dev_b = "^0.0.1" - "#; assert_eq!(expected, &actual); @@ -462,7 +475,6 @@ c = "^0.0.1" [tool.pyflow.dev-dependencies] dev_b = "^0.0.1" - "#; assert_eq!(expected, &actual); @@ -490,7 +502,6 @@ c = "^0.0.1" [tool.pyflow.dev-dependencies] dev_a = "^1.17.2" dev_b = "^0.0.1" - "#; assert_eq!(expected, &actual); @@ -517,7 +528,6 @@ c = "^0.0.1" [tool.pyflow.dev-dependencies] dev_b = "^0.0.1" - "#; assert_eq!(expected, &actual); } From 2ea34a0c2109162907b0ac2104d7727cacef3b3f Mon Sep 17 00:00:00 2001 From: Matteo Santamaria Date: Thu, 8 Jul 2021 11:49:17 -0400 Subject: [PATCH 04/41] Refactor script function into its own file --- src/files.rs | 1 - src/main.rs | 172 ++------------------------------------------------ src/script.rs | 157 +++++++++++++++++++++++++++++++++++++++++++++ src/util.rs | 19 +++++- 4 files changed, 179 insertions(+), 170 deletions(-) create mode 100644 src/script.rs diff --git a/src/files.rs b/src/files.rs index 29fa733..56c6ade 100644 --- a/src/files.rs +++ b/src/files.rs @@ -187,7 +187,6 @@ fn extend_or_insert(mut cfg_lines: Vec, section_header: &str, reqs: &[Re match collected { // The section already exists, so we can just add the new reqs Some(section) => { - // To enforce proper spacing we first remove any empty lines, // and later we append a trailing empty line let mut all_deps: Vec = 
section diff --git a/src/main.rs b/src/main.rs index bde54ee..dbbaa37 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,16 +2,13 @@ #[mockall_double::double] use crate::dep_resolution::res; -use crate::dep_types::{ - Constraint, Extras, Lock, LockPackage, Package, Rename, Req, ReqType, Version, -}; +use crate::dep_types::{Constraint, Lock, LockPackage, Package, Rename, Req, ReqType, Version}; use crate::util::{abort, process_reqs, Os}; use regex::Regex; use serde::Deserialize; use std::{collections::HashMap, env, error::Error, fs, path::PathBuf, str::FromStr}; -use std::io::{BufRead, BufReader}; use std::path::Path; use std::sync::{Arc, RwLock}; use structopt::StructOpt; @@ -25,6 +22,7 @@ mod dep_types; mod files; mod install; mod py_versions; +mod script; mod util; // todo: @@ -712,19 +710,6 @@ __pypackages__/ Ok(()) } -/// Read dependency data from a lock file. -fn read_lock(path: &Path) -> Result> { - let data = fs::read_to_string(path)?; - Ok(toml::from_str(&data)?) -} - -/// Write dependency data to a lock file. -fn write_lock(path: &Path, data: &Lock) -> Result<(), Box> { - let data = toml::to_string(data)?; - fs::write(path, data)?; - Ok(()) -} - fn parse_lockpack_rename(rename: &str) -> (u32, String) { let re = Regex::new(r"^(\d+)\s(.*)$").unwrap(); let caps = re @@ -972,153 +957,6 @@ fn run_cli_tool( } } -/// Find a script's dependencies from a variable: `__requires__ = [dep1, dep2]` -fn find_deps_from_script(file_path: &Path) -> Vec { - // todo: Helper for this type of logic? We use it several times in the program. 
- let f = fs::File::open(file_path).expect("Problem opening the Python script file."); - - let re = Regex::new(r"^__requires__\s*=\s*\[(.*?)\]$").unwrap(); - - let mut result = vec![]; - for line in BufReader::new(f).lines().flatten() { - if let Some(c) = re.captures(&line) { - let deps_list = c.get(1).unwrap().as_str().to_owned(); - let deps: Vec<&str> = deps_list.split(',').collect(); - result = deps - .into_iter() - .map(|d| { - d.to_owned() - .replace(" ", "") - .replace("\"", "") - .replace("'", "") - }) - .filter(|d| !d.is_empty()) - .collect(); - } - } - - result -} - -/// Run a standalone script file, with package management -/// // todo: Perhaps move this logic to its own file, if it becomes long. -/// todo: We're using script name as unique identifier; address this in the future, -/// todo perhaps with an id in a comment at the top of a file -fn run_script( - script_env_path: &Path, - dep_cache_path: &Path, - os: util::Os, - args: &[String], - pyflow_dir: &Path, -) { - #[cfg(debug_assertions)] - eprintln!("Run script args: {:?}", args); - // todo: DRY with run_cli_tool and subcommand::Install - let filename = if let Some(a) = args.get(0) { - a.clone() - } else { - abort("`script` must be followed by the script to run, eg `pyflow script myscript.py`"); - unreachable!() - }; - - // todo: Consider a metadata file, but for now, we'll use folders - // let scripts_data_path = script_env_path.join("scripts.toml"); - - let env_path = util::canon_join(script_env_path, &filename); - if !env_path.exists() { - fs::create_dir_all(&env_path).expect("Problem creating environment for the script"); - } - - // Write the version we found to a file. 
- let cfg_vers; - let py_vers_path = env_path.join("py_vers.txt"); - - if py_vers_path.exists() { - cfg_vers = Version::from_str( - &fs::read_to_string(py_vers_path) - .expect("Problem reading Python version for this script") - .replace("\n", ""), - ) - .expect("Problem parsing version from file"); - } else { - cfg_vers = util::prompt_py_vers(); - - fs::File::create(&py_vers_path) - .expect("Problem creating a file to store the Python version for this script"); - fs::write(py_vers_path, &cfg_vers.to_string()) - .expect("Problem writing Python version file."); - } - - // todo DRY - let pypackages_dir = env_path.join("__pypackages__"); - let (vers_path, py_vers) = - util::find_or_create_venv(&cfg_vers, &pypackages_dir, pyflow_dir, dep_cache_path); - - let bin_path = util::find_bin_path(&vers_path); - let lib_path = vers_path.join("lib"); - let script_path = vers_path.join("bin"); - let lock_path = env_path.join("pyproject.lock"); - - let paths = util::Paths { - bin: bin_path, - lib: lib_path, - entry_pt: script_path, - cache: dep_cache_path.to_owned(), - }; - - let deps = find_deps_from_script(&PathBuf::from(&filename)); - - let lock = match read_lock(&lock_path) { - Ok(l) => l, - Err(_) => Lock::default(), - }; - - let lockpacks = lock.package.unwrap_or_else(Vec::new); - - let reqs: Vec = deps - .iter() - .map(|name| { - let (fmtd_name, version) = if let Some(lp) = lockpacks - .iter() - .find(|lp| util::compare_names(&lp.name, name)) - { - ( - lp.name.clone(), - Version::from_str(&lp.version).expect("Problem getting version"), - ) - } else { - let vinfo = res::get_version_info( - name, - Some(Req::new_with_extras( - name.to_string(), - vec![Constraint::new_any()], - Extras::new_py(Constraint::new(ReqType::Exact, py_vers.clone())), - )), - ) - .unwrap_or_else(|_| panic!("Problem getting version info for {}", &name)); - (vinfo.0, vinfo.1) - }; - - Req::new(fmtd_name, vec![Constraint::new(ReqType::Caret, version)]) - }) - .collect(); - - sync( - &paths, - &lockpacks, - 
&reqs, - &[], - &[], - os, - &py_vers, - &lock_path, - ); - - if commands::run_python(&paths.bin, &[paths.lib], args).is_err() { - abort("Problem running this script") - }; -} - /// Function used by `Install` and `Uninstall` subcommands to syn dependencies with /// the config and lock files. #[allow(clippy::too_many_arguments)] @@ -1236,7 +1074,7 @@ fn sync( metadata: HashMap::new(), // todo: Problem with toml conversion. package: Some(updated_lock_packs.clone()), }; - if write_lock(lock_path, &updated_lock).is_err() { + if util::write_lock(lock_path, &updated_lock).is_err() { abort("Problem writing lock file"); } @@ -1373,7 +1211,7 @@ fn main() { // Run this before parsing the config. if let Some(x) = extcmd.clone() { if let ExternalSubcommands::Script = x.cmd { - run_script(&script_env_path, &dep_cache_path, os, &x.args, &pyflow_path); + script::run_script(&script_env_path, &dep_cache_path, os, &x.args, &pyflow_path); return; } } @@ -1531,7 +1369,7 @@ fn main() { } let mut found_lock = false; - let lock = match read_lock(&lock_path) { + let lock = match util::read_lock(&lock_path) { Ok(l) => { found_lock = true; l diff --git a/src/script.rs b/src/script.rs new file mode 100644 index 0000000..f91c1a2 --- /dev/null +++ b/src/script.rs @@ -0,0 +1,157 @@ +use crate::dep_resolution::res; +use crate::dep_types::{Constraint, Extras, Lock, Req, ReqType, Version}; +use crate::util; +use regex::Regex; +use std::fs; +use std::io::{BufRead, BufReader}; +use std::path::{Path, PathBuf}; + +use crate::commands; +use std::str::FromStr; + +/// Run a standalone script file, with package management +/// todo: We're using script name as unique identifier; address this in the future, +/// todo perhaps with an id in a comment at the top of a file +pub fn run_script( + script_env_path: &Path, + dep_cache_path: &Path, + os: util::Os, + args: &[String], + pyflow_dir: &Path, +) { + #[cfg(debug_assertions)] + eprintln!("Run script args: {:?}", args); + // todo: DRY with run_cli_tool and 
subcommand::Install + let filename = if let Some(a) = args.get(0) { + a.clone() + } else { + util::abort( + "`script` must be followed by the script to run, eg `pyflow script myscript.py`", + ); + unreachable!() + }; + + // todo: Consider a metadata file, but for now, we'll use folders + // let scripts_data_path = script_env_path.join("scripts.toml"); + + let env_path = util::canon_join(script_env_path, &filename); + if !env_path.exists() { + fs::create_dir_all(&env_path).expect("Problem creating environment for the script"); + } + + // Write the version we found to a file. + let cfg_vers; + let py_vers_path = env_path.join("py_vers.txt"); + + if py_vers_path.exists() { + cfg_vers = Version::from_str( + &fs::read_to_string(py_vers_path) + .expect("Problem reading Python version for this script") + .replace("\n", ""), + ) + .expect("Problem parsing version from file"); + } else { + cfg_vers = util::prompt_py_vers(); + + fs::File::create(&py_vers_path) + .expect("Problem creating a file to store the Python version for this script"); + fs::write(py_vers_path, &cfg_vers.to_string()) + .expect("Problem writing Python version file."); + } + + // todo DRY + let pypackages_dir = env_path.join("__pypackages__"); + let (vers_path, py_vers) = + util::find_or_create_venv(&cfg_vers, &pypackages_dir, pyflow_dir, dep_cache_path); + + let bin_path = util::find_bin_path(&vers_path); + let lib_path = vers_path.join("lib"); + let script_path = vers_path.join("bin"); + let lock_path = env_path.join("pyproject.lock"); + + let paths = util::Paths { + bin: bin_path, + lib: lib_path, + entry_pt: script_path, + cache: dep_cache_path.to_owned(), + }; + + let deps = find_deps_from_script(&PathBuf::from(&filename)); + + let lock = match util::read_lock(&lock_path) { + Ok(l) => l, + Err(_) => Lock::default(), + }; + + let lockpacks = lock.package.unwrap_or_else(Vec::new); + + let reqs: Vec = deps + .iter() + .map(|name| { + let (fmtd_name, version) = if let Some(lp) = lockpacks + .iter() + 
.find(|lp| util::compare_names(&lp.name, name)) + { + ( + lp.name.clone(), + Version::from_str(&lp.version).expect("Problem getting version"), + ) + } else { + let vinfo = res::get_version_info( + name, + Some(Req::new_with_extras( + name.to_string(), + vec![Constraint::new_any()], + Extras::new_py(Constraint::new(ReqType::Exact, py_vers.clone())), + )), + ) + .unwrap_or_else(|_| panic!("Problem getting version info for {}", &name)); + (vinfo.0, vinfo.1) + }; + + Req::new(fmtd_name, vec![Constraint::new(ReqType::Caret, version)]) + }) + .collect(); + + crate::sync( + &paths, + &lockpacks, + &reqs, + &[], + &[], + os, + &py_vers, + &lock_path, + ); + + if commands::run_python(&paths.bin, &[paths.lib], args).is_err() { + util::abort("Problem running this script") + }; +} + +/// Find a script's dependencies from a variable: `__requires__ = [dep1, dep2]` +fn find_deps_from_script(file_path: &Path) -> Vec { + // todo: Helper for this type of logic? We use it several times in the program. + let f = fs::File::open(file_path).expect("Problem opening the Python script file."); + + let re = Regex::new(r"^__requires__\s*=\s*\[(.*?)\]$").unwrap(); + + let mut result = vec![]; + for line in BufReader::new(f).lines().flatten() { + if let Some(c) = re.captures(&line) { + let deps_list = c.get(1).unwrap().as_str().to_owned(); + let deps: Vec<&str> = deps_list.split(',').collect(); + result = deps + .into_iter() + .map(|d| { + d.to_owned() + .replace(" ", "") + .replace("\"", "") + .replace("'", "") + }) + .filter(|d| !d.is_empty()) + .collect(); + } + } + result +} diff --git a/src/util.rs b/src/util.rs index 3f1cb1f..33ffc33 100644 --- a/src/util.rs +++ b/src/util.rs @@ -5,7 +5,7 @@ use crate::dep_resolution::WarehouseRelease; use crate::dep_types::Extras; use crate::{ commands, - dep_types::{Constraint, DependencyError, Req, ReqType, Version}, + dep_types::{Constraint, DependencyError, Lock, Req, ReqType, Version}, files, install::{self, PackageType}, py_versions, util, 
CliConfig, @@ -17,7 +17,9 @@ use std::io::{self, BufRead, BufReader, Read, Write}; use std::str::FromStr; use std::{ collections::HashMap, - env, fs, + env, + error::Error, + fs, path::{Path, PathBuf}, process, thread, time, }; @@ -986,6 +988,19 @@ pub fn process_reqs(reqs: Vec, git_path: &Path, paths: &util::Paths) -> Vec updated_reqs } +/// Read dependency data from a lock file. +pub fn read_lock(path: &Path) -> Result> { + let data = fs::read_to_string(path)?; + Ok(toml::from_str(&data)?) +} + +/// Write dependency data to a lock file. +pub fn write_lock(path: &Path, data: &Lock) -> Result<(), Box> { + let data = toml::to_string(data)?; + fs::write(path, data)?; + Ok(()) +} + #[cfg(test)] mod tests { use rstest::rstest; From 54db99f6ef11b3d4b8b63179ac11585c7d7fdf94 Mon Sep 17 00:00:00 2001 From: Matteo Santamaria Date: Sun, 11 Jul 2021 21:00:31 -0400 Subject: [PATCH 05/41] Allow for __python__ variable specification --- Cargo.lock | 60 +++++++++++++++++++++++---- Cargo.toml | 2 +- src/script.rs | 112 +++++++++++++++++++++++++++++++++++++++++++------- 3 files changed, 149 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed6e36b..878110f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -566,7 +566,7 @@ dependencies = [ "itoa", "log", "net2", - "rustc_version", + "rustc_version 0.2.3", "time", "tokio", "tokio-buf", @@ -906,7 +906,7 @@ checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252" dependencies = [ "lock_api", "parking_lot_core", - "rustc_version", + "rustc_version 0.2.3", ] [[package]] @@ -919,7 +919,7 @@ dependencies = [ "cloudabi", "libc", "redox_syscall 0.1.57", - "rustc_version", + "rustc_version 0.2.3", "smallvec", "winapi 0.3.9", ] @@ -936,6 +936,15 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +[[package]] +name = "pest" +version = "2.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +dependencies = [ + "ucd-trie", +] + [[package]] name = "pkg-config" version = "0.3.19" @@ -1260,14 +1269,14 @@ dependencies = [ [[package]] name = "rstest" -version = "0.6.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec448bc157977efdc0a71369cf923915b0c4806b1b2449c3fb011071d6f7c38" +checksum = "041bb0202c14f6a158bbbf086afb03d0c6e975c2dec7d4912f8061ed44f290af" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", "proc-macro2", "quote", - "rustc_version", + "rustc_version 0.3.3", "syn", ] @@ -1289,7 +1298,16 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" dependencies = [ - "semver", + "semver 0.9.0", +] + +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", ] [[package]] @@ -1333,7 +1351,16 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" dependencies = [ - "semver-parser", + "semver-parser 0.7.0", +] + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser 0.10.2", ] [[package]] @@ -1342,6 +1369,15 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + [[package]] name = "serde" version = "1.0.126" @@ -1731,6 +1767,12 @@ dependencies = [ "cfg-if 0.1.10", ] +[[package]] +name = "ucd-trie" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" + [[package]] name = "unicase" version = "2.6.0" diff --git a/Cargo.toml b/Cargo.toml index 3d0494a..aa2b2e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,7 +47,7 @@ reqwest = { version = "^0.9.21", default-features = false, features = ["rustls-t mockall_double = "^0.2.0" [dev-dependencies] -rstest = "0.6.4" +rstest = "0.10.0" mockall = "^0.9" [package.metadata.deb] diff --git a/src/script.rs b/src/script.rs index f91c1a2..b8ef38d 100644 --- a/src/script.rs +++ b/src/script.rs @@ -3,10 +3,10 @@ use crate::dep_types::{Constraint, Extras, Lock, Req, ReqType, Version}; use crate::util; use regex::Regex; use std::fs; -use std::io::{BufRead, BufReader}; -use std::path::{Path, PathBuf}; +use std::path::Path; use crate::commands; +use crate::dep_parser::parse_version; use std::str::FromStr; /// Run a standalone script file, with package management @@ -21,9 +21,10 @@ pub fn run_script( ) { #[cfg(debug_assertions)] eprintln!("Run script args: {:?}", args); + // todo: DRY with run_cli_tool and subcommand::Install - let filename = if let Some(a) = args.get(0) { - a.clone() + let filename = if let Some(arg) = args.get(0) { + arg } else { util::abort( "`script` must be followed by the script to run, eg `pyflow script myscript.py`", @@ -43,7 +44,13 @@ pub fn run_script( let cfg_vers; let py_vers_path = env_path.join("py_vers.txt"); - if py_vers_path.exists() { + let script = fs::read_to_string(filename).expect("Problem opening the Python script file."); + let dunder_python_vers = check_for_specified_py_vers(&script); + + if let Some(dpv) = dunder_python_vers { + cfg_vers = dpv; + 
create_or_update_version_file(&py_vers_path, &cfg_vers); + } else if py_vers_path.exists() { cfg_vers = Version::from_str( &fs::read_to_string(py_vers_path) .expect("Problem reading Python version for this script") @@ -52,11 +59,7 @@ pub fn run_script( .expect("Problem parsing version from file"); } else { cfg_vers = util::prompt_py_vers(); - - fs::File::create(&py_vers_path) - .expect("Problem creating a file to store the Python version for this script"); - fs::write(py_vers_path, &cfg_vers.to_string()) - .expect("Problem writing Python version file."); + create_or_update_version_file(&py_vers_path, &cfg_vers); } // todo DRY @@ -76,7 +79,7 @@ pub fn run_script( cache: dep_cache_path.to_owned(), }; - let deps = find_deps_from_script(&PathBuf::from(&filename)); + let deps = find_deps_from_script(&script); let lock = match util::read_lock(&lock_path) { Ok(l) => l, @@ -129,15 +132,55 @@ pub fn run_script( }; } +/// Create the `py_vers.txt` if it doesn't exist, and then store `cfg_vers` within. +fn create_or_update_version_file(py_vers_path: &Path, cfg_vers: &Version) { + if !py_vers_path.exists() { + fs::File::create(&py_vers_path) + .expect("Problem creating a file to store the Python version for this script"); + } + fs::write(py_vers_path, &cfg_vers.to_string()).expect("Problem writing Python version file."); +} + +/// Find a script's Python version specificion by looking for the `__python__` variable. +/// +/// If a `__python__` variable is identified, the version must have major, minor, and +/// patch components to be considered valid. Otherwise, there is still some ambiguity in +/// which version to use and an error is thrown. 
+fn check_for_specified_py_vers(script: &str) -> Option { + let re = Regex::new(r#"^__python__\s*=\s*"(.*?)"$"#).unwrap(); + + for line in script.lines() { + if let Some(capture) = re.captures(&line) { + let specification = capture.get(1).unwrap().as_str(); + let (_, version) = parse_version(specification).unwrap(); + match version { + Version { + major: Some(_), + minor: Some(_), + patch: Some(_), + extra_num: None, + modifier: None, + .. + } => return Some(version), + _ => { + util::abort( + "Problem parsing `__python__` variable. Make sure you've included \ + major, minor, and patch specifications (eg `__python__ = X.Y.Z`)", + ); + } + } + } + } + None +} + /// Find a script's dependencies from a variable: `__requires__ = [dep1, dep2]` -fn find_deps_from_script(file_path: &Path) -> Vec { +fn find_deps_from_script(script: &str) -> Vec { // todo: Helper for this type of logic? We use it several times in the program. - let f = fs::File::open(file_path).expect("Problem opening the Python script file."); - let re = Regex::new(r"^__requires__\s*=\s*\[(.*?)\]$").unwrap(); let mut result = vec![]; - for line in BufReader::new(f).lines().flatten() { + for line in script.lines() { if let Some(c) = re.captures(&line) { let deps_list = c.get(1).unwrap().as_str().to_owned(); let deps: Vec<&str> = deps_list.split(',').collect(); @@ -155,3 +198,42 @@ fn find_deps_from_script(file_path: &Path) -> Vec { } result } + +#[cfg(test)] +mod tests { + use crate::dep_types::Version; + use crate::script::check_for_specified_py_vers; + use rstest::rstest; + + const NO_DUNDER_PYTHON: &str = r#" +if __name__ == "__main__": + print("Hello, world") +"#; + + const VALID_DUNDER_PYTHON: &str = r#" +__python__ = "3.9.1" + +if __name__ == "__main__": + print("Hello, world") +"#; + + fn py_version() -> Option { + let version = Version { + major: Some(3), + minor: Some(9), + patch: Some(1), + extra_num: None, + modifier: None, + star: false, + }; + Some(version) + } + + #[rstest] + 
#[case(NO_DUNDER_PYTHON, None)] + #[case(VALID_DUNDER_PYTHON, py_version())] + fn dunder_python_specified(#[case] src: &str, #[case] expected: Option) { + let result = check_for_specified_py_vers(src); + assert_eq!(result, expected) + } +} From b0b604d58703cbfa1112dea6ac84403bc49340f0 Mon Sep 17 00:00:00 2001 From: Matteo Santamaria Date: Mon, 12 Jul 2021 08:11:38 -0400 Subject: [PATCH 06/41] Include instructions for __python__ in README --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 82388b9..a1e3f21 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,9 @@ creates a folder with the basics. ## Quick-and-dirty start for quick-and-dirty scripts - Add the line `__requires__ = ['numpy', 'requests']` somewhere in your script, where `numpy` and `requests` are dependencies. -Run `pyflow script myscript.py`, where `myscript.py` is the name of your script. +- Optionally add the line `__python__ = X.Y.Z`, where `X.Y.Z` is a Python version specification. +Without this line, you will be prompted to choose a version when running the script. +- Run `pyflow script myscript.py`, where `myscript.py` is the name of your script. This will set up an isolated environment for this script, and install dependencies as required. This is a safe way to run one-off Python files that aren't attached to a project, but have dependencies. 
From 3cfbb442f8617fbac0364279bef3dc916eb7853c Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Fri, 8 Oct 2021 23:34:54 +0200 Subject: [PATCH 07/41] GitHub Action to lint Python code --- .github/workflows/lint_python.yml | 25 +++++++++++++++++++++++++ update_version.py | 5 ++++- 2 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/lint_python.yml diff --git a/.github/workflows/lint_python.yml b/.github/workflows/lint_python.yml new file mode 100644 index 0000000..4428cbc --- /dev/null +++ b/.github/workflows/lint_python.yml @@ -0,0 +1,25 @@ +name: lint_python +on: [pull_request, push] +jobs: + lint_python: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.10' + - run: pip install --upgrade pip wheel + - run: pip install bandit black codespell flake8 flake8-bugbear + flake8-comprehensions isort mypy pytest pyupgrade safety + - run: bandit --recursive --skip B101 . + - run: black --check . || true + - run: codespell || true # --ignore-words-list="" --skip="*.css,*.js,*.lock" + - run: flake8 . --max-complexity=10 --max-line-length=88 --show-source --statistics + - run: isort --check-only --profile black . + - run: pip install -r requirements.txt || pip install --editable . || true + - run: mkdir --parents --verbose .mypy_cache + - run: mypy --ignore-missing-imports --install-types --non-interactive . + - run: pytest . || true + - run: pytest --doctest-modules . || true + - run: shopt -s globstar && pyupgrade --py36-plus **/*.py + - run: safety check diff --git a/update_version.py b/update_version.py index f7ef799..8bac956 100644 --- a/update_version.py +++ b/update_version.py @@ -1,9 +1,10 @@ # A script to update the version in config files. 
-import sys import re +import sys vers = sys.argv[1] + def helper(filename: str, startswith: str, quotes: bool): data = "" with open(filename) as f: @@ -16,6 +17,7 @@ def helper(filename: str, startswith: str, quotes: bool): with open(filename, 'w') as f: f.write(data) + def main(): helper('Cargo.toml', "version = ", True) helper('snapcraft.yaml', "version: ", False) @@ -31,4 +33,5 @@ def main(): print(f"Updated version to {vers}") + main() From 9907d4143b5869517649e989822da65409d86dac Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 25 Oct 2021 09:36:40 +0200 Subject: [PATCH 08/41] Fix typos discovered by codespell --- .github/workflows/codespell.yml | 11 +++++++++++ .github/workflows/lint_python.yml | 25 ------------------------- Cargo.toml | 4 ++-- README.md | 4 ++-- src/dep_resolution.rs | 8 ++++---- src/dep_types.rs | 4 ++-- src/files.rs | 4 ++-- src/install.rs | 4 ++-- src/main.rs | 8 ++++---- src/util.rs | 2 +- 10 files changed, 30 insertions(+), 44 deletions(-) create mode 100644 .github/workflows/codespell.yml delete mode 100644 .github/workflows/lint_python.yml diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 0000000..2958852 --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,11 @@ +name: codespell +on: [pull_request, push] +jobs: + codespell: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: codespell-project/actions-codespell@master + with: + ignore_words_list: crate,dows,pard,raison + diff --git a/.github/workflows/lint_python.yml b/.github/workflows/lint_python.yml deleted file mode 100644 index 4428cbc..0000000 --- a/.github/workflows/lint_python.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: lint_python -on: [pull_request, push] -jobs: - lint_python: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.10' - - run: pip install --upgrade pip wheel - - run: pip install bandit black 
codespell flake8 flake8-bugbear - flake8-comprehensions isort mypy pytest pyupgrade safety - - run: bandit --recursive --skip B101 . - - run: black --check . || true - - run: codespell || true # --ignore-words-list="" --skip="*.css,*.js,*.lock" - - run: flake8 . --max-complexity=10 --max-line-length=88 --show-source --statistics - - run: isort --check-only --profile black . - - run: pip install -r requirements.txt || pip install --editable . || true - - run: mkdir --parents --verbose .mypy_cache - - run: mypy --ignore-missing-imports --install-types --non-interactive . - - run: pytest . || true - - run: pytest --doctest-modules . || true - - run: shopt -s globstar && pyupgrade --py36-plus **/*.py - - run: safety check diff --git a/Cargo.toml b/Cargo.toml index aa2b2e9..7940989 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,7 +25,7 @@ rust-ini = "0.13" xz2 = "^0.1.6" regex = "^1.1.9" ring = "^0.16.9" -# We disable, by ommission, suggestions, so it doesn't think `pyflow ipython` is a misspelling +# We disable, by omission, suggestions, so it doesn't think `pyflow ipython` is a misspelling # of `pyflow python`. structopt = { version = "^0.3.3", default_features = false, features = ["color", "wrap_help", "doc"] } serde = {version = "^1.0.101", features = ["derive"]} @@ -58,7 +58,7 @@ PEP 582 -- Python local packages directory. It manages dependencies, keeping them isolated in the project directory, and runs python in an environment which uses this directory. Per PEP 582, dependencies are stored in the project directory → `__pypackages__` → `3.7`(etc) → `lib`. -A virtual environment is created in the same diretory as `lib`, and is used +A virtual environment is created in the same directory as `lib`, and is used transparently.""" diff --git a/README.md b/README.md index a1e3f21..1268e75 100644 --- a/README.md +++ b/README.md @@ -282,7 +282,7 @@ be added to `pyproject.toml` and installed. 
You can use the `--dev` flag to inst - `pyflow` - Run a Python REPL - `pyflow main.py` - Run a python file - `pyflow ipython`, `pyflow black` etc - Run a CLI tool like `ipython`, or a project function - For the former, this must have been installed by a dependency; for the latter, it's specfied + For the former, this must have been installed by a dependency; for the latter, it's specified under `[tool.pyflow]`, `scripts` - `pyflow script myscript.py` - Run a one-off script, outside a project directory, with per-file package management @@ -437,7 +437,7 @@ deb, run the new version's installer or deb. If manually calling a binary, repla - If installed via `Pip`, run `pip uninstall pyflow`. - If installed via Windows installer, run the Installer again and select `Remove` when asked, or use `Apps & features`. -- If installed via a `deb`, useg the `Software Center`. +- If installed via a `deb`, use the `Software Center`. - If manually calling a binary, remove it. ## Contributing diff --git a/src/dep_resolution.rs b/src/dep_resolution.rs index 95555dd..8eae487 100644 --- a/src/dep_resolution.rs +++ b/src/dep_resolution.rs @@ -50,7 +50,7 @@ struct WarehouseData { #[derive(Clone, Debug, Deserialize)] struct ReqCache { - // Name is present from pydeps if gestruct packagetting deps for multiple package names. Otherwise, we ommit + // Name is present from pydeps if gestruct packagetting deps for multiple package names. Otherwise, we commit // it since we already know the name when making the request. name: Option, version: String, @@ -189,7 +189,7 @@ fn guess_graph( }; // Now add info from lock packs for data we didn't query. The purpose of passing locks - // into the dep resolution process is to avoid unecessary HTTP calls and resolution iterations. + // into the dep resolution process is to avoid unnecessary HTTP calls and resolution iterations. for req in locked_reqs { // Find the corresponding lock package. There should be exactly one. 
let package = locked @@ -622,7 +622,7 @@ pub(super) mod res { let mut children: Vec<(u32, String, Version)> = packs2 .iter() .filter_map(|p| { - // If there wee multiple instances of this dep, the parent id may have been updated. + // If there were multiple instances of this dep, the parent id may have been updated. let parent_id = match updated_ids.get(&p.parent) { Some(updated_parent) => *updated_parent, None => p.parent, @@ -723,7 +723,7 @@ pub(super) mod res { } // If a version we've examined meets all constraints for packages that use it, use it - - // we've already built the graph to accomodate its sub-deps. + // we've already built the graph to accommodate its sub-deps. // If unable, find the highest version that meets the constraints, and determine // what its dependencies are. diff --git a/src/dep_types.rs b/src/dep_types.rs index 88da21d..1287a23 100644 --- a/src/dep_types.rs +++ b/src/dep_types.rs @@ -536,7 +536,7 @@ impl Constraint { /// Called `to_string2` to avoid shadowing `Display` pub fn to_string2(&self, ommit_equals: bool, pip_style: bool) -> String { - // ommit_equals indicates we dont' want to add any type if it's exact. Eg in config files. + // ommit_equals indicates we don't want to add any type if it's exact. Eg in config files. // pip_style means that ^ is transformed to ^=, and ~ to ~= let mut type_str = if ommit_equals && self.type_ == ReqType::Exact { "".to_string() @@ -792,7 +792,7 @@ pub fn intersection( ranges2: &[(Version, Version)], ) -> Vec<(Version, Version)> { let mut result = vec![]; - // Each range imposes an additonal constraint. + // Each range imposes an additional constraint. for rng1 in ranges1 { for rng2 in ranges2 { // 0 is min, 1 is max. 
diff --git a/src/files.rs b/src/files.rs index 56c6ade..a629711 100644 --- a/src/files.rs +++ b/src/files.rs @@ -205,7 +205,7 @@ fn extend_or_insert(mut cfg_lines: Vec, section_header: &str, reqs: &[Re cfg_lines.splice(section.i_start..section.i_end, all_deps); cfg_lines } - // The section did not alredy exist, so we must create it + // The section did not already exist, so we must create it None => { // A section is composed of its header, followed by all the requirements // and then an empty line @@ -228,7 +228,7 @@ fn extend_or_insert(mut cfg_lines: Vec, section_header: &str, reqs: &[Re /// Add dependencies and dev-dependencies to `cfg-data`, creating the sections if necessary. /// /// The added sections are appended to the end of the file. Split from `add_reqs_to_cfg` -/// to accomodate testing. +/// to accommodate testing. fn update_cfg(cfg_data: &str, added: &[Req], added_dev: &[Req]) -> String { let cfg_lines: Vec = cfg_data.lines().map(str::to_string).collect(); diff --git a/src/install.rs b/src/install.rs index a05f84c..4fe9ca9 100644 --- a/src/install.rs +++ b/src/install.rs @@ -280,7 +280,7 @@ pub fn download_and_install_package( f_path.file_name().expect("Problem getting file name"); // In the `pandocfilters` Python package, the readme file specified in - // `setup.py` is a symlink, which we can't unwrap, and is requried to exist, + // `setup.py` is a symlink, which we can't unwrap, and is required to exist, // or the wheel build fails. Workaround here; may apply to other packages as well. if filename .to_str() @@ -603,7 +603,7 @@ pub fn rename_package_files(top_path: &Path, old: &str, new: &str) { ); data = data.replace(&format!("from {}.", old), &format!("from {}.", new)); data = data.replace(&format!("import {}", old), &format!("import {}", new)); - // Todo: Is this one too general? Supercedes the first. Needed for things like `add_newdoc('numpy.core.multiarray...` + // Todo: Is this one too general? Supersedes the first. 
Needed for things like `add_newdoc('numpy.core.multiarray...` data = data.replace(&format!("{}.", old), &format!("{}.", new)); fs::write(path, data).expect("Problem writing file while renaming"); diff --git a/src/main.rs b/src/main.rs index dbbaa37..7b58b1e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -56,7 +56,7 @@ enum SubCommand { name: String, // holds the project name. }, - /** Install packages from `pyproject.toml`, `pyflow.lock`, or speficied ones. Example: + /** Install packages from `pyproject.toml`, `pyflow.lock`, or specified ones. Example: `pyflow install`: sync your installation with `pyproject.toml`, or `pyflow.lock` if it exists. `pyflow install numpy scipy`: install `numpy` and `scipy`.*/ @@ -1010,7 +1010,7 @@ fn sync( // #[cfg(target_os = "macos")] // println!("🔍 Resolving dependencies..."); - // Dev reqs and normal reqs are both installed here; we only ommit dev reqs + // Dev reqs and normal reqs are both installed here; we only commit dev reqs // when packaging. let mut combined_reqs = reqs.to_vec(); for dev_req in dev_reqs.to_vec() { @@ -1079,7 +1079,7 @@ fn sync( } // Now that we've confirmed or modified the lock file, we're ready to sync installed - // depenencies with it. + // dependencies with it. sync_deps( paths, &updated_lock_packs, @@ -1392,7 +1392,7 @@ fn main() { // Now handle subcommands that require info about the environment match subcmd { - // Add pacakge names to `pyproject.toml` if needed. Then sync installed packages + // Add package names to `pyproject.toml` if needed. Then sync installed packages // and `pyproject.lock` with the `pyproject.toml`. // We use data from three sources: `pyproject.toml`, `pyflow.lock`, and // the currently-installed packages, found by crawling metadata in the `lib` path. 
diff --git a/src/util.rs b/src/util.rs index 33ffc33..7459ad2 100644 --- a/src/util.rs +++ b/src/util.rs @@ -787,7 +787,7 @@ pub fn get_git_author() -> Vec { } pub fn find_first_file(path: &Path) -> PathBuf { - // todo: Propogate errors rather than abort here? + // todo: Propagate errors rather than abort here? { // There should only be one file in this dist folder: The wheel we're looking for. for entry in path From 872d042c3aa1a5f2774756df3368f1a2247cc5d1 Mon Sep 17 00:00:00 2001 From: Alex Touchet Date: Tue, 26 Oct 2021 23:04:01 -0700 Subject: [PATCH 09/41] Fix crates.io badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1268e75..cd8fabf 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![crates.io version](https://meritbadge.herokuapp.com/pyflow)](https://crates.io/crates/pyflow) +[![crates.io version](https://img.shields.io/crates/v/pyflow.svg)](https://crates.io/crates/pyflow) [![Build Status](https://travis-ci.org/David-OConnor/pyflow.svg?branch=master)](https://travis-ci.org/David-OConnor/pyflow) From ac83448e561847371a1221db04e7f2d7ca6384d1 Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Sun, 7 Nov 2021 20:14:41 -0600 Subject: [PATCH 10/41] Use existing variables to transform name in extract_zip --- src/util.rs | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/util.rs b/src/util.rs index 7459ad2..2700704 100644 --- a/src/util.rs +++ b/src/util.rs @@ -428,18 +428,14 @@ pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, St let extracted_file = if !file_str.contains("dist-info") && !file_str.contains("egg-info") { match rename { Some((old, new)) => PathBuf::from_str( - file.enclosed_name() - .unwrap() - .to_str() - .unwrap() - .to_owned() + file_str.to_owned() .replace(old, new) .as_str(), ), - None => PathBuf::from_str(file.enclosed_name().unwrap().to_str().unwrap()), + None => PathBuf::from_str(file_str), } } else { - 
PathBuf::from_str(file.enclosed_name().unwrap().to_str().unwrap()) + PathBuf::from_str(file_str) }; let outpath = out_path.join(extracted_file.unwrap()); From 993d9002f17f9f45b68cfd3a2d7d8af9d6d9448c Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Sun, 7 Nov 2021 23:46:23 -0600 Subject: [PATCH 11/41] Add top-level folder to source packages without it to enable wheel-building --- src/install.rs | 10 +++++----- src/util.rs | 14 ++++++++++++-- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/src/install.rs b/src/install.rs index 4fe9ca9..a5f42c7 100644 --- a/src/install.rs +++ b/src/install.rs @@ -233,7 +233,7 @@ pub fn download_and_install_package( match package_type { PackageType::Wheel => { - util::extract_zip(&archive_file, &paths.lib, &rename); + util::extract_zip(&archive_file, &paths.lib, &rename, &None); } PackageType::Source => { // todo: Support .tar.bz2 @@ -308,7 +308,7 @@ pub fn download_and_install_package( // We'll then continue with this leg, and build/move/cleanup. // Check if we have a zip file instead. - util::extract_zip(&archive_file, &paths.lib, &None); + util::extract_zip(&archive_file, &paths.lib, &None, &Some((name, filename))); } } } @@ -322,7 +322,7 @@ pub fn download_and_install_package( // We'll then continue with this leg, and build/move/cleanup. // Check if we have a zip file instead. 
- util::extract_zip(&archive_file, &paths.lib, &None); + util::extract_zip(&archive_file, &paths.lib, &None, &Some((name, filename))); } } @@ -465,7 +465,7 @@ pub fn download_and_install_package( .expect("Problem copying wheel built from source"); let file_created = fs::File::open(&moved_path).expect("Can't find created wheel."); - util::extract_zip(&file_created, &paths.lib, &rename); + util::extract_zip(&file_created, &paths.lib, &rename, &None); // Remove the created and moved wheel if fs::remove_file(moved_path).is_err() { @@ -681,7 +681,7 @@ pub fn download_and_install_git( let archive_path = &paths.lib.join(&filename); let archive_file = util::open_archive(archive_path); - util::extract_zip(&archive_file, &paths.lib, &None); + util::extract_zip(&archive_file, &paths.lib, &None, &None); // Use the wheel's name to find the dist-info path, to avoid the chicken-egg scenario // of need the dist-info path to find the version. diff --git a/src/util.rs b/src/util.rs index 2700704..17a3475 100644 --- a/src/util.rs +++ b/src/util.rs @@ -14,6 +14,7 @@ use ini::Ini; use regex::Regex; use serde::Deserialize; use std::io::{self, BufRead, BufReader, Read, Write}; +use std::path::Component; use std::str::FromStr; use std::{ collections::HashMap, @@ -406,7 +407,7 @@ pub fn compare_names(name1: &str, name2: &str) -> bool { /// Extract the wheel or zip. /// From [this example](https://github.com/mvdnes/zip-rs/blob/master/examples/extract.rs#L32) -pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, String)>) { +pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, String)>, package_names: &Option<(&str, &str)>) { // Separate function, since we use it twice. 
let mut archive = if let Ok(a) = zip::ZipArchive::new(file) { a @@ -422,8 +423,17 @@ pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, St let mut file = archive.by_index(i).unwrap(); // Change name here instead of after in case we've already installed a non-renamed version. // (which would be overwritten by this one.) + let mut file_str = PathBuf::new(); let file_str2 = file.enclosed_name().unwrap(); - let file_str = file_str2.to_str().expect("Problem converting path to str"); + if let Some((name, filename)) = package_names { + let stem = Path::new(filename).file_stem().unwrap(); + let components: Vec = file_str2.components().collect(); + if components.len() == 1 || !components[0].as_os_str().to_string_lossy().starts_with(name) { + file_str.push(stem); + } + } + file_str.push(file_str2); + let file_str = file_str.to_str().expect("Problem converting path to str"); let extracted_file = if !file_str.contains("dist-info") && !file_str.contains("egg-info") { match rename { From d29811146d8a5fc1f4535415576ec69f8ddbfa7c Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Mon, 8 Nov 2021 00:25:54 -0600 Subject: [PATCH 12/41] Fix typos in error messages/comments --- src/install.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/install.rs b/src/install.rs index a5f42c7..7f5fa73 100644 --- a/src/install.rs +++ b/src/install.rs @@ -36,8 +36,8 @@ fn replace_distutils(setup_path: &Path) { t } else { util::abort(&format!( - "Can't find setup.py in this source distribution\ - path: {:?}. This could mean there are no suitable wheels for this package,\ + "Can't find setup.py in this source distribution \ + path: {:?}. 
This could mean there are no suitable wheels for this package, \ and there's a problem with its setup.py.", setup_path )); @@ -210,7 +210,7 @@ pub fn download_and_install_package( let mut input = String::new(); io::stdin() .read_line(&mut input) - .expect("Unable to read user input Hash fail decision"); + .expect("Unable to read user input hash fail decision"); let input = input .chars() @@ -301,7 +301,7 @@ pub fn download_and_install_package( Err(e) => { // todo: dRY while troubleshooting println!( - "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", + "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", &archive_file, e ); // The extract_wheel function just extracts a zip file, so it's appropriate here. @@ -315,7 +315,7 @@ pub fn download_and_install_package( } Err(e) => { println!( - "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", + "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", &archive_file, e ); // The extract_wheel function just extracts a zip file, so it's appropriate here. @@ -327,7 +327,7 @@ pub fn download_and_install_package( } // The archive is now unpacked into a parent folder from the `tar.gz`. Place - // its sub-folders directly in the lib folder, and deleten the parent. + // its sub-folders directly in the lib folder, and delete the parent. 
let re = Regex::new(r"^(.*?)(?:\.tar\.gz|\.zip)$").unwrap(); let folder_name = re .captures(filename) From 4b208dae84d1805e998e01fc62dca777912427df Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Mon, 8 Nov 2021 00:29:36 -0600 Subject: [PATCH 13/41] Deduplicate archive troubleshooting code Additionally, hope that this is acceptable Rust --- src/install.rs | 32 +++++++++++++------------------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/src/install.rs b/src/install.rs index 7f5fa73..72cc70c 100644 --- a/src/install.rs +++ b/src/install.rs @@ -257,6 +257,7 @@ pub fn download_and_install_package( // symlinks in the archive may cause the unpack to break. If this happens, we want // to continue unpacking the other files. // Overall, this is a pretty verbose workaround! + let mut archive_error = Ok(()); match archive.entries() { Ok(entries) => { for file in entries { @@ -299,32 +300,25 @@ pub fn download_and_install_package( }; } Err(e) => { - // todo: dRY while troubleshooting - println!( - "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", - &archive_file, e - ); - // The extract_wheel function just extracts a zip file, so it's appropriate here. - // We'll then continue with this leg, and build/move/cleanup. - - // Check if we have a zip file instead. - util::extract_zip(&archive_file, &paths.lib, &None, &Some((name, filename))); + // We'll continue with this leg, then check if we have a zip file instead. + archive_error = Err(e); } } } } Err(e) => { - println!( - "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", - &archive_file, e - ); - // The extract_wheel function just extracts a zip file, so it's appropriate here. - // We'll then continue with this leg, and build/move/cleanup. - - // Check if we have a zip file instead. - util::extract_zip(&archive_file, &paths.lib, &None, &Some((name, filename))); + // We'll continue with this leg, then check if we have a zip file instead. 
+ archive_error = Err(e); } } + // Check if we have a zip file instead. + if let Err(e) = archive_error { + println!( + "Problem opening the tar.gz archive: {:?}: {:?}, checking if it's a zip...", + &archive_file, e + ); + util::extract_zip(&archive_file, &paths.lib, &None, &Some((name, filename))); + } // The archive is now unpacked into a parent folder from the `tar.gz`. Place // its sub-folders directly in the lib folder, and delete the parent. From 69361703fe44635a62937fdb6a321b8d5eb3ded3 Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Mon, 8 Nov 2021 00:33:36 -0600 Subject: [PATCH 14/41] Run `cargo fmt` --- src/util.rs | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/util.rs b/src/util.rs index 17a3475..2f93b99 100644 --- a/src/util.rs +++ b/src/util.rs @@ -407,7 +407,12 @@ pub fn compare_names(name1: &str, name2: &str) -> bool { /// Extract the wheel or zip. /// From [this example](https://github.com/mvdnes/zip-rs/blob/master/examples/extract.rs#L32) -pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, String)>, package_names: &Option<(&str, &str)>) { +pub fn extract_zip( + file: &fs::File, + out_path: &Path, + rename: &Option<(String, String)>, + package_names: &Option<(&str, &str)>, +) { // Separate function, since we use it twice. 
let mut archive = if let Ok(a) = zip::ZipArchive::new(file) { a @@ -428,7 +433,12 @@ pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, St if let Some((name, filename)) = package_names { let stem = Path::new(filename).file_stem().unwrap(); let components: Vec = file_str2.components().collect(); - if components.len() == 1 || !components[0].as_os_str().to_string_lossy().starts_with(name) { + if components.len() == 1 + || !components[0] + .as_os_str() + .to_string_lossy() + .starts_with(name) + { file_str.push(stem); } } @@ -437,11 +447,9 @@ pub fn extract_zip(file: &fs::File, out_path: &Path, rename: &Option<(String, St let extracted_file = if !file_str.contains("dist-info") && !file_str.contains("egg-info") { match rename { - Some((old, new)) => PathBuf::from_str( - file_str.to_owned() - .replace(old, new) - .as_str(), - ), + Some((old, new)) => { + PathBuf::from_str(file_str.to_owned().replace(old, new).as_str()) + } None => PathBuf::from_str(file_str), } } else { From bcb05f69203ead611b802c425646ff096d71fe28 Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Mon, 8 Nov 2021 00:45:37 -0600 Subject: [PATCH 15/41] Add comment explaining hexdump workaround --- src/util.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/util.rs b/src/util.rs index 2f93b99..498c102 100644 --- a/src/util.rs +++ b/src/util.rs @@ -430,6 +430,10 @@ pub fn extract_zip( // (which would be overwritten by this one.) let mut file_str = PathBuf::new(); let file_str2 = file.enclosed_name().unwrap(); + // The `hexdump` Python package intentionally strips its own root folder from its zip source + // distribution, which breaks wheel building. As a workaround, add the package name and version + // as a prefix to the path when extracting if the package name isn't in the first folder's + // name already. 
if let Some((name, filename)) = package_names { let stem = Path::new(filename).file_stem().unwrap(); let components: Vec = file_str2.components().collect(); From cefc7f632152d687f4304d30c376303746eb2531 Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Mon, 8 Nov 2021 01:02:55 -0600 Subject: [PATCH 16/41] Initialize PathBuf with capacity of archive entry path length --- src/util.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.rs b/src/util.rs index 498c102..03aca10 100644 --- a/src/util.rs +++ b/src/util.rs @@ -428,8 +428,8 @@ pub fn extract_zip( let mut file = archive.by_index(i).unwrap(); // Change name here instead of after in case we've already installed a non-renamed version. // (which would be overwritten by this one.) - let mut file_str = PathBuf::new(); let file_str2 = file.enclosed_name().unwrap(); + let mut file_str = PathBuf::with_capacity(file_str2.as_os_str().len()); // The `hexdump` Python package intentionally strips its own root folder from its zip source // distribution, which breaks wheel building. As a workaround, add the package name and version // as a prefix to the path when extracting if the package name isn't in the first folder's From ee82695542f2e9588f9febae4005537d7af176ea Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Mon, 8 Nov 2021 01:16:33 -0600 Subject: [PATCH 17/41] Reuse PathBuf if no replacement is necessary --- src/util.rs | 35 +++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/src/util.rs b/src/util.rs index 03aca10..21436b0 100644 --- a/src/util.rs +++ b/src/util.rs @@ -428,37 +428,40 @@ pub fn extract_zip( let mut file = archive.by_index(i).unwrap(); // Change name here instead of after in case we've already installed a non-renamed version. // (which would be overwritten by this one.) 
- let file_str2 = file.enclosed_name().unwrap(); - let mut file_str = PathBuf::with_capacity(file_str2.as_os_str().len()); + let entry_path = file.enclosed_name().unwrap(); + let mut final_entry_path = PathBuf::with_capacity(entry_path.as_os_str().len()); // The `hexdump` Python package intentionally strips its own root folder from its zip source // distribution, which breaks wheel building. As a workaround, add the package name and version // as a prefix to the path when extracting if the package name isn't in the first folder's // name already. if let Some((name, filename)) = package_names { let stem = Path::new(filename).file_stem().unwrap(); - let components: Vec = file_str2.components().collect(); + let components: Vec = entry_path.components().collect(); if components.len() == 1 || !components[0] .as_os_str() .to_string_lossy() .starts_with(name) { - file_str.push(stem); + final_entry_path.push(stem); } } - file_str.push(file_str2); - let file_str = file_str.to_str().expect("Problem converting path to str"); - - let extracted_file = if !file_str.contains("dist-info") && !file_str.contains("egg-info") { - match rename { - Some((old, new)) => { - PathBuf::from_str(file_str.to_owned().replace(old, new).as_str()) + final_entry_path.push(entry_path); + let entry_path_str = final_entry_path + .to_str() + .expect("Problem converting path to str"); + + let extracted_file = + if !entry_path_str.contains("dist-info") && !entry_path_str.contains("egg-info") { + match rename { + Some((old, new)) => { + PathBuf::from_str(entry_path_str.to_owned().replace(old, new).as_str()) + } + None => Ok(final_entry_path), } - None => PathBuf::from_str(file_str), - } - } else { - PathBuf::from_str(file_str) - }; + } else { + Ok(final_entry_path) + }; let outpath = out_path.join(extracted_file.unwrap()); From 8af1c6e844f18bd332bd8434d283b91c833f07ec Mon Sep 17 00:00:00 2001 From: Noel Tautges Date: Wed, 10 Nov 2021 19:36:51 -0600 Subject: [PATCH 18/41] Fix DRY in finding dist-info path 
--- src/install.rs | 38 +++++++++++++------------------------- 1 file changed, 13 insertions(+), 25 deletions(-) diff --git a/src/install.rs b/src/install.rs index 72cc70c..27b437d 100644 --- a/src/install.rs +++ b/src/install.rs @@ -3,6 +3,7 @@ use crate::{commands, dep_types::Version, util}; use flate2::read::GzDecoder; use regex::Regex; use ring::digest; +use std::path::PathBuf; use std::{fs, io, io::BufRead, path::Path, process::Command}; use tar::Archive; use termcolor::Color; @@ -90,15 +91,10 @@ if __name__ == '__main__': .unwrap_or_else(|_| util::abort(&format!("Problem creating script file for {}", name))); } -/// Set up entry points (ie scripts like `ipython`, `black` etc) in a single file. -/// Alternatively, we could just parse all `dist-info` folders every run; this should -/// be faster. -pub fn setup_scripts(name: &str, version: &Version, lib_path: &Path, entry_pt_path: &Path) { - let mut scripts = vec![]; - // todo: Sep fn for dist_info path, to avoid repetition between here and uninstall? +/// Find `dist-info` folder for package. +fn find_dist_info_path(name: &str, version: &Version, lib_path: &Path) -> PathBuf { let mut dist_info_path = lib_path.join(format!("{}-{}.dist-info", name, version.to_string())); // If we can't find the dist_info path, it may be due to it not using a full 3-digit semver format. - // todo: Dry from dep_resolution, release check. if !dist_info_path.exists() && (version.patch == Some(0) || version.patch == None) { dist_info_path = lib_path.join(format!("{}-{}.dist-info", name, version.to_string_med())); if !dist_info_path.exists() && (version.minor == Some(0) || version.minor == None) { @@ -106,6 +102,15 @@ pub fn setup_scripts(name: &str, version: &Version, lib_path: &Path, entry_pt_pa lib_path.join(format!("{}-{}.dist-info", name, version.to_string_short())); } } + dist_info_path +} + +/// Set up entry points (ie scripts like `ipython`, `black` etc) in a single file. 
+/// Alternatively, we could just parse all `dist-info` folders every run; this should +/// be faster. +pub fn setup_scripts(name: &str, version: &Version, lib_path: &Path, entry_pt_path: &Path) { + let mut scripts = vec![]; + let dist_info_path = find_dist_info_path(name, version, lib_path); if let Ok(ep_file) = fs::File::open(&dist_info_path.join("entry_points.txt")) { let mut in_scripts_section = false; @@ -497,24 +502,7 @@ pub fn uninstall(name_ins: &str, vers_ins: &Version, lib_path: &Path) { // Uninstall the package // package folders appear to be lowercase, while metadata keeps the package title's casing. - let mut dist_info_path = - lib_path.join(format!("{}-{}.dist-info", name_ins, vers_ins.to_string())); - // todo: DRY - if !dist_info_path.exists() && (vers_ins.patch == Some(0) || vers_ins.patch == None) { - dist_info_path = lib_path.join(format!( - "{}-{}.dist-info", - name_ins, - vers_ins.to_string_med() - )); - if !dist_info_path.exists() && (vers_ins.minor == Some(0) || vers_ins.minor == None) { - dist_info_path = lib_path.join(format!( - "{}-{}.dist-info", - name_ins, - vers_ins.to_string_short() - )); - } - } - + let dist_info_path = find_dist_info_path(name_ins, vers_ins, lib_path); let egg_info_path = lib_path.join(format!("{}-{}.egg-info", name_ins, vers_ins.to_string())); // todo: could top_level.txt be in egg-info too? From 84ad40295385be0a35da28106408841e7814351c Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 24 Nov 2021 08:26:38 +0100 Subject: [PATCH 19/41] Delete .travis.yml Travis CI stopped working 5 months ago. 
https://travis-ci.org/github/David-OConnor/pyflow/pull_requests --- .travis.yml | 35 ----------------------------------- 1 file changed, 35 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index cd001a8..0000000 --- a/.travis.yml +++ /dev/null @@ -1,35 +0,0 @@ -language: rust - -rust: - - stable - - beta - - nightly - -matrix: - allow_failures: - - rust: nightly - fast_finish: true - -os: -# - windows todo: Windows builds on Travis are failing for an unknown reason - - osx - - linux - -cache: cargo - -env: - global: - - RUST_BACKTRACE=1 - -script: - - cargo build --verbose --all - - cargo test --verbose --all - - -# TODO: make it faster ; useful links: -# https://www.reddit.com/r/rust/comments/9zpyww/idea_a_local_cache_of_compiled_dependencies_in/ -# https://doc.rust-lang.org/cargo/reference/environment-variables.html - CARGO_TARGET_DIR -# https://github.com/holmgr/cargo-sweep -# https://github.com/matthiaskrgr/cargo-cache -# https://github.com/mozilla/sccache -# https://github.com/alexcrichton/cargo-vendor From 0a4032fb0196f67bebe57fad1b91d00bec48f891 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 24 Nov 2021 08:46:58 +0100 Subject: [PATCH 20/41] GitHub Action to replace Travis CI Related to #158 --- .github/workflows/build_and_test.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/build_and_test.yml diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml new file mode 100644 index 0000000..c5a32ed --- /dev/null +++ b/.github/workflows/build_and_test.yml @@ -0,0 +1,26 @@ +on: [pull_request, push] +name: build_and_test +jobs: + build_and_test: + env: + RUST_BACKTRACE: 1 + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + toolchain: [stable, beta] # , nightly] + runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + - uses: 
actions-rs/toolchain@v1 + with: + toolchain: stable + - uses: actions-rs/cargo@v1 + with: + command: build + args: --verbose --all + - uses: actions-rs/cargo@v1 + with: + command: test + args: --verbose --all From 46c83746ba7440fc1661ce23f66013be6939c7d1 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 24 Nov 2021 08:48:55 +0100 Subject: [PATCH 21/41] Update build_and_test.yml --- .github/workflows/build_and_test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index c5a32ed..ed9f17c 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -9,13 +9,12 @@ jobs: matrix: os: [macos-latest, ubuntu-latest, windows-latest] toolchain: [stable, beta] # , nightly] - runs-on: ubuntu-latest runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - toolchain: stable + toolchain: ${{ matrix.toolchain }} - uses: actions-rs/cargo@v1 with: command: build From c2f1bf08c9988d155da7ebcb9518de4fd3e13fa7 Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Sat, 27 Nov 2021 12:20:05 +0200 Subject: [PATCH 22/41] Split main - initial approach --- .idea/.gitignore | 3 - .idea/inspectionProfiles/Project_Default.xml | 14 - .idea/misc.xml | 7 - .idea/modules.xml | 8 - .idea/pypackages.iml | 20 - .idea/vcs.xml | 6 - Cargo.lock | 16 + Cargo.toml | 1 + src/actions/install.rs | 53 +++ src/actions/mod.rs | 11 + src/actions/new.rs | 57 +++ src/dep_types.rs | 4 +- src/main.rs | 443 ++----------------- src/py_versions.rs | 2 +- src/script.rs | 4 +- src/util/deps.rs | 291 ++++++++++++ src/util/fs.rs | 29 ++ src/{util.rs => util/mod.rs} | 165 ++----- src/util/os.rs | 50 +++ src/util/prompts.rs | 87 ++++ 20 files changed, 671 insertions(+), 600 deletions(-) delete mode 100644 .idea/.gitignore delete mode 100644 .idea/inspectionProfiles/Project_Default.xml delete mode 100644 .idea/misc.xml delete mode 100644 
.idea/modules.xml delete mode 100644 .idea/pypackages.iml delete mode 100644 .idea/vcs.xml create mode 100644 src/actions/install.rs create mode 100644 src/actions/mod.rs create mode 100644 src/actions/new.rs create mode 100644 src/util/deps.rs create mode 100644 src/util/fs.rs rename src/{util.rs => util/mod.rs} (88%) create mode 100644 src/util/os.rs create mode 100644 src/util/prompts.rs diff --git a/.idea/.gitignore b/.idea/.gitignore deleted file mode 100644 index 0e40fe8..0000000 --- a/.idea/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ - -# Default ignored files -/workspace.xml \ No newline at end of file diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml deleted file mode 100644 index e4f1dbc..0000000 --- a/.idea/inspectionProfiles/Project_Default.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 7fd4575..0000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index 1d21d7b..0000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/pypackages.iml b/.idea/pypackages.iml deleted file mode 100644 index c3f7198..0000000 --- a/.idea/pypackages.iml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 94a25f7..0000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 878110f..c9dab31 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -628,6 +628,15 @@ dependencies = [ "hashbrown", ] +[[package]] +name = "indoc" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e5a75aeaaef0ce18b58056d306c27b07436fbb34b8816c53094b76dd81803136" +dependencies = [ + "unindent", +] + [[package]] name = "iovec" version = "0.1.4" @@ -1033,6 +1042,7 @@ dependencies = [ "directories", "flate2", "fs_extra", + "indoc", "mockall", "mockall_double", "nom", @@ -1818,6 +1828,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +[[package]] +name = "unindent" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" + [[package]] name = "untrusted" version = "0.7.1" diff --git a/Cargo.toml b/Cargo.toml index 7940989..9e4b5f2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,6 +45,7 @@ reqwest = { version = "^0.9.21", default-features = false, features = ["rustls-t #reqwest = "^0.9.21" mockall_double = "^0.2.0" +indoc = "1.0.3" [dev-dependencies] rstest = "0.10.0" diff --git a/src/actions/install.rs b/src/actions/install.rs new file mode 100644 index 0000000..b65b136 --- /dev/null +++ b/src/actions/install.rs @@ -0,0 +1,53 @@ +use std::path::PathBuf; + +use termcolor::Color; + +use crate::{ + dep_types::{LockPackage, Version}, + util::{self, process_reqs, Os, Paths}, + Config, +}; + +use util::deps::sync; + +pub fn install( + cfg_path: &PathBuf, + cfg: &Config, + git_path: &PathBuf, + paths: &Paths, + found_lock: bool, + packages: &Vec, + dev: bool, + lockpacks: &Vec, + os: &Os, + py_vers: &Version, + lock_path: &PathBuf, +) { + if !cfg_path.exists() { + cfg.write_file(&cfg_path); + } + + if found_lock { + util::print_color("Found lockfile", Color::Green); + } + + // Merge reqs added via cli with those in `pyproject.toml`. 
+ let (updated_reqs, up_dev_reqs) = util::merge_reqs(packages, dev, &cfg, &cfg_path); + + let dont_uninstall = util::find_dont_uninstall(&updated_reqs, &up_dev_reqs); + + let updated_reqs = process_reqs(updated_reqs, &git_path, paths); + let up_dev_reqs = process_reqs(up_dev_reqs, &git_path, paths); + + sync( + paths, + lockpacks, + &updated_reqs, + &up_dev_reqs, + &dont_uninstall, + *os, + py_vers, + lock_path, + ); + util::print_color("Installation complete", Color::Green); +} diff --git a/src/actions/mod.rs b/src/actions/mod.rs new file mode 100644 index 0000000..2667c68 --- /dev/null +++ b/src/actions/mod.rs @@ -0,0 +1,11 @@ +mod install; +mod new; + +pub use install::install; +pub use new::new; + + +pub const NEW_ERROR_MESSAGE: &str = indoc::indoc! {r#" +Problem creating the project. This may be due to a permissions problem. +If on linux, please try again with `sudo`. +"#}; \ No newline at end of file diff --git a/src/actions/new.rs b/src/actions/new.rs new file mode 100644 index 0000000..38778cf --- /dev/null +++ b/src/actions/new.rs @@ -0,0 +1,57 @@ +use std::{ + error::Error, + fs, + path::{Path, PathBuf}, +}; + +use termcolor::Color; + +use crate::{commands, util, Config}; + +const GITIGNORE_INIT: &str = indoc::indoc! {r##" +# General Python ignores +build/ +dist/ +__pycache__/ +__pypackages__/ +.ipynb_checkpoints/ +*.pyc +*~ +*/.mypy_cache/ + + +# Project ignores +"##}; + +/// Create a template directory for a python project. 
+pub fn new(name: &str) -> Result<(), Box> { + if !PathBuf::from(name).exists() { + fs::create_dir_all(&format!("{}/{}", name, name.replace("-", "_")))?; + fs::File::create(&format!("{}/{}/__init__.py", name, name.replace("-", "_")))?; + fs::File::create(&format!("{}/README.md", name))?; + fs::File::create(&format!("{}/.gitignore", name))?; + } + + let readme_init = &format!("# {}\n\n{}", name, "(A description)"); + + fs::write(&format!("{}/.gitignore", name), GITIGNORE_INIT)?; + fs::write(&format!("{}/README.md", name), readme_init)?; + + let cfg = Config { + name: Some(name.to_string()), + authors: util::get_git_author(), + py_version: Some(util::prompts::py_vers()), + ..Default::default() + }; + + cfg.write_file(&PathBuf::from(format!("{}/pyproject.toml", name))); + + if commands::git_init(Path::new(name)).is_err() { + util::print_color( + "Unable to initialize a git repo for your project", + Color::Yellow, // Dark + ); + }; + + Ok(()) +} diff --git a/src/dep_types.rs b/src/dep_types.rs index 1287a23..a09d0d8 100644 --- a/src/dep_types.rs +++ b/src/dep_types.rs @@ -1550,7 +1550,7 @@ pub mod tests { name: "win-unicode-console".into(), constraints: vec![Constraint::new(Gte, Version::new(0, 5, 0))], extra: None, - sys_platform: Some((Exact, crate::Os::Windows32)), + sys_platform: Some((Exact, util::Os::Windows32)), python_version: Some(vec![Constraint::new(Lt, Version::new(3, 6, 0))]), install_with_extras: None, path: None, @@ -1791,7 +1791,7 @@ pub mod tests { #[test] fn req_to_cfg_string_empty_constraints() { let ctx = res::get_version_info_context(); - ctx.expect().returning(|name, py_ver| { + ctx.expect().returning(|name, _py_ver| { Ok(( name.to_string(), Version::new(1, 2, 3), diff --git a/src/main.rs b/src/main.rs index 7b58b1e..1e2e767 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,19 +1,24 @@ #![allow(clippy::non_ascii_literal)] -#[mockall_double::double] -use crate::dep_resolution::res; -use crate::dep_types::{Constraint, Lock, LockPackage, Package, 
Rename, Req, ReqType, Version}; -use crate::util::{abort, process_reqs, Os}; +use crate::actions::new; +use crate::dep_types::{Constraint, Lock, Package, Req, Version}; +use crate::util::abort; +use crate::util::deps::sync; use regex::Regex; use serde::Deserialize; -use std::{collections::HashMap, env, error::Error, fs, path::PathBuf, str::FromStr}; +use std::{ + collections::HashMap, + env, fs, + path::{Path, PathBuf}, + str::FromStr, + sync::{Arc, RwLock}, +}; -use std::path::Path; -use std::sync::{Arc, RwLock}; use structopt::StructOpt; use termcolor::{Color, ColorChoice}; +mod actions; mod build; mod commands; mod dep_parser; @@ -663,53 +668,6 @@ thread_local! { static CLI_CONFIG: RwLock> = RwLock::new(Default::default()); } -/// Create a template directory for a python project. -pub fn new(name: &str) -> Result<(), Box> { - if !PathBuf::from(name).exists() { - fs::create_dir_all(&format!("{}/{}", name, name.replace("-", "_")))?; - fs::File::create(&format!("{}/{}/__init__.py", name, name.replace("-", "_")))?; - fs::File::create(&format!("{}/README.md", name))?; - fs::File::create(&format!("{}/.gitignore", name))?; - } - - let gitignore_init = r##"# General Python ignores -build/ -dist/ -__pycache__/ -__pypackages__/ -.ipynb_checkpoints/ -*.pyc -*~ -*/.mypy_cache/ - - -# Project ignores -"##; - - let readme_init = &format!("# {}\n\n{}", name, "(A description)"); - - fs::write(&format!("{}/.gitignore", name), gitignore_init)?; - fs::write(&format!("{}/README.md", name), readme_init)?; - - let cfg = Config { - name: Some(name.to_string()), - authors: util::get_git_author(), - py_version: Some(util::prompt_py_vers()), - ..Default::default() - }; - - cfg.write_file(&PathBuf::from(format!("{}/pyproject.toml", name))); - - if commands::git_init(Path::new(name)).is_err() { - util::print_color( - "Unable to initialize a git repo for your project", - Color::Yellow, // Dark - ); - }; - - Ok(()) -} - fn parse_lockpack_rename(rename: &str) -> (u32, String) { let re = 
Regex::new(r"^(\d+)\s(.*)$").unwrap(); let caps = re @@ -722,152 +680,6 @@ fn parse_lockpack_rename(rename: &str) -> (u32, String) { (id, name) } -/// Install/uninstall deps as required from the passed list, and re-write the lock file. -fn sync_deps( - paths: &util::Paths, - lock_packs: &[LockPackage], - dont_uninstall: &[String], - installed: &[(String, Version, Vec)], - os: util::Os, - python_vers: &Version, -) { - let packages: Vec = lock_packs - .iter() - .map(|lp| { - ( - ( - util::standardize_name(&lp.name), - Version::from_str(&lp.version).expect("Problem parsing lock version"), - ), - lp.rename.as_ref().map(|rn| parse_lockpack_rename(rn)), - ) - }) - .collect(); - - // todo shim. Use top-level A/R. We discard it temporarily while working other issues. - let installed: Vec<(String, Version)> = installed - .iter() - // Don't standardize name here; see note below in to_uninstall. - .map(|t| (t.0.clone(), t.1.clone())) - .collect(); - - // Filter by not-already-installed. - let to_install: Vec<&PackToInstall> = packages - .iter() - .filter(|(pack, _)| { - let mut contains = false; - for inst in &installed { - if util::compare_names(&pack.0, &inst.0) && pack.1 == inst.1 { - contains = true; - break; - } - } - - // The typing module is sometimes downloaded, causing a conflict/improper - // behavior compared to the built in module. - !contains && pack.0 != "typing" - }) - .collect(); - - // todo: Once you include rename info in installed, you won't need to use the map logic here. - let packages_only: Vec<&(String, Version)> = packages.iter().map(|(p, _)| p).collect(); - let to_uninstall: Vec<&(String, Version)> = installed - .iter() - .filter(|inst| { - // Don't standardize the name here; we need original capitalization to uninstall - // metadata etc. - let inst = (inst.0.clone(), inst.1.clone()); - let mut contains = false; - // We can't just use the contains method, due to needing compare_names(). 
- for pack in &packages_only { - if util::compare_names(&pack.0, &inst.0) && pack.1 == inst.1 { - contains = true; - break; - } - } - - for name in dont_uninstall { - if util::compare_names(name, &inst.0) { - contains = true; - break; - } - } - - !contains - }) - .collect(); - - for (name, version) in &to_uninstall { - // todo: Deal with renamed. Currently won't work correctly with them. - install::uninstall(name, version, &paths.lib) - } - - for ((name, version), rename) in &to_install { - let data = - res::get_warehouse_release(name, version).expect("Problem getting warehouse data"); - - let (best_release, package_type) = - util::find_best_release(&data, name, version, os, python_vers); - - // Powershell doesn't like emojis - // todo format literal issues, so repeating this whole statement. - #[cfg(target_os = "windows")] - util::print_color_(&format!("Installing {}", &name), Color::Cyan); - #[cfg(target_os = "linux")] - util::print_color_(&format!("⬇ Installing {}", &name), Color::Cyan); - #[cfg(target_os = "macos")] - util::print_color_(&format!("⬇ Installing {}", &name), Color::Cyan); - println!(" {} ...", &version.to_string_color()); - - if install::download_and_install_package( - name, - version, - &best_release.url, - &best_release.filename, - &best_release.digests.sha256, - paths, - package_type, - rename, - ) - .is_err() - { - abort("Problem downloading packages"); - } - } - // Perform renames after all packages are installed, or we may attempt to rename a package - // we haven't yet installed. - for ((name, version), rename) in &to_install { - if let Some((id, new)) = rename { - // Rename in the renamed package - - let renamed_path = &paths.lib.join(util::standardize_name(new)); - - util::wait_for_dirs(&[renamed_path.clone()]).expect("Problem creating renamed path"); - install::rename_package_files(renamed_path, name, new); - - // Rename in the parent calling the renamed package. // todo: Multiple parents? 
- let parent = lock_packs - .iter() - .find(|lp| lp.id == *id) - .expect("Can't find parent calling renamed package"); - install::rename_package_files( - &paths.lib.join(util::standardize_name(&parent.name)), - name, - new, - ); - - // todo: Handle this more generally, in case we don't have proper semver dist-info paths. - install::rename_metadata( - &paths - .lib - .join(&format!("{}-{}.dist-info", name, version.to_string())), - name, - new, - ); - } - } -} - fn already_locked(locked: &[Package], name: &str, constraints: &[Constraint]) -> bool { let mut result = true; for constr in constraints.iter() { @@ -957,139 +769,6 @@ fn run_cli_tool( } } -/// Function used by `Install` and `Uninstall` subcommands to syn dependencies with -/// the config and lock files. -#[allow(clippy::too_many_arguments)] -fn sync( - paths: &util::Paths, - lockpacks: &[LockPackage], - reqs: &[Req], - dev_reqs: &[Req], - dont_uninstall: &[String], - os: util::Os, - py_vers: &Version, - lock_path: &Path, -) { - let installed = util::find_installed(&paths.lib); - // We control the lock format, so this regex will always match - let dep_re = Regex::new(r"^(.*?)\s(.*)\s.*$").unwrap(); - - // We don't need to resolve reqs that are already locked. - let locked: Vec = lockpacks - .iter() - .map(|lp| { - let mut deps = vec![]; - for dep in lp.dependencies.as_ref().unwrap_or(&vec![]) { - let caps = dep_re - .captures(dep) - .expect("Problem reading lock file dependencies"); - let name = caps.get(1).unwrap().as_str().to_owned(); - let vers = Version::from_str(caps.get(2).unwrap().as_str()) - .expect("Problem parsing version from lock"); - deps.push((999, name, vers)); // dummy id - } - - Package { - id: lp.id, // todo - parent: 0, // todo - name: lp.name.clone(), - version: Version::from_str(&lp.version).expect("Problem parsing lock version"), - deps, - rename: Rename::No, // todo - } - }) - .collect(); - - // todo: Only show this when needed. - // todo: Temporarily? Removed. 
- // Powershell doesn't like emojis - // #[cfg(target_os = "windows")] - // println!("Resolving dependencies..."); - // #[cfg(target_os = "linux")] - // println!("🔍 Resolving dependencies..."); - // #[cfg(target_os = "macos")] - // println!("🔍 Resolving dependencies..."); - - // Dev reqs and normal reqs are both installed here; we only commit dev reqs - // when packaging. - let mut combined_reqs = reqs.to_vec(); - for dev_req in dev_reqs.to_vec() { - combined_reqs.push(dev_req); - } - - let resolved = if let Ok(r) = res::resolve(&combined_reqs, &locked, os, py_vers) { - r - } else { - abort("Problem resolving dependencies"); - unreachable!() - }; - - // Now merge the existing lock packages with new ones from resolved packages. - // We have a collection of requirements; attempt to merge them with the already-locked ones. - let mut updated_lock_packs = vec![]; - - for package in &resolved { - let dummy_constraints = vec![Constraint::new(ReqType::Exact, package.version.clone())]; - if already_locked(&locked, &package.name, &dummy_constraints) { - let existing: Vec<&LockPackage> = lockpacks - .iter() - .filter(|lp| util::compare_names(&lp.name, &package.name)) - .collect(); - let existing2 = existing[0]; - - updated_lock_packs.push(existing2.clone()); - continue; - } - - let deps = package - .deps - .iter() - .map(|(_, name, version)| { - format!( - "{} {} pypi+https://pypi.org/pypi/{}/{}/json", - name, version, name, version, - ) - }) - .collect(); - - updated_lock_packs.push(LockPackage { - id: package.id, - name: package.name.clone(), - version: package.version.to_string(), - source: Some(format!( - "pypi+https://pypi.org/pypi/{}/{}/json", - package.name, - package.version.to_string() - )), - dependencies: Some(deps), - rename: match &package.rename { - Rename::Yes(parent_id, _, name) => Some(format!("{} {}", parent_id, name)), - Rename::No => None, - }, - }); - } - - let updated_lock = Lock { - // metadata: Some(lock_metadata), - metadata: HashMap::new(), // todo: 
Problem with toml conversion. - package: Some(updated_lock_packs.clone()), - }; - if util::write_lock(lock_path, &updated_lock).is_err() { - abort("Problem writing lock file"); - } - - // Now that we've confirmed or modified the lock file, we're ready to sync installed - // dependencies with it. - sync_deps( - paths, - &updated_lock_packs, - dont_uninstall, - &installed, - os, - py_vers, - ); -} - #[derive(Clone)] enum ClearChoice { Dependencies, @@ -1107,7 +786,7 @@ impl ToString for ClearChoice { /// Clear `Pyflow`'s cache. Allow the user to select which parts to clear based on a prompt. fn clear(pyflow_path: &Path, cache_path: &Path, script_env_path: &Path) { - let result = util::prompt_list( + let result = util::prompts::list( "Which cached items would you like to clear?", "choice", &[ @@ -1152,49 +831,23 @@ fn clear(pyflow_path: &Path, cache_path: &Path, script_env_path: &Path) { } } +const CFG_FILENAME: &str = "pyproject.toml"; +const LOCK_FILENAME: &str = "pyflow.lock"; + /// We process input commands in a deliberate order, to ensure the required, and only the required /// setup steps are accomplished before each. 
fn main() { - let cfg_filename = "pyproject.toml"; - let lock_filename = "pyflow.lock"; - - let base_dir = directories::BaseDirs::new(); - let pyflow_path = base_dir - .expect("Problem finding base directory") - .data_dir() - .to_owned() - .join("pyflow"); - - let dep_cache_path = pyflow_path.join("dependency-cache"); - let script_env_path = pyflow_path.join("script-envs"); - let git_path = pyflow_path.join("git"); - - #[cfg(target_os = "windows")] - let os = Os::Windows; - #[cfg(target_os = "linux")] - let os = Os::Linux; - #[cfg(target_os = "macos")] - let os = Os::Mac; + let (pyflow_path, dep_cache_path, script_env_path, git_path) = util::fs::get_paths(); + let os = util::get_os(); let opt = Opt::from_args(); #[cfg(debug_assertions)] eprintln!("opts {:?}", opt); - // Handle color option - let choice = match opt.color.unwrap_or_else(|| String::from("auto")).as_str() { - "always" => ColorChoice::Always, - "ansi" => ColorChoice::AlwaysAnsi, - "auto" => { - if atty::is(atty::Stream::Stdout) { - ColorChoice::Auto - } else { - ColorChoice::Never - } - } - _ => ColorChoice::Never, - }; CliConfig { - color_choice: choice, + color_choice: util::handle_color_option( + opt.color.unwrap_or_else(|| String::from("auto")).as_str(), + ), } .make_current(); @@ -1218,10 +871,7 @@ fn main() { if let SubCommand::New { name } = subcmd { if new(&name).is_err() { - abort( - "Problem creating the project. This may be due to a permissions problem. 
\ - If on linux, please try again with `sudo`.", - ); + abort(actions::NEW_ERROR_MESSAGE); } util::print_color( &format!("Created a new Python project named {}", name), @@ -1231,7 +881,7 @@ fn main() { } if let SubCommand::Init {} = subcmd { - let cfg_path = PathBuf::from(cfg_filename); + let cfg_path = PathBuf::from(CFG_FILENAME); if cfg_path.exists() { abort("pyproject.toml already exists - not overwriting.") } @@ -1241,7 +891,7 @@ fn main() { false => Config::default(), }; - cfg.py_version = Some(util::prompt_py_vers()); + cfg.py_version = Some(util::prompts::py_vers()); files::parse_req_dot_text(&mut cfg, &PathBuf::from("requirements.txt")); @@ -1251,7 +901,7 @@ fn main() { } // We need access to the config from here on; throw an error if we can't find it. - let mut cfg_path = PathBuf::from(cfg_filename); + let mut cfg_path = PathBuf::from(CFG_FILENAME); if !&cfg_path.exists() { // if let SubCommand::Python { args: _ } = subcmd { // Try looking recursively in parent directories for a config file. @@ -1259,7 +909,7 @@ fn main() { let mut current_level = env::current_dir().expect("Can't access current directory"); for _ in 0..recursion_limit { if let Some(parent) = current_level.parent() { - let parent_cfg_path = parent.join(cfg_filename); + let parent_cfg_path = parent.join(CFG_FILENAME); if parent_cfg_path.exists() { cfg_path = parent_cfg_path; break; @@ -1284,7 +934,7 @@ fn main() { // Base pypackages_path and lock_path on the `pyproject.toml` folder. 
let proj_path = cfg_path.parent().expect("Can't find proj pathw via parent"); let pypackages_path = proj_path.join("__pypackages__"); - let lock_path = &proj_path.join(lock_filename); + let lock_path = &proj_path.join(LOCK_FILENAME); let mut cfg = Config::from_file(&cfg_path).unwrap_or_default(); cfg.populate_path_subreqs(); @@ -1337,7 +987,7 @@ fn main() { let cfg_vers = if let Some(v) = cfg.py_version.clone() { v } else { - let specified = util::prompt_py_vers(); + let specified = util::prompts::py_vers(); if !cfg_path.exists() { cfg.write_file(&cfg_path); @@ -1392,40 +1042,15 @@ fn main() { // Now handle subcommands that require info about the environment match subcmd { - // Add package names to `pyproject.toml` if needed. Then sync installed packages - // and `pyproject.lock` with the `pyproject.toml`. + // Add pacakge names to `pyproject.toml` if needed. Then sync installed packages + // and `pyflow.lock` with the `pyproject.toml`. // We use data from three sources: `pyproject.toml`, `pyflow.lock`, and // the currently-installed packages, found by crawling metadata in the `lib` path. // See the readme section `How installation and locking work` for details. - SubCommand::Install { packages, dev } => { - if !cfg_path.exists() { - cfg.write_file(&cfg_path); - } - - if found_lock { - util::print_color("Found lockfile", Color::Green); - } - - // Merge reqs added via cli with those in `pyproject.toml`. 
- let (updated_reqs, up_dev_reqs) = util::merge_reqs(&packages, dev, &cfg, &cfg_path); - - let dont_uninstall = util::find_dont_uninstall(&updated_reqs, &up_dev_reqs); - - let updated_reqs = process_reqs(updated_reqs, &git_path, &paths); - let up_dev_reqs = process_reqs(up_dev_reqs, &git_path, &paths); - - sync( - &paths, - &lockpacks, - &updated_reqs, - &up_dev_reqs, - &dont_uninstall, - os, - &py_vers, - &lock_path, - ); - util::print_color("Installation complete", Color::Green); - } + SubCommand::Install { packages, dev } => actions::install( + &cfg_path, &cfg, &git_path, &paths, found_lock, &packages, dev, &lockpacks, &os, + &py_vers, lock_path, + ), SubCommand::Uninstall { packages } => { // todo: uninstall dev? diff --git a/src/py_versions.rs b/src/py_versions.rs index 45e4767..3f477ac 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -430,7 +430,7 @@ pub fn create_venv( } _ => { // let r = prompt_alias(&aliases); - let r = util::prompt_list( + let r = util::prompts::list( "Found multiple compatible Python versions. 
Please enter the number associated with the one you'd like to use:",
             "Python alias",
             &aliases,
diff --git a/src/script.rs b/src/script.rs
index b8ef38d..0c213a3 100644
--- a/src/script.rs
+++ b/src/script.rs
@@ -58,7 +58,7 @@ pub fn run_script(
         )
         .expect("Problem parsing version from file");
     } else {
-        cfg_vers = util::prompt_py_vers();
+        cfg_vers = util::prompts::py_vers();
         create_or_update_version_file(&py_vers_path, &cfg_vers);
     }
 
@@ -116,7 +116,7 @@ pub fn run_script(
         })
         .collect();
 
-    crate::sync(
+    util::deps::sync(
         &paths,
         &lockpacks,
         &reqs,
diff --git a/src/util/deps.rs b/src/util/deps.rs
new file mode 100644
index 0000000..1983cde
--- /dev/null
+++ b/src/util/deps.rs
@@ -0,0 +1,291 @@
+use std::{collections::HashMap, path::Path, str::FromStr};
+
+use regex::Regex;
+use termcolor::Color;
+
+use crate::{
+    already_locked,
+    dep_resolution::res,
+    dep_types::{Constraint, Lock, LockPackage, Package, Rename, Req, ReqType, Version},
+    install, parse_lockpack_rename,
+    util::{self, abort},
+    PackToInstall,
+};
+
+/// Function used by `Install` and `Uninstall` subcommands to sync dependencies with
+/// the config and lock files.
+#[allow(clippy::too_many_arguments)]
+pub fn sync(
+    paths: &util::Paths,
+    lockpacks: &[LockPackage],
+    reqs: &[Req],
+    dev_reqs: &[Req],
+    dont_uninstall: &[String],
+    os: util::Os,
+    py_vers: &Version,
+    lock_path: &Path,
+) {
+    let installed = util::find_installed(&paths.lib);
+    // We control the lock format, so this regex will always match
+    let dep_re = Regex::new(r"^(.*?)\s(.*)\s.*$").unwrap();
+
+    // We don't need to resolve reqs that are already locked.
+ let locked: Vec = lockpacks + .iter() + .map(|lp| { + let mut deps = vec![]; + for dep in lp.dependencies.as_ref().unwrap_or(&vec![]) { + let caps = dep_re + .captures(dep) + .expect("Problem reading lock file dependencies"); + let name = caps.get(1).unwrap().as_str().to_owned(); + let vers = Version::from_str(caps.get(2).unwrap().as_str()) + .expect("Problem parsing version from lock"); + deps.push((999, name, vers)); // dummy id + } + + Package { + id: lp.id, // todo + parent: 0, // todo + name: lp.name.clone(), + version: Version::from_str(&lp.version).expect("Problem parsing lock version"), + deps, + rename: Rename::No, // todo + } + }) + .collect(); + + // todo: Only show this when needed. + // todo: Temporarily? Removed. + // Powershell doesn't like emojis + // #[cfg(target_os = "windows")] + // println!("Resolving dependencies..."); + // #[cfg(target_os = "linux")] + // println!("🔍 Resolving dependencies..."); + // #[cfg(target_os = "macos")] + // println!("🔍 Resolving dependencies..."); + + // Dev reqs and normal reqs are both installed here; we only commit dev reqs + // when packaging. + let mut combined_reqs = reqs.to_vec(); + for dev_req in dev_reqs.to_vec() { + combined_reqs.push(dev_req); + } + + let resolved = if let Ok(r) = res::resolve(&combined_reqs, &locked, os, py_vers) { + r + } else { + abort("Problem resolving dependencies"); + unreachable!() + }; + + // Now merge the existing lock packages with new ones from resolved packages. + // We have a collection of requirements; attempt to merge them with the already-locked ones. 
+ let mut updated_lock_packs = vec![]; + + for package in &resolved { + let dummy_constraints = vec![Constraint::new(ReqType::Exact, package.version.clone())]; + if already_locked(&locked, &package.name, &dummy_constraints) { + let existing: Vec<&LockPackage> = lockpacks + .iter() + .filter(|lp| util::compare_names(&lp.name, &package.name)) + .collect(); + let existing2 = existing[0]; + + updated_lock_packs.push(existing2.clone()); + continue; + } + + let deps = package + .deps + .iter() + .map(|(_, name, version)| { + format!( + "{} {} pypi+https://pypi.org/pypi/{}/{}/json", + name, version, name, version, + ) + }) + .collect(); + + updated_lock_packs.push(LockPackage { + id: package.id, + name: package.name.clone(), + version: package.version.to_string(), + source: Some(format!( + "pypi+https://pypi.org/pypi/{}/{}/json", + package.name, + package.version.to_string() + )), + dependencies: Some(deps), + rename: match &package.rename { + Rename::Yes(parent_id, _, name) => Some(format!("{} {}", parent_id, name)), + Rename::No => None, + }, + }); + } + + let updated_lock = Lock { + // metadata: Some(lock_metadata), + metadata: HashMap::new(), // todo: Problem with toml conversion. + package: Some(updated_lock_packs.clone()), + }; + if util::write_lock(lock_path, &updated_lock).is_err() { + abort("Problem writing lock file"); + } + + // Now that we've confirmed or modified the lock file, we're ready to sync installed + // dependencies with it. + sync_deps( + paths, + &updated_lock_packs, + dont_uninstall, + &installed, + os, + py_vers, + ); +} +/// Install/uninstall deps as required from the passed list, and re-write the lock file. 
+fn sync_deps( + paths: &util::Paths, + lock_packs: &[LockPackage], + dont_uninstall: &[String], + installed: &[(String, Version, Vec)], + os: util::Os, + python_vers: &Version, +) { + let packages: Vec = lock_packs + .iter() + .map(|lp| { + ( + ( + util::standardize_name(&lp.name), + Version::from_str(&lp.version).expect("Problem parsing lock version"), + ), + lp.rename.as_ref().map(|rn| parse_lockpack_rename(rn)), + ) + }) + .collect(); + + // todo shim. Use top-level A/R. We discard it temporarily while working other issues. + let installed: Vec<(String, Version)> = installed + .iter() + // Don't standardize name here; see note below in to_uninstall. + .map(|t| (t.0.clone(), t.1.clone())) + .collect(); + + // Filter by not-already-installed. + let to_install: Vec<&PackToInstall> = packages + .iter() + .filter(|(pack, _)| { + let mut contains = false; + for inst in &installed { + if util::compare_names(&pack.0, &inst.0) && pack.1 == inst.1 { + contains = true; + break; + } + } + + // The typing module is sometimes downloaded, causing a conflict/improper + // behavior compared to the built in module. + !contains && pack.0 != "typing" + }) + .collect(); + + // todo: Once you include rename info in installed, you won't need to use the map logic here. + let packages_only: Vec<&(String, Version)> = packages.iter().map(|(p, _)| p).collect(); + let to_uninstall: Vec<&(String, Version)> = installed + .iter() + .filter(|inst| { + // Don't standardize the name here; we need original capitalization to uninstall + // metadata etc. + let inst = (inst.0.clone(), inst.1.clone()); + let mut contains = false; + // We can't just use the contains method, due to needing compare_names(). 
+ for pack in &packages_only { + if util::compare_names(&pack.0, &inst.0) && pack.1 == inst.1 { + contains = true; + break; + } + } + + for name in dont_uninstall { + if util::compare_names(name, &inst.0) { + contains = true; + break; + } + } + + !contains + }) + .collect(); + + for (name, version) in &to_uninstall { + // todo: Deal with renamed. Currently won't work correctly with them. + install::uninstall(name, version, &paths.lib) + } + + for ((name, version), rename) in &to_install { + let data = + res::get_warehouse_release(name, version).expect("Problem getting warehouse data"); + + let (best_release, package_type) = + util::find_best_release(&data, name, version, os, python_vers); + + // Powershell doesn't like emojis + // todo format literal issues, so repeating this whole statement. + #[cfg(target_os = "windows")] + util::print_color_(&format!("Installing {}", &name), Color::Cyan); + #[cfg(target_os = "linux")] + util::print_color_(&format!("⬇ Installing {}", &name), Color::Cyan); + #[cfg(target_os = "macos")] + util::print_color_(&format!("⬇ Installing {}", &name), Color::Cyan); + println!(" {} ...", &version.to_string_color()); + + if install::download_and_install_package( + name, + version, + &best_release.url, + &best_release.filename, + &best_release.digests.sha256, + paths, + package_type, + rename, + ) + .is_err() + { + abort("Problem downloading packages"); + } + } + // Perform renames after all packages are installed, or we may attempt to rename a package + // we haven't yet installed. + for ((name, version), rename) in &to_install { + if let Some((id, new)) = rename { + // Rename in the renamed package + + let renamed_path = &paths.lib.join(util::standardize_name(new)); + + util::wait_for_dirs(&[renamed_path.clone()]).expect("Problem creating renamed path"); + install::rename_package_files(renamed_path, name, new); + + // Rename in the parent calling the renamed package. // todo: Multiple parents? 
+ let parent = lock_packs + .iter() + .find(|lp| lp.id == *id) + .expect("Can't find parent calling renamed package"); + install::rename_package_files( + &paths.lib.join(util::standardize_name(&parent.name)), + name, + new, + ); + + // todo: Handle this more generally, in case we don't have proper semver dist-info paths. + install::rename_metadata( + &paths + .lib + .join(&format!("{}-{}.dist-info", name, version.to_string())), + name, + new, + ); + } + } +} diff --git a/src/util/fs.rs b/src/util/fs.rs new file mode 100644 index 0000000..249160a --- /dev/null +++ b/src/util/fs.rs @@ -0,0 +1,29 @@ +use std::path::PathBuf; + +pub fn pyflow_path() -> PathBuf { + directories::BaseDirs::new() + .expect("Problem finding base directory") + .data_dir() + .to_owned() + .join("pyflow") +} + +pub fn dep_cache_path(pyflow_path: &PathBuf) -> PathBuf { + pyflow_path.join("dependency_cache") +} + +pub fn script_env_path(pyflow_path: &PathBuf) -> PathBuf { + pyflow_path.join("script_envs") +} + +pub fn git_path(pyflow_path: &PathBuf) -> PathBuf { + pyflow_path.join("git") +} + +pub fn get_paths() -> (PathBuf, PathBuf, PathBuf, PathBuf) { + let pyflow_path = pyflow_path(); + let dep_cache_path = dep_cache_path(&pyflow_path); + let script_env_path = script_env_path(&pyflow_path); + let git_path = git_path(&pyflow_path); + (pyflow_path, dep_cache_path, script_env_path, git_path) +} diff --git a/src/util.rs b/src/util/mod.rs similarity index 88% rename from src/util.rs rename to src/util/mod.rs index 21436b0..5a15da2 100644 --- a/src/util.rs +++ b/src/util/mod.rs @@ -1,3 +1,10 @@ +pub mod deps; +pub mod fs; +pub mod prompts; + +mod os; +pub use os::{get_os, Os}; + #[mockall_double::double] use crate::dep_resolution::res; @@ -12,20 +19,18 @@ use crate::{ }; use ini::Ini; use regex::Regex; -use serde::Deserialize; + use std::io::{self, BufRead, BufReader, Read, Write}; use std::path::Component; use std::str::FromStr; use std::{ - collections::HashMap, env, error::Error, - fs, 
path::{Path, PathBuf}, process, thread, time, }; use tar::Archive; -use termcolor::{Color, ColorSpec, StandardStream, WriteColor}; +use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; use xz2::read::XzDecoder; #[derive(Debug)] @@ -50,41 +55,6 @@ pub struct Metadata { pub requires_dist: Vec, } -#[derive(Copy, Clone, Debug, Deserialize, PartialEq)] -/// Used to determine which version of a binary package to download. Assume 64-bit. -pub enum Os { - Linux32, - Linux, - Windows32, - Windows, - // Mac32, - Mac, - Any, -} - -impl FromStr for Os { - type Err = DependencyError; - - fn from_str(s: &str) -> Result { - let re_linux32 = Regex::new(r"(many)?linux.*i686").unwrap(); - let re_linux = Regex::new(r"((many)?linux.*|cygwin|(open)?bsd6*)").unwrap(); - let re_win = Regex::new(r"^win(dows|_amd64)?").unwrap(); - let re_mac = Regex::new(r"(macosx.*|darwin|.*mac.*)").unwrap(); - - Ok(match s { - x if re_linux32.is_match(x) => Self::Linux32, - x if re_linux.is_match(x) => Self::Linux, - "win32" => Self::Windows32, - x if re_win.is_match(x) => Self::Windows, - x if re_mac.is_match(x) => Self::Mac, - "any" => Self::Any, - _ => { - return Err(DependencyError::new(&format!("Problem parsing Os: {}", s))); - } - }) - } -} - /// Print line in a color, then reset formatting. pub fn print_color(message: &str, color: Color) { if let Err(_e) = print_color_res(message, color) { @@ -258,7 +228,7 @@ pub fn find_installed(lib_path: &Path) -> Vec<(String, Version, Vec)> { let top_level = lib_path.join(folder_name).join("top_level.txt"); let mut tops = vec![]; - match fs::File::open(top_level) { + match std::fs::File::open(top_level) { Ok(f) => { for line in BufReader::new(f).lines().flatten() { tops.push(line); @@ -408,7 +378,7 @@ pub fn compare_names(name1: &str, name2: &str) -> bool { /// Extract the wheel or zip. 
/// From [this example](https://github.com/mvdnes/zip-rs/blob/master/examples/extract.rs#L32) pub fn extract_zip( - file: &fs::File, + file: &std::fs::File, out_path: &Path, rename: &Option<(String, String)>, package_names: &Option<(&str, &str)>, @@ -466,14 +436,14 @@ pub fn extract_zip( let outpath = out_path.join(extracted_file.unwrap()); if (&*file.name()).ends_with('/') { - fs::create_dir_all(&outpath).unwrap(); + std::fs::create_dir_all(&outpath).unwrap(); } else { if let Some(p) = outpath.parent() { if !p.exists() { - fs::create_dir_all(&p).unwrap(); + std::fs::create_dir_all(&p).unwrap(); } } - let mut outfile = fs::File::create(&outpath).unwrap(); + let mut outfile = std::fs::File::create(&outpath).unwrap(); io::copy(&mut file, &mut outfile).unwrap(); } @@ -483,14 +453,14 @@ pub fn extract_zip( use std::os::unix::fs::PermissionsExt; if let Some(mode) = file.unix_mode() { - fs::set_permissions(&outpath, fs::Permissions::from_mode(mode)).unwrap(); + std::fs::set_permissions(&outpath, std::fs::Permissions::from_mode(mode)).unwrap(); } } } } pub fn unpack_tar_xz(archive_path: &Path, dest: &Path) { - let archive_bytes = fs::read(archive_path).expect("Problem reading archive as bytes"); + let archive_bytes = std::fs::read(archive_path).expect("Problem reading archive as bytes"); let mut tar: Vec = Vec::new(); let mut decompressor = XzDecoder::new(&archive_bytes[..]); @@ -576,7 +546,7 @@ pub fn find_or_create_venv( #[cfg(target_os = "macos")] { - let vers_path = fs::canonicalize(vers_path); + let vers_path = std::fs::canonicalize(vers_path); let vers_path = match vers_path { Ok(path) => path, Err(error) => { @@ -618,59 +588,6 @@ pub fn fallible_v_parse(vers: &str) -> Version { } } -/// A generic prompt function, where the user selects from a list -pub fn prompt_list( - init_msg: &str, - type_: &str, - items: &[(String, T)], - show_item: bool, -) -> (String, T) { - print_color(init_msg, Color::Magenta); - for (i, (name, content)) in items.iter().enumerate() { - if 
show_item { - println!("{}: {}: {}", i + 1, name, content.to_string()) - } else { - println!("{}: {}", i + 1, name) - } - } - - let mut mapping = HashMap::new(); - for (i, item) in items.iter().enumerate() { - mapping.insert(i + 1, item); - } - - let mut input = String::new(); - io::stdin() - .read_line(&mut input) - .expect("Problem reading input"); - - let input = input - .chars() - .next() - .expect("Problem parsing input") - .to_string() - .parse::(); - - let input = if let Ok(ip) = input { - ip - } else { - abort("Please try again; enter a number like 1 or 2 ."); - unreachable!() - }; - - let (name, content) = if let Some(r) = mapping.get(&input) { - r - } else { - abort(&format!( - "Can't find the {} associated with that number. Is it in the list above?", - type_ - )); - unreachable!() - }; - - (name.to_string(), content.clone()) -} - /// Find the operating system from a wheel filename. This doesn't appear to be available /// anywhere else on the Pypi Warehouse. fn os_from_wheel_fname(filename: &str) -> Result { @@ -829,9 +746,9 @@ pub fn find_first_file(path: &Path) -> PathBuf { } /// Mainly to avoid repeating error-handling code. -pub fn open_archive(path: &Path) -> fs::File { +pub fn open_archive(path: &Path) -> std::fs::File { // We must re-open the file after computing the hash. - if let Ok(f) = fs::File::open(&path) { + if let Ok(f) = std::fs::File::open(&path) { f } else { abort(&format!( @@ -849,7 +766,7 @@ pub fn parse_metadata(path: &Path) -> Metadata { let mut result = Metadata::default(); - let data = fs::read_to_string(path).expect("Problem reading METADATA"); + let data = std::fs::read_to_string(path).expect("Problem reading METADATA"); for line in data.lines() { if let Some(caps) = re("Version").captures(line) { let val = caps.get(1).unwrap().as_str(); @@ -894,29 +811,6 @@ fn default_python() -> Version { } } -/// Ask the user what Python version to use. 
-pub fn prompt_py_vers() -> Version { - print_color( - "Please enter the Python version for this project: (eg: 3.8)", - Color::Magenta, - ); - let default_ver = default_python(); - print!("Default [{}]:", default_ver); - std::io::stdout().flush().unwrap(); - let mut input = String::new(); - io::stdin() - .read_line(&mut input) - .expect("Unable to read user input for version"); - - input.pop(); // Remove trailing newline. - let input = input.replace("\n", "").replace("\r", ""); - if !input.is_empty() { - fallible_v_parse(&input) - } else { - default_ver - } -} - /// We've removed the git repos from packages to install form pypi, but make /// sure we flag them as not-to-uninstall. pub fn find_dont_uninstall(reqs: &[Req], dev_reqs: &[Req]) -> Vec { @@ -1011,17 +905,32 @@ pub fn process_reqs(reqs: Vec, git_path: &Path, paths: &util::Paths) -> Vec /// Read dependency data from a lock file. pub fn read_lock(path: &Path) -> Result> { - let data = fs::read_to_string(path)?; + let data = std::fs::read_to_string(path)?; Ok(toml::from_str(&data)?) } /// Write dependency data to a lock file. pub fn write_lock(path: &Path, data: &Lock) -> Result<(), Box> { let data = toml::to_string(data)?; - fs::write(path, data)?; + std::fs::write(path, data)?; Ok(()) } +pub fn handle_color_option(s: &str) -> ColorChoice { + match s { + "always" => ColorChoice::Always, + "ansi" => ColorChoice::AlwaysAnsi, + "auto" => { + if atty::is(atty::Stream::Stdout) { + ColorChoice::Auto + } else { + ColorChoice::Never + } + } + _ => ColorChoice::Never, + } +} + #[cfg(test)] mod tests { use rstest::rstest; diff --git a/src/util/os.rs b/src/util/os.rs new file mode 100644 index 0000000..3d013c7 --- /dev/null +++ b/src/util/os.rs @@ -0,0 +1,50 @@ +use std::str::FromStr; + +use regex::Regex; +use serde::Deserialize; + +use crate::dep_types::DependencyError; + +#[derive(Copy, Clone, Debug, Deserialize, PartialEq)] +/// Used to determine which version of a binary package to download. Assume 64-bit. 
+pub enum Os { + Linux32, + Linux, + Windows32, + Windows, + // Mac32, + Mac, + Any, +} + +impl FromStr for Os { + type Err = DependencyError; + + fn from_str(s: &str) -> Result { + let re_linux32 = Regex::new(r"(many)?linux.*i686").unwrap(); + let re_linux = Regex::new(r"((many)?linux.*|cygwin|(open)?bsd6*)").unwrap(); + let re_win = Regex::new(r"^win(dows|_amd64)?").unwrap(); + let re_mac = Regex::new(r"(macosx.*|darwin|.*mac.*)").unwrap(); + + Ok(match s { + x if re_linux32.is_match(x) => Self::Linux32, + x if re_linux.is_match(x) => Self::Linux, + "win32" => Self::Windows32, + x if re_win.is_match(x) => Self::Windows, + x if re_mac.is_match(x) => Self::Mac, + "any" => Self::Any, + _ => { + return Err(DependencyError::new(&format!("Problem parsing Os: {}", s))); + } + }) + } +} + +pub fn get_os() -> Os { + #[cfg(target_os = "windows")] + return Os::Windows; + #[cfg(target_os = "linux")] + return Os::Linux; + #[cfg(target_os = "macos")] + return Os::Mac; +} diff --git a/src/util/prompts.rs b/src/util/prompts.rs new file mode 100644 index 0000000..fc29b27 --- /dev/null +++ b/src/util/prompts.rs @@ -0,0 +1,87 @@ +use std::{ + collections::HashMap, + io::{self, Write}, +}; + +use termcolor::Color; + +use crate::{ + dep_types::Version, + util::{abort, default_python, fallible_v_parse, print_color}, +}; + +/// Ask the user what Python version to use. +pub fn py_vers() -> Version { + print_color( + "Please enter the Python version for this project: (eg: 3.8)", + Color::Magenta, + ); + let default_ver = default_python(); + print!("Default [{}]:", default_ver); + std::io::stdout().flush().unwrap(); + let mut input = String::new(); + io::stdin() + .read_line(&mut input) + .expect("Unable to read user input for version"); + + input.pop(); // Remove trailing newline. 
+ let input = input.replace("\n", "").replace("\r", ""); + if !input.is_empty() { + fallible_v_parse(&input) + } else { + default_ver + } +} + +/// A generic prompt function, where the user selects from a list +pub fn list( + init_msg: &str, + type_: &str, + items: &[(String, T)], + show_item: bool, +) -> (String, T) { + print_color(init_msg, Color::Magenta); + for (i, (name, content)) in items.iter().enumerate() { + if show_item { + println!("{}: {}: {}", i + 1, name, content.to_string()) + } else { + println!("{}: {}", i + 1, name) + } + } + + let mut mapping = HashMap::new(); + for (i, item) in items.iter().enumerate() { + mapping.insert(i + 1, item); + } + + let mut input = String::new(); + io::stdin() + .read_line(&mut input) + .expect("Problem reading input"); + + let input = input + .chars() + .next() + .expect("Problem parsing input") + .to_string() + .parse::(); + + let input = if let Ok(ip) = input { + ip + } else { + abort("Please try again; enter a number like 1 or 2 ."); + unreachable!() + }; + + let (name, content) = if let Some(r) = mapping.get(&input) { + r + } else { + abort(&format!( + "Can't find the {} associated with that number. Is it in the list above?", + type_ + )); + unreachable!() + }; + + (name.to_string(), content.clone()) +} From 2290c16ca2e57f2c80f2733dd0be997851ac39e2 Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Sat, 27 Nov 2021 14:55:22 +0200 Subject: [PATCH 23/41] Fix linux runs --- src/main.rs | 2 +- src/py_versions.rs | 2 +- src/util/mod.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main.rs b/src/main.rs index 1e2e767..6555170 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1042,7 +1042,7 @@ fn main() { // Now handle subcommands that require info about the environment match subcmd { - // Add pacakge names to `pyproject.toml` if needed. Then sync installed packages + // Add package names to `pyproject.toml` if needed. 
Then sync installed packages // and `pyflow.lock` with the `pyproject.toml`. // We use data from three sources: `pyproject.toml`, `pyflow.lock`, and // the currently-installed packages, found by crawling metadata in the `lib` path. diff --git a/src/py_versions.rs b/src/py_versions.rs index 3f477ac..4e2efd6 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -195,7 +195,7 @@ fn download(py_install_path: &Path, version: &Version) { } #[cfg(target_os = "linux")] { - let result = util::prompt_list( + let result = util::prompts::list( "Please enter the number corresponding to your Linux distro:", "Linux distro", &[ diff --git a/src/util/mod.rs b/src/util/mod.rs index 5a15da2..248d02b 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -530,7 +530,7 @@ pub fn find_or_create_venv( #[cfg(target_os = "linux")] { - let vers_path = fs::canonicalize(vers_path); + let vers_path = std::fs::canonicalize(vers_path); let vers_path = match vers_path { Ok(path) => path, Err(error) => { From ff2c4d354c587151a4eaf6963b045f2bd93503eb Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Sun, 28 Nov 2021 02:39:27 +0200 Subject: [PATCH 24/41] Removing unreachables --- src/build.rs | 1 - src/dep_resolution.rs | 5 +---- src/dep_types.rs | 1 - src/install.rs | 8 ++------ src/py_versions.rs | 6 ++---- src/script.rs | 1 - src/util/deps.rs | 3 +-- src/util/fs.rs | 29 ----------------------------- src/util/mod.rs | 31 +++++++++++-------------------- src/util/prompts.rs | 6 ++---- 10 files changed, 19 insertions(+), 72 deletions(-) delete mode 100644 src/util/fs.rs diff --git a/src/build.rs b/src/build.rs index a83dffa..533f482 100644 --- a/src/build.rs +++ b/src/build.rs @@ -81,7 +81,6 @@ fn cfg_to_setup(cfg: &crate::Config) -> String { "Problem parsing the `authors` field in `pyproject.toml`: {:?}", &cfg.authors )); - unreachable!() }; author = caps.get(1).unwrap().as_str().to_owned(); author_email = caps.get(2).unwrap().as_str().to_owned(); diff --git a/src/dep_resolution.rs 
b/src/dep_resolution.rs index 8eae487..74d4670 100644 --- a/src/dep_resolution.rs +++ b/src/dep_resolution.rs @@ -136,7 +136,6 @@ fn guess_graph( ReqType::Ne => os_ != os, _ => { util::abort("Reqtypes for Os must be == or !="); - unreachable!() } }, None => true, @@ -185,7 +184,6 @@ fn guess_graph( still occurs, consider opening an issue on github.", &reqs )); - unreachable!() }; // Now add info from lock packs for data we didn't query. The purpose of passing locks @@ -487,8 +485,7 @@ pub(super) mod res { "Can't get version info for the dependency `{}`. \ Is it spelled correctly? Is the internet connection ok?", &req.name - )); - ("".to_string(), Version::new(0, 0, 0), vec![]) // match-compatibility placeholder + )) } } }; diff --git a/src/dep_types.rs b/src/dep_types.rs index a09d0d8..a400013 100644 --- a/src/dep_types.rs +++ b/src/dep_types.rs @@ -953,7 +953,6 @@ impl Req { (fmtd_name, version) } else { util::abort(&format!("Unable to find version info for {:?}", &self.name)); - unreachable!() }; format!( r#"{} = "{}""#, diff --git a/src/install.rs b/src/install.rs index 27b437d..e50d76e 100644 --- a/src/install.rs +++ b/src/install.rs @@ -42,7 +42,6 @@ fn replace_distutils(setup_path: &Path) { and there's a problem with its setup.py.", setup_path )); - unreachable!() }; let re = Regex::new(r"distutils.core").unwrap(); @@ -205,7 +204,6 @@ pub fn download_and_install_package( let reader = io::BufReader::new(&file); let file_digest = sha256_digest(reader).unwrap_or_else(|_| { util::abort(&format!("Problem reading hash for {}", filename)); - unreachable!() }); let file_digest_str = data_encoding::HEXUPPER.encode(file_digest.as_ref()); @@ -336,8 +334,7 @@ pub fn download_and_install_package( util::abort(&format!( "Unable to find extracted folder name: {}", filename - )); - unreachable!() + )) }) .as_str(); @@ -675,8 +672,7 @@ pub fn download_and_install_git( caps.get(2).unwrap().as_str() ) } else { - util::abort("Unable to find the dist info path from wheel 
filename"); - unreachable!(); + util::abort("Unable to find the dist info path from wheel filename") }; let metadata = util::parse_metadata(&paths.lib.join(dist_info).join("METADATA")); // todo temp! diff --git a/src/py_versions.rs b/src/py_versions.rs index 4e2efd6..fedbeaa 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -38,8 +38,7 @@ impl From<(Version, Os)> for PyVers { let unsupported = "Unsupported python version requested; only Python ≥ 3.4 is supported. \ to fix this, edit the `py_version` line of `pyproject.toml`, or run `pyflow switch 3.7`"; if v_o.0.major != Some(3) { - util::abort(unsupported); - unreachable!() + util::abort(unsupported) } match v_o.0.minor.unwrap_or(0) { 4 => match v_o.1 { @@ -112,8 +111,7 @@ impl From<(Version, Os)> for PyVers { } }, _ => { - util::abort(unsupported); - unreachable!() + util::abort(unsupported) } } } diff --git a/src/script.rs b/src/script.rs index 0c213a3..ee0defc 100644 --- a/src/script.rs +++ b/src/script.rs @@ -29,7 +29,6 @@ pub fn run_script( util::abort( "`script` must be followed by the script to run, eg `pyflow script myscript.py`", ); - unreachable!() }; // todo: Consider a metadata file, but for now, we'll use folders diff --git a/src/util/deps.rs b/src/util/deps.rs index 1983cde..97f0d26 100644 --- a/src/util/deps.rs +++ b/src/util/deps.rs @@ -75,8 +75,7 @@ pub fn sync( let resolved = if let Ok(r) = res::resolve(&combined_reqs, &locked, os, py_vers) { r } else { - abort("Problem resolving dependencies"); - unreachable!() + abort("Problem resolving dependencies") }; // Now merge the existing lock packages with new ones from resolved packages. 
diff --git a/src/util/fs.rs b/src/util/fs.rs deleted file mode 100644 index 249160a..0000000 --- a/src/util/fs.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::path::PathBuf; - -pub fn pyflow_path() -> PathBuf { - directories::BaseDirs::new() - .expect("Problem finding base directory") - .data_dir() - .to_owned() - .join("pyflow") -} - -pub fn dep_cache_path(pyflow_path: &PathBuf) -> PathBuf { - pyflow_path.join("dependency_cache") -} - -pub fn script_env_path(pyflow_path: &PathBuf) -> PathBuf { - pyflow_path.join("script_envs") -} - -pub fn git_path(pyflow_path: &PathBuf) -> PathBuf { - pyflow_path.join("git") -} - -pub fn get_paths() -> (PathBuf, PathBuf, PathBuf, PathBuf) { - let pyflow_path = pyflow_path(); - let dep_cache_path = dep_cache_path(&pyflow_path); - let script_env_path = script_env_path(&pyflow_path); - let git_path = git_path(&pyflow_path); - (pyflow_path, dep_cache_path, script_env_path, git_path) -} diff --git a/src/util/mod.rs b/src/util/mod.rs index 248d02b..cbb493a 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -1,5 +1,5 @@ pub mod deps; -pub mod fs; +pub mod pathes; pub mod prompts; mod os; @@ -88,7 +88,7 @@ fn print_color_res_(message: &str, color: Color) -> io::Result<()> { /// Used when the program should exit from a condition that may arise normally from program use, /// like incorrect info in config files, problems with dependencies, or internet connection problems. /// We use `expect`, `panic!` etc for problems that indicate a bug in this program. -pub fn abort(message: &str) { +pub fn abort(message: &str) -> ! { print_color(message, Color::Red); process::exit(1) } @@ -321,8 +321,7 @@ pub fn merge_reqs( ) { r } else { - abort("Problem getting latest version of the package you added. Is it spelled correctly? Is the internet OK?"); - unreachable!() + abort("Problem getting latest version of the package you added. Is it spelled correctly? 
Is the internet OK?") }; added_req.constraints.push(Constraint::new( @@ -390,8 +389,7 @@ pub fn extract_zip( abort(&format!( "Problem reading the wheel archive: {:?}. Is it corrupted?", &file - )); - unreachable!() + )) }; for i in 0..archive.len() { @@ -518,8 +516,7 @@ pub fn find_or_create_venv( // todo: Handle this, eg by letting the user pick the one to use? "Multiple compatible Python environments found for this project.", - ); - unreachable!() + ) } } @@ -553,8 +550,7 @@ pub fn find_or_create_venv( abort(&format!( "Problem converting path to absolute path: {:?}", error - )); - unreachable!() + )) } }; (vers_path, py_vers) @@ -583,8 +579,7 @@ pub fn fallible_v_parse(vers: &str) -> Version { if let Ok(v) = Version::from_str(&vers) { v } else { - abort("Problem parsing the Python version you entered. It should look like this: 3.7 or 3.7.1"); - unreachable!() + abort("Problem parsing the Python version you entered. It should look like this: 3.7 or 3.7.1") } } @@ -687,8 +682,7 @@ pub fn find_best_release( "Unable to find a compatible release for {}: {}", name, version.to_string_color() - )); - unreachable!() + )) } else { best_release = source_releases[0].clone(); package_type = install::PackageType::Source; @@ -740,8 +734,7 @@ pub fn find_first_file(path: &Path) -> PathBuf { abort(&format!( "Problem the first file in the directory: {:?}", path - )); - unreachable!() + )) }; } @@ -755,8 +748,7 @@ pub fn open_archive(path: &Path) -> std::fs::File { "Problem opening the archive file: {:?}. 
Was there a problem while downloading it?", &path - )); - unreachable!() + )) } } @@ -858,8 +850,7 @@ pub fn canon_join(path: &Path, extend: &str) -> PathBuf { let canon = match ex_path.canonicalize() { Ok(c) => c, Err(e) => { - abort(&format!("{}\n\"{}\"", e, extend)); - unreachable!() + abort(&format!("{}\n\"{}\"", e, extend)) } }; let mut new_path = path.to_path_buf(); diff --git a/src/util/prompts.rs b/src/util/prompts.rs index fc29b27..c939f87 100644 --- a/src/util/prompts.rs +++ b/src/util/prompts.rs @@ -69,8 +69,7 @@ pub fn list( let input = if let Ok(ip) = input { ip } else { - abort("Please try again; enter a number like 1 or 2 ."); - unreachable!() + abort("Please try again; enter a number like 1 or 2 .") }; let (name, content) = if let Some(r) = mapping.get(&input) { @@ -79,8 +78,7 @@ pub fn list( abort(&format!( "Can't find the {} associated with that number. Is it in the list above?", type_ - )); - unreachable!() + )) }; (name.to_string(), content.clone()) From b79abd1c12c81d1832115daca6ae69e3fbf1a43c Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Sun, 28 Nov 2021 02:39:43 +0200 Subject: [PATCH 25/41] Moving Structopts into separate module --- src/cli_options.rs | 161 +++++++++++ src/main.rs | 635 +++---------------------------------------- src/pyproject/mod.rs | 397 +++++++++++++++++++++++++++ src/util/mod.rs | 2 +- src/util/paths.rs | 29 ++ 5 files changed, 630 insertions(+), 594 deletions(-) create mode 100644 src/cli_options.rs create mode 100644 src/pyproject/mod.rs create mode 100644 src/util/paths.rs diff --git a/src/cli_options.rs b/src/cli_options.rs new file mode 100644 index 0000000..073283e --- /dev/null +++ b/src/cli_options.rs @@ -0,0 +1,161 @@ +use std::str::FromStr; + +use structopt::StructOpt; + +#[derive(StructOpt, Debug)] +#[structopt(name = "pyflow", about = "Python packaging and publishing")] +pub struct Opt { + #[structopt(subcommand)] + pub subcmds: SubCommand, + + /// Force a color option: auto (default), always, ansi, 
never + #[structopt(short, long)] + pub color: Option, +} + +#[derive(StructOpt, Debug)] +pub enum SubCommand { + /// Create a project folder with the basics + #[structopt(name = "new")] + New { + #[structopt(name = "name")] + name: String, // holds the project name. + }, + + /** Install packages from `pyproject.toml`, `pyflow.lock`, or specified ones. Example: + + `pyflow install`: sync your installation with `pyproject.toml`, or `pyflow.lock` if it exists. + `pyflow install numpy scipy`: install `numpy` and `scipy`.*/ + #[structopt(name = "install")] + Install { + #[structopt(name = "packages")] + packages: Vec, + /// Save package to your dev-dependencies section + #[structopt(short, long)] + dev: bool, + }, + /// Uninstall all packages, or ones specified + #[structopt(name = "uninstall")] + Uninstall { + #[structopt(name = "packages")] + packages: Vec, + }, + /// Display all installed packages and console scripts + #[structopt(name = "list")] + List, + /// Build the package - source and wheel + #[structopt(name = "package")] + Package { + #[structopt(name = "extras")] + extras: Vec, + }, + /// Publish to `pypi` + #[structopt(name = "publish")] + Publish, + /// Create a `pyproject.toml` from requirements.txt, pipfile etc, setup.py etc + #[structopt(name = "init")] + Init, + /// Remove the environment, and uninstall all packages + #[structopt(name = "reset")] + Reset, + /// Remove cached packages, Python installs, or script-environments. Eg to free up hard drive space. + #[structopt(name = "clear")] + Clear, + /// Run a CLI script like `ipython` or `black`. Note that you can simply run `pyflow black` + /// as a shortcut. + // Dummy option with space at the end for documentation + #[structopt(name = "run ")] // We don't need to invoke this directly, but the option exists + Run, + + /// Run the project python or script with the project python environment. 
+ /// As a shortcut you can simply specify a script name ending in `.py` + // Dummy option with space at the end for documentation + #[structopt(name = "python ")] + Python, + + /// Run a standalone script not associated with a project + // Dummy option with space at the end for documentation + #[structopt(name = "script ")] + Script, + // /// Run a package globally; used for CLI tools like `ipython` and `black`. Doesn't + // /// interfere Python installations. Must have been installed with `pyflow install -g black` etc + // #[structopt(name = "global")] + // Global { + // #[structopt(name = "name")] + // name: String, + // }, + /// Change the Python version for this project. eg `pyflow switch 3.8`. Equivalent to setting + /// `py_version` in `pyproject.toml`. + #[structopt(name = "switch")] + Switch { + #[structopt(name = "version")] + version: String, + }, + // Documentation for supported external subcommands can be documented by + // adding a `dummy` subcommand with the name having a trailing space. 
+ // #[structopt(name = "external ")] + #[structopt(external_subcommand, name = "external")] + External(Vec), +} + +#[derive(Clone, Debug)] +pub enum ExternalSubcommands { + Run, + Script, + Python, + ImpliedRun(String), + ImpliedPython(String), +} + +impl ToString for ExternalSubcommands { + fn to_string(&self) -> String { + match self { + Self::Run => "run".into(), + Self::Script => "script".into(), + Self::Python => "python".into(), + Self::ImpliedRun(x) => x.into(), + Self::ImpliedPython(x) => x.into(), + } + } +} + +impl FromStr for ExternalSubcommands { + type Err = anyhow::Error; + fn from_str(s: &str) -> anyhow::Result { + let result = match s { + "run" => Self::Run, + "script" => Self::Script, + "python" => Self::Python, + x if x.ends_with(".py") => Self::ImpliedPython(x.to_string()), + x => Self::ImpliedRun(x.to_string()), + }; + Ok(result) + } +} + +#[derive(Clone, Debug)] +pub struct ExternalCommand { + pub cmd: ExternalSubcommands, + pub args: Vec, +} + +impl ExternalCommand { + pub fn from_opt(args: Vec) -> Self { + let cmd = ExternalSubcommands::from_str(&args[0]).unwrap(); + let cmd_args = match cmd { + ExternalSubcommands::Run + | ExternalSubcommands::Script + | ExternalSubcommands::Python => &args[1..], + ExternalSubcommands::ImpliedRun(_) | ExternalSubcommands::ImpliedPython(_) => &args, + }; + let cmd = match cmd { + ExternalSubcommands::ImpliedRun(_) => ExternalSubcommands::Run, + ExternalSubcommands::ImpliedPython(_) => ExternalSubcommands::Python, + x => x, + }; + Self { + cmd, + args: cmd_args.to_vec(), + } + } +} diff --git a/src/main.rs b/src/main.rs index 6555170..9c40239 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,25 +1,24 @@ #![allow(clippy::non_ascii_literal)] use crate::actions::new; +use crate::cli_options::{ExternalCommand, ExternalSubcommands, Opt, SubCommand}; use crate::dep_types::{Constraint, Lock, Package, Req, Version}; +use crate::pyproject::Config; use crate::util::abort; use crate::util::deps::sync; use regex::Regex; 
-use serde::Deserialize; use std::{ - collections::HashMap, env, fs, path::{Path, PathBuf}, - str::FromStr, sync::{Arc, RwLock}, }; -use structopt::StructOpt; use termcolor::{Color, ColorChoice}; mod actions; mod build; +mod cli_options; mod commands; mod dep_parser; mod dep_resolution; @@ -27,6 +26,7 @@ mod dep_types; mod files; mod install; mod py_versions; +mod pyproject; mod script; mod util; @@ -41,190 +41,6 @@ mod util; type PackToInstall = ((String, Version), Option<(u32, String)>); // ((Name, Version), (parent id, rename name)) -#[derive(StructOpt, Debug)] -#[structopt(name = "pyflow", about = "Python packaging and publishing")] -struct Opt { - #[structopt(subcommand)] - subcmds: SubCommand, - - /// Force a color option: auto (default), always, ansi, never - #[structopt(short, long)] - color: Option, -} - -#[derive(StructOpt, Debug)] -enum SubCommand { - /// Create a project folder with the basics - #[structopt(name = "new")] - New { - #[structopt(name = "name")] - name: String, // holds the project name. - }, - - /** Install packages from `pyproject.toml`, `pyflow.lock`, or specified ones. Example: - - `pyflow install`: sync your installation with `pyproject.toml`, or `pyflow.lock` if it exists. 
- `pyflow install numpy scipy`: install `numpy` and `scipy`.*/ - #[structopt(name = "install")] - Install { - #[structopt(name = "packages")] - packages: Vec, - /// Save package to your dev-dependencies section - #[structopt(short, long)] - dev: bool, - }, - /// Uninstall all packages, or ones specified - #[structopt(name = "uninstall")] - Uninstall { - #[structopt(name = "packages")] - packages: Vec, - }, - /// Display all installed packages and console scripts - #[structopt(name = "list")] - List, - /// Build the package - source and wheel - #[structopt(name = "package")] - Package { - #[structopt(name = "extras")] - extras: Vec, - }, - /// Publish to `pypi` - #[structopt(name = "publish")] - Publish, - /// Create a `pyproject.toml` from requirements.txt, pipfile etc, setup.py etc - #[structopt(name = "init")] - Init, - /// Remove the environment, and uninstall all packages - #[structopt(name = "reset")] - Reset, - /// Remove cached packages, Python installs, or script-environments. Eg to free up hard drive space. - #[structopt(name = "clear")] - Clear, - /// Run a CLI script like `ipython` or `black`. Note that you can simply run `pyflow black` - /// as a shortcut. - // Dummy option with space at the end for documentation - #[structopt(name = "run ")] // We don't need to invoke this directly, but the option exists - Run, - - /// Run the project python or script with the project python environment. - /// As a shortcut you can simply specify a script name ending in `.py` - // Dummy option with space at the end for documentation - #[structopt(name = "python ")] - Python, - - /// Run a standalone script not associated with a project - // Dummy option with space at the end for documentation - #[structopt(name = "script ")] - Script, - // /// Run a package globally; used for CLI tools like `ipython` and `black`. Doesn't - // /// interfere Python installations. 
Must have been installed with `pyflow install -g black` etc - // #[structopt(name = "global")] - // Global { - // #[structopt(name = "name")] - // name: String, - // }, - /// Change the Python version for this project. eg `pyflow switch 3.8`. Equivalent to setting - /// `py_version` in `pyproject.toml`. - #[structopt(name = "switch")] - Switch { - #[structopt(name = "version")] - version: String, - }, - // Documentation for supported external subcommands can be documented by - // adding a `dummy` subcommand with the name having a trailing space. - // #[structopt(name = "external ")] - #[structopt(external_subcommand, name = "external")] - External(Vec), -} - -#[derive(Clone, Debug)] -enum ExternalSubcommands { - Run, - Script, - Python, - ImpliedRun(String), - ImpliedPython(String), -} - -impl ToString for ExternalSubcommands { - fn to_string(&self) -> String { - match self { - Self::Run => "run".into(), - Self::Script => "script".into(), - Self::Python => "python".into(), - Self::ImpliedRun(x) => x.into(), - Self::ImpliedPython(x) => x.into(), - } - } -} - -impl FromStr for ExternalSubcommands { - type Err = anyhow::Error; - fn from_str(s: &str) -> anyhow::Result { - let result = match s { - "run" => Self::Run, - "script" => Self::Script, - "python" => Self::Python, - x if x.ends_with(".py") => Self::ImpliedPython(x.to_string()), - x => Self::ImpliedRun(x.to_string()), - }; - Ok(result) - } -} - -#[derive(Clone, Debug)] -struct ExternalCommand { - cmd: ExternalSubcommands, - args: Vec, -} - -impl ExternalCommand { - fn from_opt(args: Vec) -> Self { - let cmd = ExternalSubcommands::from_str(&args[0]).unwrap(); - let cmd_args = match cmd { - ExternalSubcommands::Run - | ExternalSubcommands::Script - | ExternalSubcommands::Python => &args[1..], - ExternalSubcommands::ImpliedRun(_) | ExternalSubcommands::ImpliedPython(_) => &args, - }; - let cmd = match cmd { - ExternalSubcommands::ImpliedRun(_) => ExternalSubcommands::Run, - ExternalSubcommands::ImpliedPython(_) => 
ExternalSubcommands::Python, - x => x, - }; - Self { - cmd, - args: cmd_args.to_vec(), - } - } -} -/// A config, parsed from pyproject.toml -#[derive(Clone, Debug, Default, Deserialize)] -// todo: Auto-desr some of these -pub struct Config { - name: Option, - py_version: Option, - reqs: Vec, - dev_reqs: Vec, - version: Option, - authors: Vec, - license: Option, - extras: HashMap, - description: Option, - classifiers: Vec, // https://pypi.org/classifiers/ - keywords: Vec, - homepage: Option, - repository: Option, - repo_url: Option, - package_url: Option, - readme: Option, - build: Option, // A python file used to build non-python extensions - // entry_points: HashMap>, // todo option? - scripts: HashMap, //todo: put under [tool.pyflow.scripts] ? - // console_scripts: Vec, // We don't parse these; pass them to `setup.py` as-entered. - python_requires: Option, -} - /// Reduce repetition between reqs and dev reqs when populating reqs of path reqs. fn pop_reqs_helper(reqs: &[Req], dev: bool) -> Vec { let mut result = vec![]; @@ -273,375 +89,6 @@ fn pop_reqs_helper(reqs: &[Req], dev: bool) -> Vec { result } -impl Config { - /// Helper fn to prevent repetition - fn parse_deps(deps: HashMap) -> Vec { - let mut result = Vec::new(); - for (name, data) in deps { - let constraints; - let mut extras = None; - let mut git = None; - let mut path = None; - let mut python_version = None; - match data { - files::DepComponentWrapper::A(constrs) => { - constraints = if let Ok(c) = Constraint::from_str_multiple(&constrs) { - c - } else { - abort(&format!( - "Problem parsing constraints in `pyproject.toml`: {}", - &constrs - )); - unreachable!() - }; - } - files::DepComponentWrapper::B(subdata) => { - constraints = match subdata.constrs { - Some(constrs) => { - if let Ok(c) = Constraint::from_str_multiple(&constrs) { - c - } else { - abort(&format!( - "Problem parsing constraints in `pyproject.toml`: {}", - &constrs - )); - unreachable!() - } - } - None => vec![], - }; - - if let 
Some(ex) = subdata.extras { - extras = Some(ex); - } - if let Some(p) = subdata.path { - path = Some(p); - } - if let Some(repo) = subdata.git { - git = Some(repo); - } - if let Some(v) = subdata.python { - let pv = Constraint::from_str(&v) - .expect("Problem parsing python version in dependency"); - python_version = Some(vec![pv]); - } - } - } - - result.push(Req { - name, - constraints, - extra: None, - sys_platform: None, - python_version, - install_with_extras: extras, - path, - git, - }); - } - result - } - - // todo: DRY at the top from `from_file`. - fn from_pipfile(path: &Path) -> Option { - // todo: Lots of tweaks and QC could be done re what fields to parse, and how best to - // todo parse and store them. - let toml_str = match fs::read_to_string(path).ok() { - Some(d) => d, - None => return None, - }; - - let decoded: files::Pipfile = if let Ok(d) = toml::from_str(&toml_str) { - d - } else { - abort("Problem parsing `Pipfile`"); - unreachable!() - }; - let mut result = Self::default(); - - if let Some(pipfile_deps) = decoded.packages { - result.reqs = Self::parse_deps(pipfile_deps); - } - if let Some(pipfile_dev_deps) = decoded.dev_packages { - result.dev_reqs = Self::parse_deps(pipfile_dev_deps); - } - - Some(result) - } - - /// Pull config data from `pyproject.toml`. We use this to deserialize things like Versions - /// and requirements. - fn from_file(path: &Path) -> Option { - // todo: Lots of tweaks and QC could be done re what fields to parse, and how best to - // todo parse and store them. - let toml_str = match fs::read_to_string(path) { - Ok(d) => d, - Err(_) => return None, - }; - - let decoded: files::Pyproject = if let Ok(d) = toml::from_str(&toml_str) { - d - } else { - abort("Problem parsing `pyproject.toml`"); - unreachable!() - }; - let mut result = Self::default(); - - // Parse Poetry first, since we'll use pyflow if there's a conflict. 
- if let Some(po) = decoded.tool.poetry { - if let Some(v) = po.name { - result.name = Some(v); - } - if let Some(v) = po.authors { - result.authors = v; - } - if let Some(v) = po.license { - result.license = Some(v); - } - - if let Some(v) = po.homepage { - result.homepage = Some(v); - } - if let Some(v) = po.description { - result.description = Some(v); - } - if let Some(v) = po.repository { - result.repository = Some(v); - } - if let Some(v) = po.readme { - result.readme = Some(v); - } - if let Some(v) = po.build { - result.build = Some(v); - } - // todo: Process entry pts, classifiers etc? - if let Some(v) = po.classifiers { - result.classifiers = v; - } - if let Some(v) = po.keywords { - result.keywords = v; - } - - // if let Some(v) = po.source { - // result.source = v; - // } - // if let Some(v) = po.scripts { - // result.console_scripts = v; - // } - if let Some(v) = po.extras { - result.extras = v; - } - - if let Some(v) = po.version { - result.version = Some( - Version::from_str(&v).expect("Problem parsing version in `pyproject.toml`"), - ) - } - - // todo: DRY (c+p) from pyflow dependency parsing, other than parsing python version here, - // todo which only poetry does. 
- // todo: Parse poetry dev deps - if let Some(deps) = po.dependencies { - for (name, data) in deps { - let constraints; - let mut extras = None; - let mut python_version = None; - match data { - files::DepComponentWrapperPoetry::A(constrs) => { - constraints = Constraint::from_str_multiple(&constrs) - .expect("Problem parsing constraints in `pyproject.toml`."); - } - files::DepComponentWrapperPoetry::B(subdata) => { - constraints = Constraint::from_str_multiple(&subdata.constrs) - .expect("Problem parsing constraints in `pyproject.toml`."); - if let Some(ex) = subdata.extras { - extras = Some(ex); - } - if let Some(v) = subdata.python { - let pv = Constraint::from_str(&v) - .expect("Problem parsing python version in dependency"); - python_version = Some(vec![pv]); - } - // todo repository etc - } - } - if &name.to_lowercase() == "python" { - if let Some(constr) = constraints.get(0) { - result.py_version = Some(constr.version.clone()) - } - } else { - result.reqs.push(Req { - name, - constraints, - extra: None, - sys_platform: None, - python_version, - install_with_extras: extras, - path: None, - git: None, - }); - } - } - } - } - - if let Some(pf) = decoded.tool.pyflow { - if let Some(v) = pf.name { - result.name = Some(v); - } - - if let Some(v) = pf.authors { - result.authors = if v.is_empty() { - util::get_git_author() - } else { - v - }; - } - if let Some(v) = pf.license { - result.license = Some(v); - } - if let Some(v) = pf.homepage { - result.homepage = Some(v); - } - if let Some(v) = pf.description { - result.description = Some(v); - } - if let Some(v) = pf.repository { - result.repository = Some(v); - } - - // todo: Process entry pts, classifiers etc? 
- if let Some(v) = pf.classifiers { - result.classifiers = v; - } - if let Some(v) = pf.keywords { - result.keywords = v; - } - if let Some(v) = pf.readme { - result.readme = Some(v); - } - if let Some(v) = pf.build { - result.build = Some(v); - } - // if let Some(v) = pf.entry_points { - // result.entry_points = v; - // } // todo - if let Some(v) = pf.scripts { - result.scripts = v; - } - - if let Some(v) = pf.python_requires { - result.python_requires = Some(v); - } - - if let Some(v) = pf.package_url { - result.package_url = Some(v); - } - - if let Some(v) = pf.version { - result.version = Some( - Version::from_str(&v).expect("Problem parsing version in `pyproject.toml`"), - ) - } - - if let Some(v) = pf.py_version { - result.py_version = Some( - Version::from_str(&v) - .expect("Problem parsing python version in `pyproject.toml`"), - ); - } - - if let Some(deps) = pf.dependencies { - result.reqs = Self::parse_deps(deps); - } - if let Some(deps) = pf.dev_dependencies { - result.dev_reqs = Self::parse_deps(deps); - } - } - - Some(result) - } - - /// For reqs of `path` type, add their sub-reqs by parsing `setup.py` or `pyproject.toml`. - fn populate_path_subreqs(&mut self) { - self.reqs.append(&mut pop_reqs_helper(&self.reqs, false)); - self.dev_reqs - .append(&mut pop_reqs_helper(&self.dev_reqs, true)); - } - - /// Create a new `pyproject.toml` file. - fn write_file(&self, path: &Path) { - let file = path; - if file.exists() { - abort("`pyproject.toml` already exists") - } - - let mut result = String::new(); - - result.push_str("\n[tool.pyflow]\n"); - if let Some(name) = &self.name { - result.push_str(&("name = \"".to_owned() + name + "\"\n")); - } else { - // Give name, and a few other fields default values. 
- result.push_str(&("name = \"\"".to_owned() + "\n")); - } - if let Some(py_v) = &self.py_version { - result.push_str(&("py_version = \"".to_owned() + &py_v.to_string_no_patch() + "\"\n")); - } else { - result.push_str(&("py_version = \"3.8\"".to_owned() + "\n")); - } - if let Some(vers) = self.version.clone() { - result.push_str(&(format!("version = \"{}\"", vers.to_string() + "\n"))); - } else { - result.push_str("version = \"0.1.0\""); - result.push('\n'); - } - if !self.authors.is_empty() { - result.push_str("authors = [\""); - for (i, author) in self.authors.iter().enumerate() { - if i != 0 { - result.push_str(", "); - } - result.push_str(author); - } - result.push_str("\"]\n"); - } - - if let Some(v) = &self.description { - result.push_str(&(format!("description = \"{}\"", v) + "\n")); - } - if let Some(v) = &self.homepage { - result.push_str(&(format!("homepage = \"{}\"", v) + "\n")); - } - - // todo: More fields - - result.push('\n'); - result.push_str("[tool.pyflow.scripts]\n"); - for (name, mod_fn) in &self.scripts { - result.push_str(&(format!("{} = \"{}\"", name, mod_fn) + "\n")); - } - - result.push('\n'); - result.push_str("[tool.pyflow.dependencies]\n"); - for dep in &self.reqs { - result.push_str(&(dep.to_cfg_string() + "\n")); - } - - result.push('\n'); - result.push_str("[tool.pyflow.dev-dependencies]\n"); - for dep in &self.dev_reqs { - result.push_str(&(dep.to_cfg_string() + "\n")); - } - - result.push('\n'); // trailing newline - - if fs::write(file, result).is_err() { - abort("Problem writing `pyproject.toml`") - } - } -} - /// Cli Config to hold command line options struct CliConfig { pub color_choice: ColorChoice, @@ -711,7 +158,6 @@ fn run_cli_tool( a.clone() } else { abort("`run` must be followed by the script to run, eg `pyflow run black`"); - unreachable!() }; // If the script we're calling is specified in `pyproject.toml`, ensure it exists. 
@@ -743,7 +189,6 @@ fn run_cli_tool( } } else { abort(&format!("Problem parsing the following script: {:#?}. Must be in the format module:function_name", s)); - unreachable!() } return; } @@ -837,10 +282,10 @@ const LOCK_FILENAME: &str = "pyflow.lock"; /// We process input commands in a deliberate order, to ensure the required, and only the required /// setup steps are accomplished before each. fn main() { - let (pyflow_path, dep_cache_path, script_env_path, git_path) = util::fs::get_paths(); + let (pyflow_path, dep_cache_path, script_env_path, git_path) = util::paths::get_paths(); let os = util::get_os(); - let opt = Opt::from_args(); + let opt = ::from_args(); #[cfg(debug_assertions)] eprintln!("opts {:?}", opt); @@ -861,49 +306,53 @@ fn main() { None }; - // Run this before parsing the config. - if let Some(x) = extcmd.clone() { - if let ExternalSubcommands::Script = x.cmd { - script::run_script(&script_env_path, &dep_cache_path, os, &x.args, &pyflow_path); - return; - } - } - - if let SubCommand::New { name } = subcmd { - if new(&name).is_err() { - abort(actions::NEW_ERROR_MESSAGE); - } - util::print_color( - &format!("Created a new Python project named {}", name), - Color::Green, - ); - return; - } + match &subcmd { + SubCommand::External(ref x) => match ExternalCommand::from_opt(x.to_owned()) { + ExternalCommand { cmd, args } => match cmd { + ExternalSubcommands::Script => { + script::run_script(&script_env_path, &dep_cache_path, os, &args, &pyflow_path); + } + // TODO: Move branches to omitted match + _ => (), + }, + }, + SubCommand::New { name } => { + if new(&name).is_err() { + abort(actions::NEW_ERROR_MESSAGE); + } - if let SubCommand::Init {} = subcmd { - let cfg_path = PathBuf::from(CFG_FILENAME); - if cfg_path.exists() { - abort("pyproject.toml already exists - not overwriting.") + util::print_color( + &format!("Created a new Python project named {}", name), + Color::Green, + ); + return; } + SubCommand::Init => { + let cfg_path = 
PathBuf::from(CFG_FILENAME); + if cfg_path.exists() { + abort("pyproject.toml already exists - not overwriting.") + } - let mut cfg = match PathBuf::from("Pipfile").exists() { - true => Config::from_pipfile(&PathBuf::from("Pipfile")).unwrap_or_default(), - false => Config::default(), - }; + let mut cfg = match PathBuf::from("Pipfile").exists() { + true => Config::from_pipfile(&PathBuf::from("Pipfile")).unwrap_or_default(), + false => Config::default(), + }; - cfg.py_version = Some(util::prompts::py_vers()); + cfg.py_version = Some(util::prompts::py_vers()); - files::parse_req_dot_text(&mut cfg, &PathBuf::from("requirements.txt")); + files::parse_req_dot_text(&mut cfg, &PathBuf::from("requirements.txt")); - cfg.write_file(&cfg_path); - util::print_color("Created `pyproject.toml`", Color::Green); - // Don't return here; let the normal logic create the venv now. + cfg.write_file(&cfg_path); + util::print_color("Created `pyproject.toml`", Color::Green); + // Don't return here; let the normal logic create the venv now. + } + // TODO: Move branches to omitted match + _ => {} } // We need access to the config from here on; throw an error if we can't find it. let mut cfg_path = PathBuf::from(CFG_FILENAME); if !&cfg_path.exists() { - // if let SubCommand::Python { args: _ } = subcmd { // Try looking recursively in parent directories for a config file. 
let recursion_limit = 8; // How my levels to look up let mut current_level = env::current_dir().expect("Can't access current directory"); diff --git a/src/pyproject/mod.rs b/src/pyproject/mod.rs new file mode 100644 index 0000000..c009638 --- /dev/null +++ b/src/pyproject/mod.rs @@ -0,0 +1,397 @@ +use std::{collections::HashMap, fs, path::Path, str::FromStr}; + +use serde::Deserialize; + +use crate::{dep_types::{Constraint, Req, Version}, files, pop_reqs_helper, util::{self, abort}}; + +/// A config, parsed from pyproject.toml +#[derive(Clone, Debug, Default, Deserialize)] +// todo: Auto-desr some of these +pub struct Config { + pub name: Option, + pub py_version: Option, + pub reqs: Vec, + pub dev_reqs: Vec, + pub version: Option, + pub authors: Vec, + pub license: Option, + pub extras: HashMap, + pub description: Option, + pub classifiers: Vec, // https://pypi.org/classifiers/ + pub keywords: Vec, + pub homepage: Option, + pub repository: Option, + pub repo_url: Option, + pub package_url: Option, + pub readme: Option, + pub build: Option, // A python file used to build non-python extensions + // entry_points: HashMap>, // todo option? + pub scripts: HashMap, //todo: put under [tool.pyflow.scripts] ? + // console_scripts: Vec, // We don't parse these; pass them to `setup.py` as-entered. 
+ pub python_requires: Option, +} + +impl Config { + /// Helper fn to prevent repetition + pub fn parse_deps(deps: HashMap) -> Vec { + let mut result = Vec::new(); + for (name, data) in deps { + let constraints; + let mut extras = None; + let mut git = None; + let mut path = None; + let mut python_version = None; + match data { + files::DepComponentWrapper::A(constrs) => { + constraints = if let Ok(c) = Constraint::from_str_multiple(&constrs) { + c + } else { + abort(&format!( + "Problem parsing constraints in `pyproject.toml`: {}", + &constrs + )) + }; + } + files::DepComponentWrapper::B(subdata) => { + constraints = match subdata.constrs { + Some(constrs) => { + if let Ok(c) = Constraint::from_str_multiple(&constrs) { + c + } else { + abort(&format!( + "Problem parsing constraints in `pyproject.toml`: {}", + &constrs + )) + } + } + None => vec![], + }; + + if let Some(ex) = subdata.extras { + extras = Some(ex); + } + if let Some(p) = subdata.path { + path = Some(p); + } + if let Some(repo) = subdata.git { + git = Some(repo); + } + if let Some(v) = subdata.python { + let pv = Constraint::from_str(&v) + .expect("Problem parsing python version in dependency"); + python_version = Some(vec![pv]); + } + } + } + + result.push(Req { + name, + constraints, + extra: None, + sys_platform: None, + python_version, + install_with_extras: extras, + path, + git, + }); + } + result + } + + // todo: DRY at the top from `from_file`. + pub fn from_pipfile(path: &Path) -> Option { + // todo: Lots of tweaks and QC could be done re what fields to parse, and how best to + // todo parse and store them. 
+ let toml_str = match fs::read_to_string(path).ok() { + Some(d) => d, + None => return None, + }; + + let decoded: files::Pipfile = if let Ok(d) = toml::from_str(&toml_str) { + d + } else { + abort("Problem parsing `Pipfile`") + }; + let mut result = Self::default(); + + if let Some(pipfile_deps) = decoded.packages { + result.reqs = Self::parse_deps(pipfile_deps); + } + if let Some(pipfile_dev_deps) = decoded.dev_packages { + result.dev_reqs = Self::parse_deps(pipfile_dev_deps); + } + + Some(result) + } + + /// Pull config data from `pyproject.toml`. We use this to deserialize things like Versions + /// and requirements. + pub fn from_file(path: &Path) -> Option { + // todo: Lots of tweaks and QC could be done re what fields to parse, and how best to + // todo parse and store them. + let toml_str = match fs::read_to_string(path) { + Ok(d) => d, + Err(_) => return None, + }; + + let decoded: files::Pyproject = if let Ok(d) = toml::from_str(&toml_str) { + d + } else { + abort("Problem parsing `pyproject.toml`"); + }; + let mut result = Self::default(); + + // Parse Poetry first, since we'll use pyflow if there's a conflict. + if let Some(po) = decoded.tool.poetry { + if let Some(v) = po.name { + result.name = Some(v); + } + if let Some(v) = po.authors { + result.authors = v; + } + if let Some(v) = po.license { + result.license = Some(v); + } + + if let Some(v) = po.homepage { + result.homepage = Some(v); + } + if let Some(v) = po.description { + result.description = Some(v); + } + if let Some(v) = po.repository { + result.repository = Some(v); + } + if let Some(v) = po.readme { + result.readme = Some(v); + } + if let Some(v) = po.build { + result.build = Some(v); + } + // todo: Process entry pts, classifiers etc? 
+ if let Some(v) = po.classifiers { + result.classifiers = v; + } + if let Some(v) = po.keywords { + result.keywords = v; + } + + // if let Some(v) = po.source { + // result.source = v; + // } + // if let Some(v) = po.scripts { + // result.console_scripts = v; + // } + if let Some(v) = po.extras { + result.extras = v; + } + + if let Some(v) = po.version { + result.version = Some( + Version::from_str(&v).expect("Problem parsing version in `pyproject.toml`"), + ) + } + + // todo: DRY (c+p) from pyflow dependency parsing, other than parsing python version here, + // todo which only poetry does. + // todo: Parse poetry dev deps + if let Some(deps) = po.dependencies { + for (name, data) in deps { + let constraints; + let mut extras = None; + let mut python_version = None; + match data { + files::DepComponentWrapperPoetry::A(constrs) => { + constraints = Constraint::from_str_multiple(&constrs) + .expect("Problem parsing constraints in `pyproject.toml`."); + } + files::DepComponentWrapperPoetry::B(subdata) => { + constraints = Constraint::from_str_multiple(&subdata.constrs) + .expect("Problem parsing constraints in `pyproject.toml`."); + if let Some(ex) = subdata.extras { + extras = Some(ex); + } + if let Some(v) = subdata.python { + let pv = Constraint::from_str(&v) + .expect("Problem parsing python version in dependency"); + python_version = Some(vec![pv]); + } + // todo repository etc + } + } + if &name.to_lowercase() == "python" { + if let Some(constr) = constraints.get(0) { + result.py_version = Some(constr.version.clone()) + } + } else { + result.reqs.push(Req { + name, + constraints, + extra: None, + sys_platform: None, + python_version, + install_with_extras: extras, + path: None, + git: None, + }); + } + } + } + } + + if let Some(pf) = decoded.tool.pyflow { + if let Some(v) = pf.name { + result.name = Some(v); + } + + if let Some(v) = pf.authors { + result.authors = if v.is_empty() { + util::get_git_author() + } else { + v + }; + } + if let Some(v) = pf.license { 
+ result.license = Some(v); + } + if let Some(v) = pf.homepage { + result.homepage = Some(v); + } + if let Some(v) = pf.description { + result.description = Some(v); + } + if let Some(v) = pf.repository { + result.repository = Some(v); + } + + // todo: Process entry pts, classifiers etc? + if let Some(v) = pf.classifiers { + result.classifiers = v; + } + if let Some(v) = pf.keywords { + result.keywords = v; + } + if let Some(v) = pf.readme { + result.readme = Some(v); + } + if let Some(v) = pf.build { + result.build = Some(v); + } + // if let Some(v) = pf.entry_points { + // result.entry_points = v; + // } // todo + if let Some(v) = pf.scripts { + result.scripts = v; + } + + if let Some(v) = pf.python_requires { + result.python_requires = Some(v); + } + + if let Some(v) = pf.package_url { + result.package_url = Some(v); + } + + if let Some(v) = pf.version { + result.version = Some( + Version::from_str(&v).expect("Problem parsing version in `pyproject.toml`"), + ) + } + + if let Some(v) = pf.py_version { + result.py_version = Some( + Version::from_str(&v) + .expect("Problem parsing python version in `pyproject.toml`"), + ); + } + + if let Some(deps) = pf.dependencies { + result.reqs = Self::parse_deps(deps); + } + if let Some(deps) = pf.dev_dependencies { + result.dev_reqs = Self::parse_deps(deps); + } + } + + Some(result) + } + + /// For reqs of `path` type, add their sub-reqs by parsing `setup.py` or `pyproject.toml`. + pub fn populate_path_subreqs(&mut self) { + self.reqs.append(&mut pop_reqs_helper(&self.reqs, false)); + self.dev_reqs + .append(&mut pop_reqs_helper(&self.dev_reqs, true)); + } + + /// Create a new `pyproject.toml` file. 
+ pub fn write_file(&self, path: &Path) { + let file = path; + if file.exists() { + abort("`pyproject.toml` already exists") + } + + let mut result = String::new(); + + result.push_str("\n[tool.pyflow]\n"); + if let Some(name) = &self.name { + result.push_str(&("name = \"".to_owned() + name + "\"\n")); + } else { + // Give name, and a few other fields default values. + result.push_str(&("name = \"\"".to_owned() + "\n")); + } + if let Some(py_v) = &self.py_version { + result.push_str(&("py_version = \"".to_owned() + &py_v.to_string_no_patch() + "\"\n")); + } else { + result.push_str(&("py_version = \"3.8\"".to_owned() + "\n")); + } + if let Some(vers) = self.version.clone() { + result.push_str(&(format!("version = \"{}\"", vers.to_string() + "\n"))); + } else { + result.push_str("version = \"0.1.0\""); + result.push('\n'); + } + if !self.authors.is_empty() { + result.push_str("authors = [\""); + for (i, author) in self.authors.iter().enumerate() { + if i != 0 { + result.push_str(", "); + } + result.push_str(author); + } + result.push_str("\"]\n"); + } + + if let Some(v) = &self.description { + result.push_str(&(format!("description = \"{}\"", v) + "\n")); + } + if let Some(v) = &self.homepage { + result.push_str(&(format!("homepage = \"{}\"", v) + "\n")); + } + + // todo: More fields + + result.push('\n'); + result.push_str("[tool.pyflow.scripts]\n"); + for (name, mod_fn) in &self.scripts { + result.push_str(&(format!("{} = \"{}\"", name, mod_fn) + "\n")); + } + + result.push('\n'); + result.push_str("[tool.pyflow.dependencies]\n"); + for dep in &self.reqs { + result.push_str(&(dep.to_cfg_string() + "\n")); + } + + result.push('\n'); + result.push_str("[tool.pyflow.dev-dependencies]\n"); + for dep in &self.dev_reqs { + result.push_str(&(dep.to_cfg_string() + "\n")); + } + + result.push('\n'); // trailing newline + + if fs::write(file, result).is_err() { + abort("Problem writing `pyproject.toml`") + } + } +} diff --git a/src/util/mod.rs b/src/util/mod.rs index 
cbb493a..acf7c0d 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -1,5 +1,5 @@ pub mod deps; -pub mod pathes; +pub mod paths; pub mod prompts; mod os; diff --git a/src/util/paths.rs b/src/util/paths.rs new file mode 100644 index 0000000..249160a --- /dev/null +++ b/src/util/paths.rs @@ -0,0 +1,29 @@ +use std::path::PathBuf; + +pub fn pyflow_path() -> PathBuf { + directories::BaseDirs::new() + .expect("Problem finding base directory") + .data_dir() + .to_owned() + .join("pyflow") +} + +pub fn dep_cache_path(pyflow_path: &PathBuf) -> PathBuf { + pyflow_path.join("dependency_cache") +} + +pub fn script_env_path(pyflow_path: &PathBuf) -> PathBuf { + pyflow_path.join("script_envs") +} + +pub fn git_path(pyflow_path: &PathBuf) -> PathBuf { + pyflow_path.join("git") +} + +pub fn get_paths() -> (PathBuf, PathBuf, PathBuf, PathBuf) { + let pyflow_path = pyflow_path(); + let dep_cache_path = dep_cache_path(&pyflow_path); + let script_env_path = script_env_path(&pyflow_path); + let git_path = git_path(&pyflow_path); + (pyflow_path, dep_cache_path, script_env_path, git_path) +} From b3202c0ee219aaa146bb1f454cd0db9ea0a2e855 Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Sun, 28 Nov 2021 02:50:44 +0200 Subject: [PATCH 26/41] Format --- src/actions/mod.rs | 3 +-- src/py_versions.rs | 4 +--- src/pyproject/mod.rs | 6 +++++- src/util/mod.rs | 43 ++++++++++++++++++++----------------------- 4 files changed, 27 insertions(+), 29 deletions(-) diff --git a/src/actions/mod.rs b/src/actions/mod.rs index 2667c68..5ad9bbf 100644 --- a/src/actions/mod.rs +++ b/src/actions/mod.rs @@ -4,8 +4,7 @@ mod new; pub use install::install; pub use new::new; - pub const NEW_ERROR_MESSAGE: &str = indoc::indoc! {r#" Problem creating the project. This may be due to a permissions problem. If on linux, please try again with `sudo`. 
-"#}; \ No newline at end of file +"#}; diff --git a/src/py_versions.rs b/src/py_versions.rs index fedbeaa..4f7f9e3 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -110,9 +110,7 @@ impl From<(Version, Os)> for PyVers { unreachable!() } }, - _ => { - util::abort(unsupported) - } + _ => util::abort(unsupported), } } } diff --git a/src/pyproject/mod.rs b/src/pyproject/mod.rs index c009638..4dffbe8 100644 --- a/src/pyproject/mod.rs +++ b/src/pyproject/mod.rs @@ -2,7 +2,11 @@ use std::{collections::HashMap, fs, path::Path, str::FromStr}; use serde::Deserialize; -use crate::{dep_types::{Constraint, Req, Version}, files, pop_reqs_helper, util::{self, abort}}; +use crate::{ + dep_types::{Constraint, Req, Version}, + files, pop_reqs_helper, + util::{self, abort}, +}; /// A config, parsed from pyproject.toml #[derive(Clone, Debug, Default, Deserialize)] diff --git a/src/util/mod.rs b/src/util/mod.rs index acf7c0d..a04f521 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -20,6 +20,7 @@ use crate::{ use ini::Ini; use regex::Regex; +use std::fs; use std::io::{self, BufRead, BufReader, Read, Write}; use std::path::Component; use std::str::FromStr; @@ -228,7 +229,7 @@ pub fn find_installed(lib_path: &Path) -> Vec<(String, Version, Vec)> { let top_level = lib_path.join(folder_name).join("top_level.txt"); let mut tops = vec![]; - match std::fs::File::open(top_level) { + match fs::File::open(top_level) { Ok(f) => { for line in BufReader::new(f).lines().flatten() { tops.push(line); @@ -377,7 +378,7 @@ pub fn compare_names(name1: &str, name2: &str) -> bool { /// Extract the wheel or zip. 
/// From [this example](https://github.com/mvdnes/zip-rs/blob/master/examples/extract.rs#L32) pub fn extract_zip( - file: &std::fs::File, + file: &fs::File, out_path: &Path, rename: &Option<(String, String)>, package_names: &Option<(&str, &str)>, @@ -434,14 +435,14 @@ pub fn extract_zip( let outpath = out_path.join(extracted_file.unwrap()); if (&*file.name()).ends_with('/') { - std::fs::create_dir_all(&outpath).unwrap(); + fs::create_dir_all(&outpath).unwrap(); } else { if let Some(p) = outpath.parent() { if !p.exists() { - std::fs::create_dir_all(&p).unwrap(); + fs::create_dir_all(&p).unwrap(); } } - let mut outfile = std::fs::File::create(&outpath).unwrap(); + let mut outfile = fs::File::create(&outpath).unwrap(); io::copy(&mut file, &mut outfile).unwrap(); } @@ -451,14 +452,14 @@ pub fn extract_zip( use std::os::unix::fs::PermissionsExt; if let Some(mode) = file.unix_mode() { - std::fs::set_permissions(&outpath, std::fs::Permissions::from_mode(mode)).unwrap(); + fs::set_permissions(&outpath, fs::Permissions::from_mode(mode)).unwrap(); } } } } pub fn unpack_tar_xz(archive_path: &Path, dest: &Path) { - let archive_bytes = std::fs::read(archive_path).expect("Problem reading archive as bytes"); + let archive_bytes = fs::read(archive_path).expect("Problem reading archive as bytes"); let mut tar: Vec = Vec::new(); let mut decompressor = XzDecoder::new(&archive_bytes[..]); @@ -527,7 +528,7 @@ pub fn find_or_create_venv( #[cfg(target_os = "linux")] { - let vers_path = std::fs::canonicalize(vers_path); + let vers_path = fs::canonicalize(vers_path); let vers_path = match vers_path { Ok(path) => path, Err(error) => { @@ -543,15 +544,13 @@ pub fn find_or_create_venv( #[cfg(target_os = "macos")] { - let vers_path = std::fs::canonicalize(vers_path); + let vers_path = fs::canonicalize(vers_path); let vers_path = match vers_path { Ok(path) => path, - Err(error) => { - abort(&format!( - "Problem converting path to absolute path: {:?}", - error - )) - } + Err(error) => 
abort(&format!( + "Problem converting path to absolute path: {:?}", + error + )), }; (vers_path, py_vers) } @@ -739,9 +738,9 @@ pub fn find_first_file(path: &Path) -> PathBuf { } /// Mainly to avoid repeating error-handling code. -pub fn open_archive(path: &Path) -> std::fs::File { +pub fn open_archive(path: &Path) -> fs::File { // We must re-open the file after computing the hash. - if let Ok(f) = std::fs::File::open(&path) { + if let Ok(f) = fs::File::open(&path) { f } else { abort(&format!( @@ -758,7 +757,7 @@ pub fn parse_metadata(path: &Path) -> Metadata { let mut result = Metadata::default(); - let data = std::fs::read_to_string(path).expect("Problem reading METADATA"); + let data = fs::read_to_string(path).expect("Problem reading METADATA"); for line in data.lines() { if let Some(caps) = re("Version").captures(line) { let val = caps.get(1).unwrap().as_str(); @@ -849,9 +848,7 @@ pub fn canon_join(path: &Path, extend: &str) -> PathBuf { let ex_path = Path::new(extend); let canon = match ex_path.canonicalize() { Ok(c) => c, - Err(e) => { - abort(&format!("{}\n\"{}\"", e, extend)) - } + Err(e) => abort(&format!("{}\n\"{}\"", e, extend)), }; let mut new_path = path.to_path_buf(); @@ -896,14 +893,14 @@ pub fn process_reqs(reqs: Vec, git_path: &Path, paths: &util::Paths) -> Vec /// Read dependency data from a lock file. pub fn read_lock(path: &Path) -> Result> { - let data = std::fs::read_to_string(path)?; + let data = fs::read_to_string(path)?; Ok(toml::from_str(&data)?) } /// Write dependency data to a lock file. 
pub fn write_lock(path: &Path, data: &Lock) -> Result<(), Box> { let data = toml::to_string(data)?; - std::fs::write(path, data)?; + fs::write(path, data)?; Ok(()) } From 9e6c0b8ac87f980faf91d3ce9b23c577ef88eb34 Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Mon, 29 Nov 2021 18:20:25 +0200 Subject: [PATCH 27/41] Update more actions --- ROADMAP.md | 11 +++ src/actions/clear.rs | 65 +++++++++++++ src/actions/init.rs | 28 ++++++ src/actions/install.rs | 10 +- src/actions/list.rs | 60 ++++++++++++ src/actions/mod.rs | 8 ++ src/actions/package.rs | 30 ++++++ src/dep_resolution.rs | 40 ++++---- src/main.rs | 208 ++++------------------------------------- src/pyproject/mod.rs | 53 ++++++++++- src/script.rs | 11 +-- src/util/deps.rs | 29 +++++- src/util/mod.rs | 65 ++----------- src/util/paths.rs | 8 +- 14 files changed, 334 insertions(+), 292 deletions(-) create mode 100644 ROADMAP.md create mode 100644 src/actions/clear.rs create mode 100644 src/actions/init.rs create mode 100644 src/actions/list.rs create mode 100644 src/actions/package.rs diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 0000000..b52ed48 --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,11 @@ +# Roadmap + +WIP: This file defines roadmap for project + +- [ ]: Custom build system +- [ ]: Fix pydeps caching timeout +- [ ]: Make binaries work on any linux distro +- [ ]: Mac binaries for pyflow and python +- [ ]: "fatal: destination path exists" when using git deps +- [ ]: add hash and git/path info to locks +- [ ]: clear download git source as an option. 
In general, git install is a mess diff --git a/src/actions/clear.rs b/src/actions/clear.rs new file mode 100644 index 0000000..dd14296 --- /dev/null +++ b/src/actions/clear.rs @@ -0,0 +1,65 @@ +use std::{fs, path::Path}; + +use crate::util::{self, abort}; + +#[derive(Clone)] +enum ClearChoice { + Dependencies, + ScriptEnvs, + PyInstalls, + // Global, + All, +} + +impl ToString for ClearChoice { + fn to_string(&self) -> String { + "".into() + } +} + +/// Clear `Pyflow`'s cache. Allow the user to select which parts to clear based on a prompt. +pub fn clear(pyflow_path: &Path, cache_path: &Path, script_env_path: &Path) { + let result = util::prompts::list( + "Which cached items would you like to clear?", + "choice", + &[ + ("Downloaded dependencies".into(), ClearChoice::Dependencies), + ( + "Standalone-script environments".into(), + ClearChoice::ScriptEnvs, + ), + ("Python installations".into(), ClearChoice::PyInstalls), + ("All of the above".into(), ClearChoice::All), + ], + false, + ); + + // todo: DRY + match result.1 { + ClearChoice::Dependencies => { + if fs::remove_dir_all(&cache_path).is_err() { + abort(&format!( + "Problem removing the dependency-cache path: {:?}", + cache_path + )); + } + } + ClearChoice::ScriptEnvs => { + if fs::remove_dir_all(&script_env_path).is_err() { + abort(&format!( + "Problem removing the script env path: {:?}", + script_env_path + )); + } + } + ClearChoice::PyInstalls => {} + ClearChoice::All => { + if fs::remove_dir_all(&pyflow_path).is_err() { + abort(&format!( + "Problem removing the Pyflow path: {:?}", + pyflow_path + )); + } + } + } +} diff --git a/src/actions/init.rs b/src/actions/init.rs new file mode 100644 index 0000000..0b2a23f --- /dev/null +++ b/src/actions/init.rs @@ -0,0 +1,28 @@ +use std::path::PathBuf; + +use termcolor::Color; + +use crate::{ + files, + pyproject::Config, + util::{self, abort}, +}; + +pub fn init(cfg_filename: &str) { + let cfg_path = PathBuf::from(cfg_filename); + if cfg_path.exists() { + 
abort("pyproject.toml already exists - not overwriting.") + } + + let mut cfg = match PathBuf::from("Pipfile").exists() { + true => Config::from_pipfile(&PathBuf::from("Pipfile")).unwrap_or_default(), + false => Config::default(), + }; + + cfg.py_version = Some(util::prompts::py_vers()); + + files::parse_req_dot_text(&mut cfg, &PathBuf::from("requirements.txt")); + + cfg.write_file(&cfg_path); + util::print_color("Created `pyproject.toml`", Color::Green); +} diff --git a/src/actions/install.rs b/src/actions/install.rs index b65b136..2c7f55f 100644 --- a/src/actions/install.rs +++ b/src/actions/install.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use termcolor::Color; @@ -11,9 +11,9 @@ use crate::{ use util::deps::sync; pub fn install( - cfg_path: &PathBuf, + cfg_path: &Path, cfg: &Config, - git_path: &PathBuf, + git_path: &Path, paths: &Paths, found_lock: bool, packages: &Vec, @@ -24,7 +24,7 @@ pub fn install( lock_path: &PathBuf, ) { if !cfg_path.exists() { - cfg.write_file(&cfg_path); + cfg.write_file(cfg_path); } if found_lock { @@ -32,7 +32,7 @@ pub fn install( } // Merge reqs added via cli with those in `pyproject.toml`. - let (updated_reqs, up_dev_reqs) = util::merge_reqs(packages, dev, &cfg, &cfg_path); + let (updated_reqs, up_dev_reqs) = util::merge_reqs(packages, dev, cfg, cfg_path); let dont_uninstall = util::find_dont_uninstall(&updated_reqs, &up_dev_reqs); diff --git a/src/actions/list.rs b/src/actions/list.rs new file mode 100644 index 0000000..efb6b21 --- /dev/null +++ b/src/actions/list.rs @@ -0,0 +1,60 @@ +use std::path::Path; + +use termcolor::Color; + +use crate::{ + dep_types::Req, + util::{self, print_color, print_color_}, +}; + +/// List all installed dependencies and console scripts, by examining the `libs` and `bin` folders. +/// Also include path requirements, which won't appear in the `lib` folder. 
+pub fn list(lib_path: &Path, path_reqs: &[Req]) { + let installed = util::find_installed(lib_path); + let scripts = find_console_scripts(&lib_path.join("../bin")); + + if installed.is_empty() { + print_color("No packages are installed.", Color::Blue); // Dark + } else { + print_color("These packages are installed:", Color::Blue); // Dark + for (name, version, _tops) in installed { + print_color_(&name, Color::Cyan); + print_color(&format!("=={}", version.to_string_color()), Color::White); + } + for req in path_reqs { + print_color_(&req.name, Color::Cyan); + print_color( + &format!(", at path: {}", req.path.as_ref().unwrap()), + Color::White, + ); + } + } + + if scripts.is_empty() { + print_color("\nNo console scripts are installed.", Color::Blue); // Dark + } else { + print_color("\nThese console scripts are installed:", Color::Blue); // Dark + for script in scripts { + print_color(&script, Color::Cyan); // Dark + } + } +} + +/// Find console scripts installed, by browsing the (custom) bin folder +pub fn find_console_scripts(bin_path: &Path) -> Vec { + let mut result = vec![]; + if !bin_path.exists() { + return vec![]; + } + + for entry in bin_path + .read_dir() + .expect("Trouble opening bin path") + .flatten() + { + if entry.file_type().unwrap().is_file() { + result.push(entry.file_name().to_str().unwrap().to_owned()) + } + } + result +} diff --git a/src/actions/mod.rs b/src/actions/mod.rs index 5ad9bbf..55eea45 100644 --- a/src/actions/mod.rs +++ b/src/actions/mod.rs @@ -1,8 +1,16 @@ +mod clear; +mod init; mod install; +mod list; mod new; +mod package; +pub use clear::clear; +pub use init::init; pub use install::install; +pub use list::list; pub use new::new; +pub use package::package; pub const NEW_ERROR_MESSAGE: &str = indoc::indoc! {r#" Problem creating the project. This may be due to a permissions problem. 
diff --git a/src/actions/package.rs b/src/actions/package.rs new file mode 100644 index 0000000..9709344 --- /dev/null +++ b/src/actions/package.rs @@ -0,0 +1,30 @@ +use std::path::Path; + +use crate::{ + build, + dep_types::{LockPackage, Version}, + util::{self, deps::sync}, +}; + +pub fn package( + paths: &util::Paths, + lockpacks: &[LockPackage], + os: util::Os, + py_vers: &Version, + lock_path: &Path, + cfg: &crate::Config, + extras: &[String], +) { + sync( + &paths, + &lockpacks, + &cfg.reqs, + &cfg.dev_reqs, + &util::find_dont_uninstall(&cfg.reqs, &cfg.dev_reqs), + os, + py_vers, + lock_path, + ); + + build::build(&lockpacks, &paths, &cfg, &extras) +} diff --git a/src/dep_resolution.rs b/src/dep_resolution.rs index 74d4670..4870b08 100644 --- a/src/dep_resolution.rs +++ b/src/dep_resolution.rs @@ -219,13 +219,11 @@ fn guess_graph( for req in &reqs { // Find matching packages for this requirement. - let query_result: Vec<&ReqCache> = query_data + let query_result = query_data .iter() - .filter(|d| util::compare_names(d.name.as_ref().unwrap(), &req.name)) - .collect(); + .filter(|d| util::compare_names(d.name.as_ref().unwrap(), &req.name)); let deps: Vec = query_result - .into_iter() // Our query data should already be compat, but QC here. .filter_map(|r| { let py_constraint = Constraint::from_str_multiple( @@ -342,7 +340,7 @@ pub(super) mod res { .filter_map(|x: &Req| { if is_compat(&r.constraints, &x.constraints[0].version) { if let Some(ref pv) = x.python_version { - if is_compat(&pv, &py_vers) { + if is_compat(pv, &py_vers) { Some(x.constraints[0].version.clone()) } else { None @@ -408,7 +406,7 @@ pub(super) mod res { // the parsed version to the key. 
let mut version_map = HashMap::new(); for key in data.releases.keys() { - if let Ok(ver) = Version::from_str(&key) { + if let Ok(ver) = Version::from_str(key) { version_map.insert(ver, key.as_str()); } else if cfg!(debug_assertions) { eprintln!("Unable to parse \"{}\" version \"{}\"; skipped.", name, key); @@ -778,25 +776,21 @@ pub(super) mod res { } // Generate dependencies here for all avail versions. - let unresolved_deps: Vec = versions - .iter() - .filter_map(|vers| { - if inter.iter().any(|i| i.0 <= *vers && *vers <= i.1) { - Some(Dependency { - id: 0, // placeholder; we'll assign an id to the one we pick. - name: fmtd_name.clone(), - version: vers.clone(), - reqs: vec![], // todo - parent: 0, // todo - }) - } else { - None - } - }) - .collect(); + let unresolved_deps = versions.iter().filter_map(|vers| { + if inter.iter().any(|i| i.0 <= *vers && *vers <= i.1) { + Some(Dependency { + id: 0, // placeholder; we'll assign an id to the one we pick. + name: fmtd_name.clone(), + version: vers.clone(), + reqs: vec![], // todo + parent: 0, // todo + }) + } else { + None + } + }); let mut newest_unresolved = unresolved_deps - .into_iter() .max_by(|a, b| a.version.cmp(&b.version)) .unwrap(); diff --git a/src/main.rs b/src/main.rs index 9c40239..eb4d98c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,8 +1,5 @@ -#![allow(clippy::non_ascii_literal)] - -use crate::actions::new; use crate::cli_options::{ExternalCommand, ExternalSubcommands, Opt, SubCommand}; -use crate::dep_types::{Constraint, Lock, Package, Req, Version}; +use crate::dep_types::{Lock, Package, Req, Version}; use crate::pyproject::Config; use crate::util::abort; use crate::util::deps::sync; @@ -30,66 +27,15 @@ mod pyproject; mod script; mod util; -// todo: -// Custom build system -// Fix pydeps caching timeout -// Make binaries work on any linux distro -// Mac binaries for pyflow and python -// "fatal: destination path exists" when using git deps -// add hash and git/path info to locks -// clear download 
git source as an option. In general, git install is a mess - type PackToInstall = ((String, Version), Option<(u32, String)>); // ((Name, Version), (parent id, rename name)) -/// Reduce repetition between reqs and dev reqs when populating reqs of path reqs. -fn pop_reqs_helper(reqs: &[Req], dev: bool) -> Vec { - let mut result = vec![]; - for req in reqs.iter().filter(|r| r.path.is_some()) { - let req_path = PathBuf::from(req.path.clone().unwrap()); - let pyproj = req_path.join("pyproject.toml"); - let req_txt = req_path.join("requirements.txt"); - // let pipfile = req_path.join("Pipfile"); - - let mut dummy_cfg = Config::default(); - - if req_txt.exists() { - files::parse_req_dot_text(&mut dummy_cfg, &req_txt); - } - - // if pipfile.exists() { - // files::parse_pipfile(&mut dummy_cfg, &pipfile); - // } - - if dev { - result.append(&mut dummy_cfg.dev_reqs); - } else { - result.append(&mut dummy_cfg.reqs); - } - - // We don't parse `setup.py`, since it involves running arbitrary Python code. - - if pyproj.exists() { - let mut req_cfg = Config::from_file(&PathBuf::from(&pyproj)) - .unwrap_or_else(|| panic!("Problem parsing`pyproject.toml`: {:?}", &pyproj)); - result.append(&mut req_cfg.reqs) - } +const CFG_FILENAME: &str = "pyproject.toml"; +const LOCK_FILENAME: &str = "pyflow.lock"; - // Check for metadata of a built wheel - for folder_name in util::find_folders(&req_path) { - // todo: Dry from `util` and `install`. 
- let re_dist = Regex::new(r"^(.*?)-(.*?)\.dist-info$").unwrap(); - if re_dist.captures(&folder_name).is_some() { - let metadata_path = req_path.join(folder_name).join("METADATA"); - let mut metadata = util::parse_metadata(&metadata_path); +/////////////////////////////////////////////////////////////////////////////// +/// Global multithreaded variables part +/////////////////////////////////////////////////////////////////////////////// - result.append(&mut metadata.requires_dist); - } - } - } - result -} - -/// Cli Config to hold command line options struct CliConfig { pub color_choice: ColorChoice, } @@ -115,31 +61,9 @@ thread_local! { static CLI_CONFIG: RwLock> = RwLock::new(Default::default()); } -fn parse_lockpack_rename(rename: &str) -> (u32, String) { - let re = Regex::new(r"^(\d+)\s(.*)$").unwrap(); - let caps = re - .captures(rename) - .expect("Problem reading lock file rename"); - - let id = caps.get(1).unwrap().as_str().parse::().unwrap(); - let name = caps.get(2).unwrap().as_str().to_owned(); - - (id, name) -} - -fn already_locked(locked: &[Package], name: &str, constraints: &[Constraint]) -> bool { - let mut result = true; - for constr in constraints.iter() { - if !locked - .iter() - .any(|p| util::compare_names(&p.name, name) && constr.is_compatible(&p.version)) - { - result = false; - break; - } - } - result -} +/////////////////////////////////////////////////////////////////////////////// +/// \ Global multithreaded variables part +/////////////////////////////////////////////////////////////////////////////// /// Execute a python CLI tool, either specified in `pyproject.toml`, or in a dependency. fn run_cli_tool( @@ -214,71 +138,6 @@ fn run_cli_tool( } } -#[derive(Clone)] -enum ClearChoice { - Dependencies, - ScriptEnvs, - PyInstalls, - // Global, - All, -} - -impl ToString for ClearChoice { - fn to_string(&self) -> String { - "".into() - } -} - -/// Clear `Pyflow`'s cache. Allow the user to select which parts to clear based on a prompt. 
-fn clear(pyflow_path: &Path, cache_path: &Path, script_env_path: &Path) { - let result = util::prompts::list( - "Which cached items would you like to clear?", - "choice", - &[ - ("Downloaded dependencies".into(), ClearChoice::Dependencies), - ( - "Standalone-script environments".into(), - ClearChoice::ScriptEnvs, - ), - ("Python installations".into(), ClearChoice::PyInstalls), - ("All of the above".into(), ClearChoice::All), - ], - false, - ); - - // todo: DRY - match result.1 { - ClearChoice::Dependencies => { - if fs::remove_dir_all(&cache_path).is_err() { - abort(&format!( - "Problem removing the dependency-cache path: {:?}", - cache_path - )); - } - } - ClearChoice::ScriptEnvs => { - if fs::remove_dir_all(&script_env_path).is_err() { - abort(&format!( - "Problem removing the script env path: {:?}", - script_env_path - )); - } - } - ClearChoice::PyInstalls => {} - ClearChoice::All => { - if fs::remove_dir_all(&pyflow_path).is_err() { - abort(&format!( - "Problem removing the Pyflow path: {:?}", - pyflow_path - )); - } - } - } -} - -const CFG_FILENAME: &str = "pyproject.toml"; -const LOCK_FILENAME: &str = "pyflow.lock"; - /// We process input commands in a deliberate order, to ensure the required, and only the required /// setup steps are accomplished before each. 
fn main() { @@ -317,7 +176,7 @@ fn main() { }, }, SubCommand::New { name } => { - if new(&name).is_err() { + if actions::new(name).is_err() { abort(actions::NEW_ERROR_MESSAGE); } @@ -327,25 +186,7 @@ fn main() { ); return; } - SubCommand::Init => { - let cfg_path = PathBuf::from(CFG_FILENAME); - if cfg_path.exists() { - abort("pyproject.toml already exists - not overwriting.") - } - - let mut cfg = match PathBuf::from("Pipfile").exists() { - true => Config::from_pipfile(&PathBuf::from("Pipfile")).unwrap_or_default(), - false => Config::default(), - }; - - cfg.py_version = Some(util::prompts::py_vers()); - - files::parse_req_dot_text(&mut cfg, &PathBuf::from("requirements.txt")); - - cfg.write_file(&cfg_path); - util::print_color("Created `pyproject.toml`", Color::Green); - // Don't return here; let the normal logic create the venv now. - } + SubCommand::Init => actions::init(CFG_FILENAME), // Don't return here; let the normal logic create the venv now. // TODO: Move branches to omitted match _ => {} } @@ -415,7 +256,7 @@ fn main() { // Don't return; now that we've changed the cfg version, let's run the normal flow. 
} SubCommand::Clear {} => { - clear(&pyflow_path, &dep_cache_path, &script_env_path); + actions::clear(&pyflow_path, &dep_cache_path, &script_env_path); return; } SubCommand::List => { @@ -468,7 +309,7 @@ fn main() { } let mut found_lock = false; - let lock = match util::read_lock(&lock_path) { + let lock = match util::read_lock(lock_path) { Ok(l) => { found_lock = true; l @@ -486,7 +327,7 @@ fn main() { &util::find_dont_uninstall(&cfg.reqs, &cfg.dev_reqs), os, &py_vers, - &lock_path, + lock_path, ); // Now handle subcommands that require info about the environment @@ -532,31 +373,16 @@ fn main() { &[], os, &py_vers, - &lock_path, + lock_path, ); util::print_color("Uninstall complete", Color::Green); } SubCommand::Package { extras } => { - sync( - &paths, - &lockpacks, - &cfg.reqs, - &cfg.dev_reqs, - &util::find_dont_uninstall(&cfg.reqs, &cfg.dev_reqs), - os, - &py_vers, - &lock_path, - ); - - build::build(&lockpacks, &paths, &cfg, &extras) + actions::package(&paths, &lockpacks, os, &py_vers, lock_path, &cfg, &extras) } SubCommand::Publish {} => build::publish(&paths.bin, &cfg), - - // SubCommand::M { args } => { - // run_cli_tool(&paths.lib, &paths.bin, &vers_path, &cfg, args); - // } - SubCommand::List {} => util::show_installed( + SubCommand::List {} => actions::list( &paths.lib, &[cfg.reqs.as_slice(), cfg.dev_reqs.as_slice()] .concat() diff --git a/src/pyproject/mod.rs b/src/pyproject/mod.rs index 4dffbe8..c0a863f 100644 --- a/src/pyproject/mod.rs +++ b/src/pyproject/mod.rs @@ -1,10 +1,11 @@ -use std::{collections::HashMap, fs, path::Path, str::FromStr}; +use std::{collections::HashMap, fs, path::{Path, PathBuf}, str::FromStr}; +use regex::Regex; use serde::Deserialize; use crate::{ dep_types::{Constraint, Req, Version}, - files, pop_reqs_helper, + files, util::{self, abort}, }; @@ -399,3 +400,51 @@ impl Config { } } } + +/// Reduce repetition between reqs and dev reqs when populating reqs of path reqs. 
+fn pop_reqs_helper(reqs: &[Req], dev: bool) -> Vec { + let mut result = vec![]; + for req in reqs.iter().filter(|r| r.path.is_some()) { + let req_path = PathBuf::from(req.path.clone().unwrap()); + let pyproj = req_path.join("pyproject.toml"); + let req_txt = req_path.join("requirements.txt"); + // let pipfile = req_path.join("Pipfile"); + + let mut dummy_cfg = Config::default(); + + if req_txt.exists() { + files::parse_req_dot_text(&mut dummy_cfg, &req_txt); + } + + // if pipfile.exists() { + // files::parse_pipfile(&mut dummy_cfg, &pipfile); + // } + + if dev { + result.append(&mut dummy_cfg.dev_reqs); + } else { + result.append(&mut dummy_cfg.reqs); + } + + // We don't parse `setup.py`, since it involves running arbitrary Python code. + + if pyproj.exists() { + let mut req_cfg = Config::from_file(&PathBuf::from(&pyproj)) + .unwrap_or_else(|| panic!("Problem parsing`pyproject.toml`: {:?}", &pyproj)); + result.append(&mut req_cfg.reqs) + } + + // Check for metadata of a built wheel + for folder_name in util::find_folders(&req_path) { + // todo: Dry from `util` and `install`. 
+ let re_dist = Regex::new(r"^(.*?)-(.*?)\.dist-info$").unwrap(); + if re_dist.captures(&folder_name).is_some() { + let metadata_path = req_path.join(folder_name).join("METADATA"); + let mut metadata = util::parse_metadata(&metadata_path); + + result.append(&mut metadata.requires_dist); + } + } + } + result +} diff --git a/src/script.rs b/src/script.rs index ee0defc..3cd16c9 100644 --- a/src/script.rs +++ b/src/script.rs @@ -34,7 +34,7 @@ pub fn run_script( // todo: Consider a metadata file, but for now, we'll use folders // let scripts_data_path = script_env_path.join("scripts.toml"); - let env_path = util::canon_join(script_env_path, &filename); + let env_path = util::canon_join(script_env_path, filename); if !env_path.exists() { fs::create_dir_all(&env_path).expect("Problem creating environment for the script"); } @@ -149,7 +149,7 @@ fn check_for_specified_py_vers(script: &str) -> Option { let re = Regex::new(r#"^__python__\s*=\s*"(.*?)"$"#).unwrap(); for line in script.lines() { - if let Some(capture) = re.captures(&line) { + if let Some(capture) = re.captures(line) { let specification = capture.get(1).unwrap().as_str(); let (_, version) = parse_version(specification).unwrap(); match version { @@ -180,11 +180,10 @@ fn find_deps_from_script(script: &str) -> Vec { let mut result = vec![]; for line in script.lines() { - if let Some(c) = re.captures(&line) { + if let Some(c) = re.captures(line) { let deps_list = c.get(1).unwrap().as_str().to_owned(); - let deps: Vec<&str> = deps_list.split(',').collect(); - result = deps - .into_iter() + result = deps_list + .split(',') .map(|d| { d.to_owned() .replace(" ", "") diff --git a/src/util/deps.rs b/src/util/deps.rs index 97f0d26..35fbac1 100644 --- a/src/util/deps.rs +++ b/src/util/deps.rs @@ -4,10 +4,9 @@ use regex::Regex; use termcolor::Color; use crate::{ - already_locked, dep_resolution::res, dep_types::{Constraint, Lock, LockPackage, Package, Rename, Req, ReqType, Version}, - install, parse_lockpack_rename, + 
install, util::{self, abort}, PackToInstall, }; @@ -288,3 +287,29 @@ fn sync_deps( } } } + +fn already_locked(locked: &[Package], name: &str, constraints: &[Constraint]) -> bool { + let mut result = true; + for constr in constraints.iter() { + if !locked + .iter() + .any(|p| util::compare_names(&p.name, name) && constr.is_compatible(&p.version)) + { + result = false; + break; + } + } + result +} + +fn parse_lockpack_rename(rename: &str) -> (u32, String) { + let re = Regex::new(r"^(\d+)\s(.*)$").unwrap(); + let caps = re + .captures(rename) + .expect("Problem reading lock file rename"); + + let id = caps.get(1).unwrap().as_str().parse::().unwrap(); + let name = caps.get(2).unwrap().as_str().to_owned(); + + (id, name) +} diff --git a/src/util/mod.rs b/src/util/mod.rs index a04f521..212be45 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -172,39 +172,6 @@ pub fn set_pythonpath(paths: &[PathBuf]) { env::set_var("PYTHONPATH", formatted_paths); } -/// List all installed dependencies and console scripts, by examining the `libs` and `bin` folders. -/// Also include path requirements, which won't appear in the `lib` folder. 
-pub fn show_installed(lib_path: &Path, path_reqs: &[Req]) { - let installed = find_installed(lib_path); - let scripts = find_console_scripts(&lib_path.join("../bin")); - - if installed.is_empty() { - print_color("No packages are installed.", Color::Blue); // Dark - } else { - print_color("These packages are installed:", Color::Blue); // Dark - for (name, version, _tops) in installed { - print_color_(&name, Color::Cyan); - print_color(&format!("=={}", version.to_string_color()), Color::White); - } - for req in path_reqs { - print_color_(&req.name, Color::Cyan); - print_color( - &format!(", at path: {}", req.path.as_ref().unwrap()), - Color::White, - ); - } - } - - if scripts.is_empty() { - print_color("\nNo console scripts are installed.", Color::Blue); // Dark - } else { - print_color("\nThese console scripts are installed:", Color::Blue); // Dark - for script in scripts { - print_color(&script, Color::Cyan); // Dark - } - } -} - /// Find the packages installed, by browsing the lib folder for metadata. 
/// Returns package-name, version, folder names pub fn find_installed(lib_path: &Path) -> Vec<(String, Version, Vec)> { @@ -214,7 +181,7 @@ pub fn find_installed(lib_path: &Path) -> Vec<(String, Version, Vec)> { let mut result = vec![]; - for folder_name in &find_folders(&lib_path) { + for folder_name in &find_folders(lib_path) { let re_dist = Regex::new(r"^(.*?)-(.*?)\.dist-info$").unwrap(); if let Some(caps) = re_dist.captures(folder_name) { @@ -243,26 +210,6 @@ pub fn find_installed(lib_path: &Path) -> Vec<(String, Version, Vec)> { } result } - -/// Find console scripts installed, by browsing the (custom) bin folder -pub fn find_console_scripts(bin_path: &Path) -> Vec { - let mut result = vec![]; - if !bin_path.exists() { - return vec![]; - } - - for entry in bin_path - .read_dir() - .expect("Trouble opening bin path") - .flatten() - { - if entry.file_type().unwrap().is_file() { - result.push(entry.file_name().to_str().unwrap().to_owned()) - } - } - result -} - /// Handle reqs added via the CLI. 
Result is (normal reqs, dev reqs) pub fn merge_reqs( added: &[String], @@ -355,12 +302,12 @@ pub fn merge_reqs( if dev { if !added_reqs_unique.is_empty() { - files::add_reqs_to_cfg(&cfg_path, &[], &added_reqs_unique); + files::add_reqs_to_cfg(cfg_path, &[], &added_reqs_unique); } (cfg.reqs.clone(), result) } else { if !added_reqs_unique.is_empty() { - files::add_reqs_to_cfg(&cfg_path, &added_reqs_unique, &[]); + files::add_reqs_to_cfg(cfg_path, &added_reqs_unique, &[]); } (result, cfg.dev_reqs.clone()) } @@ -839,7 +786,7 @@ pub(crate) fn check_command_output_with(output: &process::Output, f: impl Fn(&st if !output.status.success() { let stderr = std::str::from_utf8(&output.stderr).expect("building string from command output"); - f(&stderr) + f(stderr) } } @@ -874,8 +821,8 @@ pub fn process_reqs(reqs: Vec, git_path: &Path, paths: &util::Paths) -> Vec &req.name, // util::GitPath::Git(req.git.clone().unwrap()), &req.git.clone().unwrap(), - &git_path, - &paths, + git_path, + paths, ); git_reqs.append(&mut metadata.requires_dist); } diff --git a/src/util/paths.rs b/src/util/paths.rs index 249160a..b06ea74 100644 --- a/src/util/paths.rs +++ b/src/util/paths.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +use std::path::{Path, PathBuf}; pub fn pyflow_path() -> PathBuf { directories::BaseDirs::new() @@ -8,15 +8,15 @@ pub fn pyflow_path() -> PathBuf { .join("pyflow") } -pub fn dep_cache_path(pyflow_path: &PathBuf) -> PathBuf { +pub fn dep_cache_path(pyflow_path: &Path) -> PathBuf { pyflow_path.join("dependency_cache") } -pub fn script_env_path(pyflow_path: &PathBuf) -> PathBuf { +pub fn script_env_path(pyflow_path: &Path) -> PathBuf { pyflow_path.join("script_envs") } -pub fn git_path(pyflow_path: &PathBuf) -> PathBuf { +pub fn git_path(pyflow_path: &Path) -> PathBuf { pyflow_path.join("git") } From 7415710a1827413ebf09a43f90a40fe84b316479 Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Mon, 29 Nov 2021 18:42:21 +0200 Subject: [PATCH 28/41] Fix clippy --- 
src/actions/install.rs | 12 ++++++------ src/actions/package.rs | 6 +++--- src/build.rs | 2 +- src/dep_resolution.rs | 7 +++++-- src/files.rs | 8 ++++---- src/pyproject/mod.rs | 7 ++++++- 6 files changed, 25 insertions(+), 17 deletions(-) diff --git a/src/actions/install.rs b/src/actions/install.rs index 2c7f55f..bc44be8 100644 --- a/src/actions/install.rs +++ b/src/actions/install.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::Path; use termcolor::Color; @@ -16,12 +16,12 @@ pub fn install( git_path: &Path, paths: &Paths, found_lock: bool, - packages: &Vec, + packages: &[String], dev: bool, - lockpacks: &Vec, + lockpacks: &[LockPackage], os: &Os, py_vers: &Version, - lock_path: &PathBuf, + lock_path: &Path, ) { if !cfg_path.exists() { cfg.write_file(cfg_path); @@ -36,8 +36,8 @@ pub fn install( let dont_uninstall = util::find_dont_uninstall(&updated_reqs, &up_dev_reqs); - let updated_reqs = process_reqs(updated_reqs, &git_path, paths); - let up_dev_reqs = process_reqs(up_dev_reqs, &git_path, paths); + let updated_reqs = process_reqs(updated_reqs, git_path, paths); + let up_dev_reqs = process_reqs(up_dev_reqs, git_path, paths); sync( paths, diff --git a/src/actions/package.rs b/src/actions/package.rs index 9709344..54c9590 100644 --- a/src/actions/package.rs +++ b/src/actions/package.rs @@ -16,8 +16,8 @@ pub fn package( extras: &[String], ) { sync( - &paths, - &lockpacks, + paths, + lockpacks, &cfg.reqs, &cfg.dev_reqs, &util::find_dont_uninstall(&cfg.reqs, &cfg.dev_reqs), @@ -26,5 +26,5 @@ pub fn package( lock_path, ); - build::build(&lockpacks, &paths, &cfg, &extras) + build::build(lockpacks, paths, cfg, extras) } diff --git a/src/build.rs b/src/build.rs index 533f482..c3b14a1 100644 --- a/src/build.rs +++ b/src/build.rs @@ -333,7 +333,7 @@ setuptools.setup( ]"#; let actual = serialize_py_list( - &vec![ + &[ "Programming Language :: Python :: 3".into(), "License :: OSI Approved :: MIT License".into(), "Operating System :: OS Independent".into(), 
diff --git a/src/dep_resolution.rs b/src/dep_resolution.rs index 4870b08..74162e2 100644 --- a/src/dep_resolution.rs +++ b/src/dep_resolution.rs @@ -96,8 +96,11 @@ fn guess_graph( let mut cleaned_reqs: Vec = vec![]; for req in reqs { - let names: Vec = cleaned_reqs.iter().map(|cr| cr.name.clone()).collect(); - if names.contains(&req.name) { + if cleaned_reqs + .iter() + .map(|cr| cr.name.clone()) + .any(|x| x == req.name) + { for c in cleaned_reqs.iter_mut() { if c.name == req.name { for constr in req.constraints.iter() { diff --git a/src/files.rs b/src/files.rs index a629711..610fa6c 100644 --- a/src/files.rs +++ b/src/files.rs @@ -427,7 +427,7 @@ dev_a = "^1.17.2" #[test] fn add_deps_baseline() { let actual = update_cfg( - BASELINE.into(), + BASELINE, &[ Req::new("b".into(), base_constrs()), Req::new("c".into(), base_constrs()), @@ -455,7 +455,7 @@ dev_b = "^0.0.1" #[test] fn add_deps_no_dev_deps_sect() { let actual = update_cfg( - BASELINE_NO_DEV_DEPS.into(), + BASELINE_NO_DEV_DEPS, &[ Req::new("b".into(), base_constrs()), Req::new("c".into(), base_constrs()), @@ -482,7 +482,7 @@ dev_b = "^0.0.1" #[test] fn add_deps_baseline_empty_deps() { let actual = update_cfg( - BASELINE_EMPTY_DEPS.into(), + BASELINE_EMPTY_DEPS, &[ Req::new("b".into(), base_constrs()), Req::new("c".into(), base_constrs()), @@ -509,7 +509,7 @@ dev_b = "^0.0.1" #[test] fn add_deps_dev_deps_baseline_no_deps_dev_deps() { let actual = update_cfg( - BASELINE_NO_DEPS_NO_DEV_DEPS.into(), + BASELINE_NO_DEPS_NO_DEV_DEPS, &[ Req::new("b".into(), base_constrs()), Req::new("c".into(), base_constrs()), diff --git a/src/pyproject/mod.rs b/src/pyproject/mod.rs index c0a863f..2ea76d1 100644 --- a/src/pyproject/mod.rs +++ b/src/pyproject/mod.rs @@ -1,4 +1,9 @@ -use std::{collections::HashMap, fs, path::{Path, PathBuf}, str::FromStr}; +use std::{ + collections::HashMap, + fs, + path::{Path, PathBuf}, + str::FromStr, +}; use regex::Regex; use serde::Deserialize; From 030d19ed3adc256c7ca0620a74bfdfb7661ca42f Mon 
Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Wed, 1 Dec 2021 10:46:41 +0200 Subject: [PATCH 29/41] Finish main refactoring --- src/actions/clear.rs | 3 +- src/actions/install.rs | 2 + src/actions/list.rs | 15 ++- src/actions/mod.rs | 11 +- src/actions/new.rs | 21 +++- src/actions/reset.rs | 17 +++ src/actions/run.rs | 72 +++++++++++ src/actions/switch.rs | 19 +++ src/main.rs | 258 +++++++++------------------------------ src/pyproject/current.rs | 56 +++++++++ src/pyproject/mod.rs | 14 +++ src/util/mod.rs | 7 +- src/util/os.rs | 2 +- 13 files changed, 286 insertions(+), 211 deletions(-) create mode 100644 src/actions/reset.rs create mode 100644 src/actions/run.rs create mode 100644 src/actions/switch.rs create mode 100644 src/pyproject/current.rs diff --git a/src/actions/clear.rs b/src/actions/clear.rs index dd14296..e7386f7 100644 --- a/src/actions/clear.rs +++ b/src/actions/clear.rs @@ -1,6 +1,6 @@ use std::{fs, path::Path}; -use crate::util::{self, abort}; +use crate::util::{self, abort, success}; #[derive(Clone)] enum ClearChoice { @@ -62,4 +62,5 @@ pub fn clear(pyflow_path: &Path, cache_path: &Path, script_env_path: &Path) { } } } + success("Cache is cleared") } diff --git a/src/actions/install.rs b/src/actions/install.rs index bc44be8..2050705 100644 --- a/src/actions/install.rs +++ b/src/actions/install.rs @@ -10,6 +10,8 @@ use crate::{ use util::deps::sync; +// TODO: Refactor this function +#[allow(clippy::too_many_arguments)] pub fn install( cfg_path: &Path, cfg: &Config, diff --git a/src/actions/list.rs b/src/actions/list.rs index efb6b21..5c0b900 100644 --- a/src/actions/list.rs +++ b/src/actions/list.rs @@ -1,15 +1,26 @@ -use std::path::Path; +use std::{path::Path, process}; use termcolor::Color; use crate::{ dep_types::Req, - util::{self, print_color, print_color_}, + pyproject, + util::{self, abort, print_color, print_color_}, }; /// List all installed dependencies and console scripts, by examining the `libs` and `bin` folders. 
/// Also include path requirements, which won't appear in the `lib` folder. pub fn list(lib_path: &Path, path_reqs: &[Req]) { + // This part check that project and venvs exists + let pcfg = pyproject::current::get_config().unwrap_or_else(|| process::exit(1)); + let num_venvs = util::find_venvs(&pcfg.pypackages_path).len(); + + if !pcfg.config_path.exists() && num_venvs == 0 { + abort("Can't find a project in this directory") + } else if num_venvs == 0 { + abort("There's no python environment set up for this project") + } + let installed = util::find_installed(lib_path); let scripts = find_console_scripts(&lib_path.join("../bin")); diff --git a/src/actions/mod.rs b/src/actions/mod.rs index 55eea45..890714b 100644 --- a/src/actions/mod.rs +++ b/src/actions/mod.rs @@ -4,6 +4,9 @@ mod install; mod list; mod new; mod package; +mod reset; +mod run; +mod switch; pub use clear::clear; pub use init::init; @@ -11,8 +14,6 @@ pub use install::install; pub use list::list; pub use new::new; pub use package::package; - -pub const NEW_ERROR_MESSAGE: &str = indoc::indoc! {r#" -Problem creating the project. This may be due to a permissions problem. -If on linux, please try again with `sudo`. -"#}; +pub use reset::reset; +pub use run::run; +pub use switch::switch; diff --git a/src/actions/new.rs b/src/actions/new.rs index 38778cf..f99b2a1 100644 --- a/src/actions/new.rs +++ b/src/actions/new.rs @@ -6,7 +6,11 @@ use std::{ use termcolor::Color; -use crate::{commands, util, Config}; +use crate::{ + commands, + util::{self, abort, success}, + Config, +}; const GITIGNORE_INIT: &str = indoc::indoc! {r##" # General Python ignores @@ -23,8 +27,21 @@ __pypackages__/ # Project ignores "##}; +pub const NEW_ERROR_MESSAGE: &str = indoc::indoc! {r#" +Problem creating the project. This may be due to a permissions problem. +If on linux, please try again with `sudo`. 
+"#}; + +pub fn new(name: &str) { + if new_internal(name).is_err() { + abort(NEW_ERROR_MESSAGE); + } + success(&format!("Created a new Python project named {}", name)) +} + +// TODO: Join this function after refactoring /// Create a template directory for a python project. -pub fn new(name: &str) -> Result<(), Box> { +fn new_internal(name: &str) -> Result<(), Box> { if !PathBuf::from(name).exists() { fs::create_dir_all(&format!("{}/{}", name, name.replace("-", "_")))?; fs::File::create(&format!("{}/{}/__init__.py", name, name.replace("-", "_")))?; diff --git a/src/actions/reset.rs b/src/actions/reset.rs new file mode 100644 index 0000000..ac0fb65 --- /dev/null +++ b/src/actions/reset.rs @@ -0,0 +1,17 @@ +use std::{fs, process}; + +use crate::{ + pyproject, + util::{abort, success}, +}; + +pub fn reset() { + let pcfg = pyproject::current::get_config().unwrap_or_else(|| process::exit(1)); + if (&pcfg.pypackages_path).exists() && fs::remove_dir_all(&pcfg.pypackages_path).is_err() { + abort("Problem removing `__pypackages__` directory") + } + if (&pcfg.lock_path).exists() && fs::remove_file(&pcfg.lock_path).is_err() { + abort("Problem removing `pyflow.lock`") + } + success("`__pypackages__` folder and `pyflow.lock` removed") +} diff --git a/src/actions/run.rs b/src/actions/run.rs new file mode 100644 index 0000000..5979631 --- /dev/null +++ b/src/actions/run.rs @@ -0,0 +1,72 @@ +use std::path::Path; + +use regex::Regex; + +use crate::{commands, pyproject::Config, util::abort}; + +/// Execute a python CLI tool, either specified in `pyproject.toml`, or in a dependency. 
+pub fn run(lib_path: &Path, bin_path: &Path, vers_path: &Path, cfg: &Config, args: Vec) { + // Allow both `pyflow run ipython` (args), and `pyflow ipython` (opt.script) + if args.is_empty() { + return; + } + + let name = if let Some(a) = args.get(0) { + a.clone() + } else { + abort("`run` must be followed by the script to run, eg `pyflow run black`"); + }; + + // If the script we're calling is specified in `pyproject.toml`, ensure it exists. + + // todo: Delete these scripts as required to sync with pyproject.toml. + let re = Regex::new(r"(.*?):(.*)").unwrap(); + + let mut specified_args: Vec = args.into_iter().skip(1).collect(); + + // If a script name is specified by by this project and a dependency, favor + // this project. + if let Some(s) = cfg.scripts.get(&name) { + let abort_msg = format!( + "Problem running the function {}, specified in `pyproject.toml`", + name, + ); + + if let Some(caps) = re.captures(s) { + let module = caps.get(1).unwrap().as_str(); + let function = caps.get(2).unwrap().as_str(); + let mut args_to_pass = vec![ + "-c".to_owned(), + format!(r#"import {}; {}.{}()"#, module, module, function), + ]; + + args_to_pass.append(&mut specified_args); + if commands::run_python(bin_path, &[lib_path.to_owned()], &args_to_pass).is_err() { + abort(&abort_msg); + } + } else { + abort(&format!("Problem parsing the following script: {:#?}. Must be in the format module:function_name", s)); + } + return; + } + // None => { + let abort_msg = format!( + "Problem running the CLI tool {}. Is it installed? 
\ + Try running `pyflow install {}`", + name, name + ); + let script_path = vers_path.join("bin").join(name); + if !script_path.exists() { + abort(&abort_msg); + } + + let mut args_to_pass = vec![script_path + .to_str() + .expect("Can't find script path") + .to_owned()]; + + args_to_pass.append(&mut specified_args); + if commands::run_python(bin_path, &[lib_path.to_owned()], &args_to_pass).is_err() { + abort(&abort_msg); + } +} diff --git a/src/actions/switch.rs b/src/actions/switch.rs new file mode 100644 index 0000000..11a8a52 --- /dev/null +++ b/src/actions/switch.rs @@ -0,0 +1,19 @@ +use std::{path::PathBuf, process}; + +use termcolor::Color; + +use crate::{files, pyproject, util}; + +/// Updates `pyproject.toml` with a new python version +pub fn switch(version: &str) { + let mut pcfg = pyproject::current::get_config().unwrap_or_else(|| process::exit(1)); + + let specified = util::fallible_v_parse(version); + pcfg.config.py_version = Some(specified.clone()); + files::change_py_vers(&PathBuf::from(&pcfg.config_path), &specified); + util::print_color( + &format!("Switched to Python version {}", specified.to_string()), + Color::Green, + ); + // Don't exit program here; now that we've changed the cfg version, let's run the normal flow. 
+} diff --git a/src/main.rs b/src/main.rs index eb4d98c..cded081 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,13 +1,13 @@ +use crate::actions::run; use crate::cli_options::{ExternalCommand, ExternalSubcommands, Opt, SubCommand}; use crate::dep_types::{Lock, Package, Req, Version}; -use crate::pyproject::Config; +use crate::pyproject::{Config, CFG_FILENAME}; use crate::util::abort; use crate::util::deps::sync; -use regex::Regex; +use std::process; use std::{ - env, fs, - path::{Path, PathBuf}, + path::PathBuf, sync::{Arc, RwLock}, }; @@ -29,9 +29,6 @@ mod util; type PackToInstall = ((String, Version), Option<(u32, String)>); // ((Name, Version), (parent id, rename name)) -const CFG_FILENAME: &str = "pyproject.toml"; -const LOCK_FILENAME: &str = "pyflow.lock"; - /////////////////////////////////////////////////////////////////////////////// /// Global multithreaded variables part /////////////////////////////////////////////////////////////////////////////// @@ -65,81 +62,11 @@ thread_local! { /// \ Global multithreaded variables part /////////////////////////////////////////////////////////////////////////////// -/// Execute a python CLI tool, either specified in `pyproject.toml`, or in a dependency. -fn run_cli_tool( - lib_path: &Path, - bin_path: &Path, - vers_path: &Path, - cfg: &Config, - args: Vec, -) { - // Allow both `pyflow run ipython` (args), and `pyflow ipython` (opt.script) - if args.is_empty() { - return; - } - - let name = if let Some(a) = args.get(0) { - a.clone() - } else { - abort("`run` must be followed by the script to run, eg `pyflow run black`"); - }; - - // If the script we're calling is specified in `pyproject.toml`, ensure it exists. - - // todo: Delete these scripts as required to sync with pyproject.toml. - let re = Regex::new(r"(.*?):(.*)").unwrap(); - - let mut specified_args: Vec = args.into_iter().skip(1).collect(); - - // If a script name is specified by by this project and a dependency, favor - // this project. 
- if let Some(s) = cfg.scripts.get(&name) { - let abort_msg = format!( - "Problem running the function {}, specified in `pyproject.toml`", - name, - ); - - if let Some(caps) = re.captures(s) { - let module = caps.get(1).unwrap().as_str(); - let function = caps.get(2).unwrap().as_str(); - let mut args_to_pass = vec![ - "-c".to_owned(), - format!(r#"import {}; {}.{}()"#, module, module, function), - ]; - - args_to_pass.append(&mut specified_args); - if commands::run_python(bin_path, &[lib_path.to_owned()], &args_to_pass).is_err() { - abort(&abort_msg); - } - } else { - abort(&format!("Problem parsing the following script: {:#?}. Must be in the format module:function_name", s)); - } - return; - } - // None => { - let abort_msg = format!( - "Problem running the CLI tool {}. Is it installed? \ - Try running `pyflow install {}`", - name, name - ); - let script_path = vers_path.join("bin").join(name); - if !script_path.exists() { - abort(&abort_msg); - } - - let mut args_to_pass = vec![script_path - .to_str() - .expect("Can't find script path") - .to_owned()]; - - args_to_pass.append(&mut specified_args); - if commands::run_python(bin_path, &[lib_path.to_owned()], &args_to_pass).is_err() { - abort(&abort_msg); - } -} - /// We process input commands in a deliberate order, to ensure the required, and only the required /// setup steps are accomplished before each. 
+#[allow(clippy::match_single_binding)] +#[allow(clippy::single_match)] +// TODO: Remove clippy::match_single_binding and clippy::single_match after full function refactoring fn main() { let (pyflow_path, dep_cache_path, script_env_path, git_path) = util::paths::get_paths(); let os = util::get_os(); @@ -166,6 +93,12 @@ fn main() { }; match &subcmd { + // Actions requires nothing to know about the project + SubCommand::New { name } => actions::new(name), + SubCommand::Init => actions::init(CFG_FILENAME), + SubCommand::Reset {} => actions::reset(), + SubCommand::Clear {} => actions::clear(&pyflow_path, &dep_cache_path, &script_env_path), + SubCommand::Switch { version } => actions::switch(version), SubCommand::External(ref x) => match ExternalCommand::from_opt(x.to_owned()) { ExternalCommand { cmd, args } => match cmd { ExternalSubcommands::Script => { @@ -175,121 +108,32 @@ fn main() { _ => (), }, }, - SubCommand::New { name } => { - if actions::new(name).is_err() { - abort(actions::NEW_ERROR_MESSAGE); - } - util::print_color( - &format!("Created a new Python project named {}", name), - Color::Green, - ); - return; - } - SubCommand::Init => actions::init(CFG_FILENAME), // Don't return here; let the normal logic create the venv now. // TODO: Move branches to omitted match _ => {} } - // We need access to the config from here on; throw an error if we can't find it. - let mut cfg_path = PathBuf::from(CFG_FILENAME); - if !&cfg_path.exists() { - // Try looking recursively in parent directories for a config file. 
- let recursion_limit = 8; // How my levels to look up - let mut current_level = env::current_dir().expect("Can't access current directory"); - for _ in 0..recursion_limit { - if let Some(parent) = current_level.parent() { - let parent_cfg_path = parent.join(CFG_FILENAME); - if parent_cfg_path.exists() { - cfg_path = parent_cfg_path; - break; - } - current_level = parent.to_owned(); - } - } - - if !&cfg_path.exists() { - // ie still can't find it after searching parents. - util::print_color( - "To get started, run `pyflow new projname` to create a project folder, or \ - `pyflow init` to start a project in this folder. For a list of what you can do, run \ - `pyflow help`.", - Color::Cyan, // Dark - ); - return; - } - // } - } - - // Base pypackages_path and lock_path on the `pyproject.toml` folder. - let proj_path = cfg_path.parent().expect("Can't find proj pathw via parent"); - let pypackages_path = proj_path.join("__pypackages__"); - let lock_path = &proj_path.join(LOCK_FILENAME); - - let mut cfg = Config::from_file(&cfg_path).unwrap_or_default(); - cfg.populate_path_subreqs(); - - // Run subcommands that don't require info about the environment. 
- match &subcmd { - SubCommand::Reset {} => { - if pypackages_path.exists() && fs::remove_dir_all(&pypackages_path).is_err() { - abort("Problem removing `__pypackages__` directory") - } - if lock_path.exists() && fs::remove_file(&lock_path).is_err() { - abort("Problem removing `pyflow.lock`") - } - util::print_color( - "`__pypackages__` folder and `pyflow.lock` removed", - Color::Green, - ); - return; - } - SubCommand::Switch { version } => { - // Updates `pyproject.toml` with a new python version - let specified = util::fallible_v_parse(&version.clone()); - cfg.py_version = Some(specified.clone()); - files::change_py_vers(&PathBuf::from(&cfg_path), &specified); - util::print_color( - &format!("Switched to Python version {}", specified.to_string()), - Color::Green, - ); - // Don't return; now that we've changed the cfg version, let's run the normal flow. - } - SubCommand::Clear {} => { - actions::clear(&pyflow_path, &dep_cache_path, &script_env_path); - return; - } - SubCommand::List => { - let num_venvs = util::find_venvs(&pypackages_path).len(); - if !cfg_path.exists() && num_venvs == 0 { - abort("Can't find a project in this directory") - } else if num_venvs == 0 { - util::print_color( - "There's no python environment set up for this project", - Color::Green, - ); - return; - } - } - _ => (), - } - - let cfg_vers = if let Some(v) = cfg.py_version.clone() { + let pcfg = pyproject::current::get_config().unwrap_or_else(|| process::exit(1)); + let cfg_vers = if let Some(v) = pcfg.config.py_version.clone() { v } else { let specified = util::prompts::py_vers(); - if !cfg_path.exists() { - cfg.write_file(&cfg_path); + if !pcfg.config_path.exists() { + pcfg.config.write_file(&pcfg.config_path); } - files::change_py_vers(&cfg_path, &specified); + files::change_py_vers(&pcfg.config_path, &specified); specified }; // Check for environments. Create one if none exist. Set `vers_path`. 
- let (vers_path, py_vers) = - util::find_or_create_venv(&cfg_vers, &pypackages_path, &pyflow_path, &dep_cache_path); + let (vers_path, py_vers) = util::find_or_create_venv( + &cfg_vers, + &pcfg.pypackages_path, + &pyflow_path, + &dep_cache_path, + ); let paths = util::Paths { bin: util::find_bin_path(&vers_path), @@ -301,15 +145,15 @@ fn main() { // Add all path reqs to the PYTHONPATH; this is the way we make these packages accessible when // running `pyflow`. let mut pythonpath = vec![paths.lib.clone()]; - for r in cfg.reqs.iter().filter(|r| r.path.is_some()) { + for r in pcfg.config.reqs.iter().filter(|r| r.path.is_some()) { pythonpath.push(PathBuf::from(r.path.clone().unwrap())); } - for r in cfg.dev_reqs.iter().filter(|r| r.path.is_some()) { + for r in pcfg.config.dev_reqs.iter().filter(|r| r.path.is_some()) { pythonpath.push(PathBuf::from(r.path.clone().unwrap())); } let mut found_lock = false; - let lock = match util::read_lock(lock_path) { + let lock = match util::read_lock(&pcfg.lock_path) { Ok(l) => { found_lock = true; l @@ -322,12 +166,12 @@ fn main() { sync( &paths, &lockpacks, - &cfg.reqs, - &cfg.dev_reqs, - &util::find_dont_uninstall(&cfg.reqs, &cfg.dev_reqs), + &pcfg.config.reqs, + &pcfg.config.dev_reqs, + &util::find_dont_uninstall(&pcfg.config.reqs, &pcfg.config.dev_reqs), os, &py_vers, - lock_path, + &pcfg.lock_path, ); // Now handle subcommands that require info about the environment @@ -338,8 +182,17 @@ fn main() { // the currently-installed packages, found by crawling metadata in the `lib` path. // See the readme section `How installation and locking work` for details. 
SubCommand::Install { packages, dev } => actions::install( - &cfg_path, &cfg, &git_path, &paths, found_lock, &packages, dev, &lockpacks, &os, - &py_vers, lock_path, + &pcfg.config_path, + &pcfg.config, + &git_path, + &paths, + found_lock, + &packages, + dev, + &lockpacks, + &os, + &py_vers, + &pcfg.lock_path, ), SubCommand::Uninstall { packages } => { @@ -355,10 +208,11 @@ fn main() { }) .collect(); - files::remove_reqs_from_cfg(&cfg_path, &removed_reqs); + files::remove_reqs_from_cfg(&pcfg.config_path, &removed_reqs); // Filter reqs here instead of re-reading the config from file. - let updated_reqs: Vec = cfg + let updated_reqs: Vec = pcfg + .config .clone() .reqs .into_iter() @@ -369,22 +223,28 @@ fn main() { &paths, &lockpacks, &updated_reqs, - &cfg.dev_reqs, + &pcfg.config.dev_reqs, &[], os, &py_vers, - lock_path, + &pcfg.lock_path, ); util::print_color("Uninstall complete", Color::Green); } - SubCommand::Package { extras } => { - actions::package(&paths, &lockpacks, os, &py_vers, lock_path, &cfg, &extras) - } - SubCommand::Publish {} => build::publish(&paths.bin, &cfg), + SubCommand::Package { extras } => actions::package( + &paths, + &lockpacks, + os, + &py_vers, + &pcfg.lock_path, + &pcfg.config, + &extras, + ), + SubCommand::Publish {} => build::publish(&paths.bin, &pcfg.config), SubCommand::List {} => actions::list( &paths.lib, - &[cfg.reqs.as_slice(), cfg.dev_reqs.as_slice()] + &[pcfg.config.reqs.as_slice(), pcfg.config.dev_reqs.as_slice()] .concat() .into_iter() .filter(|r| r.path.is_some()) @@ -401,7 +261,7 @@ fn main() { } } ExternalSubcommands::Run => { - run_cli_tool(&paths.lib, &paths.bin, &vers_path, &cfg, x.args); + run(&paths.lib, &paths.bin, &vers_path, &pcfg.config, x.args); } x => { abort(&format!( diff --git a/src/pyproject/current.rs b/src/pyproject/current.rs new file mode 100644 index 0000000..d0b18bc --- /dev/null +++ b/src/pyproject/current.rs @@ -0,0 +1,56 @@ +use std::{env, path::PathBuf}; + +use termcolor::Color; + +use crate::util; 
+ +use super::{Config, PresentConfig, CFG_FILENAME, LOCK_FILENAME}; + +const NOT_FOUND_ERROR_MESSAGE: &str = indoc::indoc! {r#" +To get started, run `pyflow new projname` to create a project folder, or +`pyflow init` to start a project in this folder. For a list of what you can do, run +`pyflow help`. +"#}; + +pub fn get_config() -> Option { + let mut config_path = PathBuf::from(CFG_FILENAME); + if !&config_path.exists() { + // Try looking recursively in parent directories for a config file. + let recursion_limit = 8; // How my levels to look up + let mut current_level = env::current_dir().expect("Can't access current directory"); + for _ in 0..recursion_limit { + if let Some(parent) = current_level.parent() { + let parent_cfg_path = parent.join(CFG_FILENAME); + if parent_cfg_path.exists() { + config_path = parent_cfg_path; + break; + } + current_level = parent.to_owned(); + } + } + + if !&config_path.exists() { + // we still can't find it after searching parents. + util::print_color(NOT_FOUND_ERROR_MESSAGE, Color::Cyan); // Dark Cyan + return None; + } + } + + // Base pypackages_path and lock_path on the `pyproject.toml` folder. 
+ let project_path = config_path + .parent() + .expect("Can't find project path via parent") + .to_path_buf(); + let pypackages_path = project_path.join("__pypackages__"); + let lock_path = project_path.join(LOCK_FILENAME); + + let mut config = Config::from_file(&config_path).unwrap_or_default(); + config.populate_path_subreqs(); + Some(PresentConfig { + config, + config_path, + project_path, + pypackages_path, + lock_path, + }) +} diff --git a/src/pyproject/mod.rs b/src/pyproject/mod.rs index 2ea76d1..5d7d694 100644 --- a/src/pyproject/mod.rs +++ b/src/pyproject/mod.rs @@ -1,3 +1,5 @@ +pub mod current; + use std::{ collections::HashMap, fs, @@ -14,6 +16,18 @@ use crate::{ util::{self, abort}, }; +pub const CFG_FILENAME: &str = "pyproject.toml"; +pub const LOCK_FILENAME: &str = "pyflow.lock"; + +#[derive(Clone, Debug, Default)] +pub struct PresentConfig { + pub project_path: PathBuf, + pub config_path: PathBuf, + pub pypackages_path: PathBuf, + pub lock_path: PathBuf, + pub config: Config, +} + /// A config, parsed from pyproject.toml #[derive(Clone, Debug, Default, Deserialize)] // todo: Auto-desr some of these diff --git a/src/util/mod.rs b/src/util/mod.rs index 212be45..5f20518 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -59,7 +59,7 @@ pub struct Metadata { /// Print line in a color, then reset formatting. pub fn print_color(message: &str, color: Color) { if let Err(_e) = print_color_res(message, color) { - panic!("Error printing in color") + panic!("Error printing in color"); } } @@ -94,6 +94,11 @@ pub fn abort(message: &str) -> ! { process::exit(1) } +pub fn success(message: &str) { + print_color(message, Color::Green); + process::exit(0) +} + /// Find which virtual environments exist. 
pub fn find_venvs(pypackages_dir: &Path) -> Vec<(u32, u32)> { let py_versions: &[(u32, u32)] = &[ diff --git a/src/util/os.rs b/src/util/os.rs index 3d013c7..7896465 100644 --- a/src/util/os.rs +++ b/src/util/os.rs @@ -40,7 +40,7 @@ impl FromStr for Os { } } -pub fn get_os() -> Os { +pub const fn get_os() -> Os { #[cfg(target_os = "windows")] return Os::Windows; #[cfg(target_os = "linux")] From c63e66bae1bbd83fb5fc97921999a19b90664473 Mon Sep 17 00:00:00 2001 From: Dmitry Rubinstein Date: Thu, 2 Dec 2021 18:04:37 +0200 Subject: [PATCH 30/41] Adding add command --- src/cli_options.rs | 10 ++++++++++ src/main.rs | 28 +++++++++++++++------------- 2 files changed, 25 insertions(+), 13 deletions(-) diff --git a/src/cli_options.rs b/src/cli_options.rs index 073283e..ea72f4a 100644 --- a/src/cli_options.rs +++ b/src/cli_options.rs @@ -22,6 +22,16 @@ pub enum SubCommand { name: String, // holds the project name. }, + /// Add packages to `pyproject.toml` and sync an environment + #[structopt(name = "add")] + Add { + #[structopt(name = "packages")] + packages: Vec, // holds the packages names. + /// Save package to your dev-dependencies section + #[structopt(short, long)] + dev: bool + }, + /** Install packages from `pyproject.toml`, `pyflow.lock`, or specified ones. Example: `pyflow install`: sync your installation with `pyproject.toml`, or `pyflow.lock` if it exists. diff --git a/src/main.rs b/src/main.rs index cded081..f5e836d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -181,19 +181,21 @@ fn main() { // We use data from three sources: `pyproject.toml`, `pyflow.lock`, and // the currently-installed packages, found by crawling metadata in the `lib` path. // See the readme section `How installation and locking work` for details. 
- SubCommand::Install { packages, dev } => actions::install( - &pcfg.config_path, - &pcfg.config, - &git_path, - &paths, - found_lock, - &packages, - dev, - &lockpacks, - &os, - &py_vers, - &pcfg.lock_path, - ), + SubCommand::Install { packages, dev } | SubCommand::Add { packages, dev } => { + actions::install( + &pcfg.config_path, + &pcfg.config, + &git_path, + &paths, + found_lock, + &packages, + dev, + &lockpacks, + &os, + &py_vers, + &pcfg.lock_path, + ) + } SubCommand::Uninstall { packages } => { // todo: uninstall dev? From 9768ad8fe2a2ecd0056a527df2f7af06ebedb243 Mon Sep 17 00:00:00 2001 From: mataha Date: Sun, 27 Feb 2022 18:34:40 +0100 Subject: [PATCH 31/41] Lint the code Aka `cargo fmt`. --- src/cli_options.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cli_options.rs b/src/cli_options.rs index ea72f4a..cd060ee 100644 --- a/src/cli_options.rs +++ b/src/cli_options.rs @@ -29,7 +29,7 @@ pub enum SubCommand { packages: Vec, // holds the packages names. /// Save package to your dev-dependencies section #[structopt(short, long)] - dev: bool + dev: bool, }, /** Install packages from `pyproject.toml`, `pyflow.lock`, or specified ones. 
Example: From bc283bdb182275db0b99cf7a26bb5b6c64380a24 Mon Sep 17 00:00:00 2001 From: mataha Date: Sun, 27 Feb 2022 18:54:09 +0100 Subject: [PATCH 32/41] Add some tests --- src/script.rs | 85 +++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 83 insertions(+), 2 deletions(-) diff --git a/src/script.rs b/src/script.rs index 3cd16c9..3638980 100644 --- a/src/script.rs +++ b/src/script.rs @@ -199,10 +199,13 @@ fn find_deps_from_script(script: &str) -> Vec { #[cfg(test)] mod tests { - use crate::dep_types::Version; - use crate::script::check_for_specified_py_vers; + use indoc::indoc; use rstest::rstest; + use crate::dep_types::Version; + + use super::*; + const NO_DUNDER_PYTHON: &str = r#" if __name__ == "__main__": print("Hello, world") @@ -234,4 +237,82 @@ if __name__ == "__main__": let result = check_for_specified_py_vers(src); assert_eq!(result, expected) } + + #[test] + fn parse_no_dependencies_with_single_line_requires() { + let script = indoc! { r#" + __requires__ = [] + "# }; + + let expected: Vec<&str> = vec![]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } + + #[test] + fn parse_no_dependencies_with_multi_line_requires() { + let script = indoc! { r#" + __requires__ = [ + ] + "# }; + + let expected: Vec<&str> = vec![]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } + + #[test] + fn parse_one_dependency_with_single_line_requires() { + let script = indoc! { r#" + __requires__ = ["requests"] + "# }; + + let expected: Vec<&str> = vec!["requests"]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } + + #[test] + fn parse_one_dependency_with_nulti_line_requires() { + let script = indoc! 
{ r#" + __requires__ = [ + "requests" + ] + "# }; + + let expected: Vec<&str> = vec!["requests"]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } + + #[test] + fn parse_multiple_dependencies_with_single_line_requires() { + let script = indoc! { r#" + __requires__ = ["python-dateutil", "requests"] + "# }; + + let expected: Vec<&str> = vec!["python-dateutil", "requests"]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } + + #[test] + fn parse_multiple_dependencies_with_multi_line_requires() { + let script = indoc! { r#" + __requires__ = [ + "python-dateutil", + "requests" + ] + "# }; + + let expected: Vec<&str> = vec!["python-dateutil", "requests"]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } } From 2ce6b5a97134be502f4bdff029f44759d466128d Mon Sep 17 00:00:00 2001 From: mataha Date: Sun, 27 Feb 2022 19:13:34 +0100 Subject: [PATCH 33/41] Parse script dependencies in multiline `__requires__` --- src/script.rs | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/script.rs b/src/script.rs index 3638980..b68ec9b 100644 --- a/src/script.rs +++ b/src/script.rs @@ -176,23 +176,23 @@ fn check_for_specified_py_vers(script: &str) -> Option { /// Find a script's dependencies from a variable: `__requires__ = [dep1, dep2]` fn find_deps_from_script(script: &str) -> Vec { // todo: Helper for this type of logic? We use it several times in the program. 
- let re = Regex::new(r"^__requires__\s*=\s*\[(.*?)\]$").unwrap(); + let re = Regex::new(r"(?ms)^__requires__\s*=\s*\[(.*?)\]$").unwrap(); let mut result = vec![]; - for line in script.lines() { - if let Some(c) = re.captures(line) { - let deps_list = c.get(1).unwrap().as_str().to_owned(); - result = deps_list - .split(',') - .map(|d| { - d.to_owned() - .replace(" ", "") - .replace("\"", "") - .replace("'", "") - }) - .filter(|d| !d.is_empty()) - .collect(); - } + + if let Some(c) = re.captures(script) { + let deps_list = c.get(1).unwrap().as_str().to_owned(); + result = deps_list + .split(',') + .map(|d| { + d.to_owned() + .replace(" ", "") + .replace("'", "") + .replace("\"", "") + .replace("\n", "") + }) + .filter(|d| !d.is_empty()) + .collect(); } result } From 72b33fb080199882ff4113843d19a15f1e2b90a4 Mon Sep 17 00:00:00 2001 From: mataha Date: Sun, 27 Feb 2022 19:23:26 +0100 Subject: [PATCH 34/41] Fix remaining tests in script.rs So that they're actually unit tests. --- src/script.rs | 48 +++++++++++++++++++++++++++--------------------- 1 file changed, 27 insertions(+), 21 deletions(-) diff --git a/src/script.rs b/src/script.rs index b68ec9b..f798a1b 100644 --- a/src/script.rs +++ b/src/script.rs @@ -200,42 +200,48 @@ fn find_deps_from_script(script: &str) -> Vec { #[cfg(test)] mod tests { use indoc::indoc; - use rstest::rstest; use crate::dep_types::Version; use super::*; - const NO_DUNDER_PYTHON: &str = r#" -if __name__ == "__main__": - print("Hello, world") -"#; + #[test] + fn parse_python_version_no_dunder_specified() { + let script = indoc! { r#" + if __name__ == "__main__": + print("Hello, world") + "# }; + + let version: Option = None; + + let expected = version; + let actual = check_for_specified_py_vers(script); + + assert_eq!(expected, actual); + } - const VALID_DUNDER_PYTHON: &str = r#" -__python__ = "3.9.1" + #[test] + fn parse_python_version_valid_dunder_specified() { + let script = indoc! 
{ r#" + __python__ = "3.9.1" -if __name__ == "__main__": - print("Hello, world") -"#; + if __name__ == "__main__": + print("Hello, world") + "# }; - fn py_version() -> Option { - let version = Version { + let version: Option = Some(Version { major: Some(3), minor: Some(9), patch: Some(1), extra_num: None, modifier: None, star: false, - }; - Some(version) - } + }); + + let expected = version; + let actual = check_for_specified_py_vers(script); - #[rstest] - #[case(NO_DUNDER_PYTHON, None)] - #[case(VALID_DUNDER_PYTHON, py_version())] - fn dunder_python_specified(#[case] src: &str, #[case] expected: Option) { - let result = check_for_specified_py_vers(src); - assert_eq!(result, expected) + assert_eq!(expected, actual); } #[test] From 107ad3fa28562a6c7c6e2edfedef1fdaa2054a9f Mon Sep 17 00:00:00 2001 From: mataha Date: Sun, 27 Feb 2022 19:25:06 +0100 Subject: [PATCH 35/41] Fill dependency parsing tests with basic code --- src/script.rs | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/script.rs b/src/script.rs index f798a1b..f22fdba 100644 --- a/src/script.rs +++ b/src/script.rs @@ -248,6 +248,9 @@ mod tests { fn parse_no_dependencies_with_single_line_requires() { let script = indoc! { r#" __requires__ = [] + + if __name__ == "__main__": + print("Hello, world") "# }; let expected: Vec<&str> = vec![]; @@ -261,6 +264,9 @@ mod tests { let script = indoc! { r#" __requires__ = [ ] + + if __name__ == "__main__": + print("Hello, world") "# }; let expected: Vec<&str> = vec![]; @@ -273,6 +279,9 @@ mod tests { fn parse_one_dependency_with_single_line_requires() { let script = indoc! 
{ r#" __requires__ = ["requests"] + + if __name__ == "__main__": + print("Hello, world") "# }; let expected: Vec<&str> = vec!["requests"]; @@ -287,6 +296,9 @@ mod tests { __requires__ = [ "requests" ] + + if __name__ == "__main__": + print("Hello, world") "# }; let expected: Vec<&str> = vec!["requests"]; @@ -299,6 +311,9 @@ mod tests { fn parse_multiple_dependencies_with_single_line_requires() { let script = indoc! { r#" __requires__ = ["python-dateutil", "requests"] + + if __name__ == "__main__": + print("Hello, world") "# }; let expected: Vec<&str> = vec!["python-dateutil", "requests"]; @@ -314,6 +329,9 @@ mod tests { "python-dateutil", "requests" ] + + if __name__ == "__main__": + print("Hello, world") "# }; let expected: Vec<&str> = vec!["python-dateutil", "requests"]; From b3164ad6bbc98d2a3d51b1b719260333374ef5b7 Mon Sep 17 00:00:00 2001 From: mataha Date: Sun, 27 Feb 2022 19:26:54 +0100 Subject: [PATCH 36/41] Fix typos --- src/script.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/script.rs b/src/script.rs index f22fdba..52240f0 100644 --- a/src/script.rs +++ b/src/script.rs @@ -206,7 +206,7 @@ mod tests { use super::*; #[test] - fn parse_python_version_no_dunder_specified() { + fn parse_python_version_with_no_dunder_specified() { let script = indoc! { r#" if __name__ == "__main__": print("Hello, world") @@ -221,7 +221,7 @@ mod tests { } #[test] - fn parse_python_version_valid_dunder_specified() { + fn parse_python_version_with_valid_dunder_specified() { let script = indoc! { r#" __python__ = "3.9.1" @@ -291,7 +291,7 @@ mod tests { } #[test] - fn parse_one_dependency_with_nulti_line_requires() { + fn parse_one_dependency_with_multi_line_requires() { let script = indoc! 
{ r#" __requires__ = [ "requests" From ee4c6a20a0b7c90c160c6e7abd57c1564e4c3af0 Mon Sep 17 00:00:00 2001 From: mataha Date: Sat, 5 Mar 2022 23:15:03 +0100 Subject: [PATCH 37/41] Add missing test case --- src/script.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/script.rs b/src/script.rs index 52240f0..9a3677b 100644 --- a/src/script.rs +++ b/src/script.rs @@ -244,6 +244,19 @@ mod tests { assert_eq!(expected, actual); } + #[test] + fn parse_no_dependencies_with_no_requires() { + let script = indoc! { r#" + if __name__ == "__main__": + print("Hello, world") + "# }; + + let expected: Vec<&str> = vec![]; + let actual = find_deps_from_script(script); + + assert_eq!(expected, actual); + } + #[test] fn parse_no_dependencies_with_single_line_requires() { let script = indoc! { r#" From bc41dfce7be1fbd6e4ac293e0ccc00ea57f98d35 Mon Sep 17 00:00:00 2001 From: mataha Date: Fri, 11 Mar 2022 02:20:48 +0100 Subject: [PATCH 38/41] Provide python-3.10.2-windows release --- src/py_versions.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/py_versions.rs b/src/py_versions.rs index 4f7f9e3..bf8ce68 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -13,10 +13,10 @@ use termcolor::Color; enum PyVers { V3_12_0, // unreleased V3_11_0, // unreleased - V3_10_0, // unreleased + V3_10_2, // Win V3_9_0, // either Os V3_8_0, // either Os - V3_7_4, // Either Os + V3_7_4, // either Os V3_6_9, // Linux V3_6_8, // Win V3_5_7, // Linux @@ -90,7 +90,11 @@ impl From<(Version, Os)> for PyVers { } }, 10 => match v_o.1 { - Os::Windows | Os::Ubuntu | Os::Centos => Self::V3_10_0, + Os::Windows => Self::V3_10_2, + Os::Ubuntu | Os::Centos => { + abort_helper("3.10", "Linux"); + unreachable!() + } _ => { abort_helper("3.10", "Mac"); unreachable!() @@ -120,7 +124,7 @@ impl ToString for PyVers { match self { Self::V3_12_0 => "3.12.0".into(), Self::V3_11_0 => "3.11.0".into(), - Self::V3_10_0 => "3.10.0".into(), + Self::V3_10_2 => 
"3.10.2".into(), Self::V3_9_0 => "3.9.0".into(), Self::V3_8_0 => "3.8.0".into(), Self::V3_7_4 => "3.7.4".into(), @@ -138,7 +142,7 @@ impl PyVers { match self { Self::V3_12_0 => Version::new(3, 12, 0), Self::V3_11_0 => Version::new(3, 11, 0), - Self::V3_10_0 => Version::new(3, 10, 0), + Self::V3_10_1 => Version::new(3, 10, 2), Self::V3_9_0 => Version::new(3, 9, 0), Self::V3_8_0 => Version::new(3, 8, 0), Self::V3_7_4 => Version::new(3, 7, 4), From 9e0b3b38f6b75e9ec60d80c42d0444272d1b4982 Mon Sep 17 00:00:00 2001 From: mataha Date: Fri, 11 Mar 2022 02:22:05 +0100 Subject: [PATCH 39/41] Remove crap --- src/py_versions.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/py_versions.rs b/src/py_versions.rs index bf8ce68..18709f6 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -221,11 +221,7 @@ fn download(py_install_path: &Path, version: &Version) { It's worth trying the other options, to see if one works anyway.", ); unreachable!() - } // _ => panic!("If you're seeing this, the code is in what I thought was an unreachable\ - // state. I could give you advice for what to do. But honestly, why should you trust me?\ - // I clearly screwed this up. I'm writing a message that should never appear, yet\ - // I know it will probably appear someday. 
On a deep level, I know I'm not up to this tak.\ - // I'm so sorry.") + } }; } #[cfg(target_os = "macos")] From cf0903757fef6f4b5cd58162d4f2efb04ee8dd03 Mon Sep 17 00:00:00 2001 From: mataha Date: Fri, 11 Mar 2022 02:24:23 +0100 Subject: [PATCH 40/41] Fix typos in mod.rs --- src/util/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/util/mod.rs b/src/util/mod.rs index 5f20518..3d9c204 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -417,8 +417,8 @@ pub fn unpack_tar_xz(archive_path: &Path, dest: &Path) { let mut decompressor = XzDecoder::new(&archive_bytes[..]); if decompressor.read_to_end(&mut tar).is_err() { abort(&format!( - "Problem decompressing the archive: {:?}. This may be due to a failed downoad. \ - Try deleting it, then trying again. Note that Pyflow will only install officially-released \ + "Problem decompressing the archive: {:?}. This may be due to a failed download. \ + Try deleting it, then try again. Note that Pyflow will only install officially-released \ Python versions. If you'd like to use a pre-release, you must install it manually.", archive_path )) From 4c6ec9bc8dcf2c486d5820627d70162e44d6b5a7 Mon Sep 17 00:00:00 2001 From: mataha Date: Fri, 11 Mar 2022 02:27:22 +0100 Subject: [PATCH 41/41] Fix typo in py_versions.rs --- src/py_versions.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/py_versions.rs b/src/py_versions.rs index 18709f6..2f9bb75 100644 --- a/src/py_versions.rs +++ b/src/py_versions.rs @@ -142,7 +142,7 @@ impl PyVers { match self { Self::V3_12_0 => Version::new(3, 12, 0), Self::V3_11_0 => Version::new(3, 11, 0), - Self::V3_10_1 => Version::new(3, 10, 2), + Self::V3_10_2 => Version::new(3, 10, 2), Self::V3_9_0 => Version::new(3, 9, 0), Self::V3_8_0 => Version::new(3, 8, 0), Self::V3_7_4 => Version::new(3, 7, 4),