diff --git a/src/build/clean.rs b/src/build/clean.rs
index 91c90fe..de02747 100644
--- a/src/build/clean.rs
+++ b/src/build/clean.rs
@@ -125,7 +125,7 @@ pub fn cleanup_previous_build(
         .get(package_name)
         .expect("Could not find package");
     remove_compile_assets(package, res_file_location);
-    remove_mjs_file(res_file_location, &suffix);
+    remove_mjs_file(res_file_location, suffix);
     remove_iast(package, res_file_location);
     remove_ast(package, res_file_location);
     match helpers::get_extension(ast_file_path).as_str() {
@@ -234,14 +234,13 @@ pub fn cleanup_previous_build(
     let deleted_module_names = ast_module_names
         .difference(&all_module_names)
-        .map(|module_name| {
+        .flat_map(|module_name| {
             // if the module is a namespace, we need to mark the whole namespace as dirty when a module has been deleted
             if let Some(namespace) = helpers::get_namespace_from_module_name(module_name) {
                 return vec![namespace, module_name.to_string()];
             }
             vec![module_name.to_string()]
         })
-        .flatten()
         .collect::<AHashSet<String>>();
 
     build_state.deleted_modules = deleted_module_names;
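
// A minimal, standalone sketch of the `flat_map` change above: each deleted
// module expands to one or two dirty names, and `flat_map` fuses the former
// `.map(..).flatten()` pair into a single adapter.
// `get_namespace_from_module_name` is stubbed here for illustration; the real
// helper lives in `helpers` and its exact rules are assumed.
fn get_namespace_from_module_name(module_name: &str) -> Option<String> {
    // Assumption: namespaced modules look like "Namespace-Module".
    module_name
        .split_once('-')
        .map(|(namespace, _)| namespace.to_string())
}

fn main() {
    let deleted = ["MyLib-Button", "Util"];
    let dirty: Vec<String> = deleted
        .iter()
        .flat_map(|module_name| {
            // A namespaced module marks its whole namespace dirty as well.
            if let Some(namespace) = get_namespace_from_module_name(module_name) {
                return vec![namespace, module_name.to_string()];
            }
            vec![module_name.to_string()]
        })
        .collect();
    assert_eq!(dirty, ["MyLib", "MyLib-Button", "Util"]);
}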
diff --git a/src/build/compile.rs b/src/build/compile.rs
index 863a47c..d185f05 100644
--- a/src/build/compile.rs
+++ b/src/build/compile.rs
@@ -219,11 +219,10 @@ pub fn compile(
                     } else {
                         None
                     }
-                    .map(|res| {
+                    .inspect(|_res| {
                         if !(log_enabled!(Info)) {
                             inc();
                         }
-                        res
                     })
                 })
                 .collect::<Vec<
                     Option<(
                         String,
                         Result<Option<String>, String>,
                         Option<Result<Option<String>, String>>,
                         bool,
                         bool,
                     )>,
                 >>()
                 .iter()
-                .for_each(|result| match result {
-                    Some((module_name, result, interface_result, is_clean, is_compiled)) => {
-                        in_progress_modules.remove(module_name);
+                .for_each(|result| if let Some((module_name, result, interface_result, is_clean, is_compiled)) = result {
+                    in_progress_modules.remove(module_name);
 
-                        if *is_compiled {
-                            num_compiled_modules += 1;
-                        }
+                    if *is_compiled {
+                        num_compiled_modules += 1;
+                    }
 
-                        files_current_loop_count += 1;
-                        compiled_modules.insert(module_name.to_string());
+                    files_current_loop_count += 1;
+                    compiled_modules.insert(module_name.to_string());
 
-                        if *is_clean {
-                            // actually add it to a list of clean modules
-                            clean_modules.insert(module_name.to_string());
-                        }
+                    if *is_clean {
+                        // actually add it to a list of clean modules
+                        clean_modules.insert(module_name.to_string());
+                    }
 
-                        let module_dependents = build_state.get_module(module_name).unwrap().dependents.clone();
+                    let module_dependents = build_state.get_module(module_name).unwrap().dependents.clone();
 
-                        // if not clean -- compile modules that depend on this module
-                        for dep in module_dependents.iter() {
+                    // if not clean -- compile modules that depend on this module
+                    for dep in module_dependents.iter() {
+                        // mark the reverse dep as dirty when the source is not clean
+                        if !*is_clean {
+                            let dep_module = build_state.modules.get_mut(dep).unwrap();
                             // mark the reverse dep as dirty when the source is not clean
-                            if !*is_clean {
-                                let dep_module = build_state.modules.get_mut(dep).unwrap();
-                                // mark the reverse dep as dirty when the source is not clean
-                                dep_module.compile_dirty = true;
-                            }
-                            if !compiled_modules.contains(dep) {
-                                in_progress_modules.insert(dep.to_string());
-                            }
+                            dep_module.compile_dirty = true;
                         }
+                        if !compiled_modules.contains(dep) {
+                            in_progress_modules.insert(dep.to_string());
+                        }
+                    }
 
-                        let module = build_state.modules.get_mut(module_name).unwrap();
-                        let package = build_state
-                            .packages
-                            .get(&module.package_name)
-                            .expect("Package not found");
-                        match module.source_type {
-                            SourceType::MlMap(ref mut mlmap) => {
-                                module.compile_dirty = false;
-                                mlmap.parse_dirty = false;
-                            }
-                            SourceType::SourceFile(ref mut source_file) => {
-                                match result {
-                                    Ok(Some(err)) => {
-                                        source_file.implementation.compile_state = CompileState::Warning;
-                                        logs::append(package, err);
-                                        compile_warnings.push_str(err);
-                                    }
-                                    Ok(None) => {
-                                        source_file.implementation.compile_state = CompileState::Success;
-                                    }
-                                    Err(err) => {
-                                        source_file.implementation.compile_state = CompileState::Error;
-                                        logs::append(package, err);
-                                        compile_errors.push_str(err);
-                                    }
-                                };
-                                match interface_result {
-                                    Some(Ok(Some(err))) => {
-                                        source_file.interface.as_mut().unwrap().compile_state =
-                                            CompileState::Warning;
-                                        logs::append(package, err);
-                                        compile_warnings.push_str(err);
-                                    }
-                                    Some(Ok(None)) => {
-                                        if let Some(interface) = source_file.interface.as_mut() {
-                                            interface.compile_state = CompileState::Success;
-                                        }
+                    let module = build_state.modules.get_mut(module_name).unwrap();
+                    let package = build_state
+                        .packages
+                        .get(&module.package_name)
+                        .expect("Package not found");
+                    match module.source_type {
+                        SourceType::MlMap(ref mut mlmap) => {
+                            module.compile_dirty = false;
+                            mlmap.parse_dirty = false;
+                        }
+                        SourceType::SourceFile(ref mut source_file) => {
+                            match result {
+                                Ok(Some(err)) => {
+                                    source_file.implementation.compile_state = CompileState::Warning;
+                                    logs::append(package, err);
+                                    compile_warnings.push_str(err);
+                                }
+                                Ok(None) => {
+                                    source_file.implementation.compile_state = CompileState::Success;
+                                }
+                                Err(err) => {
+                                    source_file.implementation.compile_state = CompileState::Error;
+                                    logs::append(package, err);
+                                    compile_errors.push_str(err);
+                                }
+                            };
+                            match interface_result {
+                                Some(Ok(Some(err))) => {
+                                    source_file.interface.as_mut().unwrap().compile_state =
+                                        CompileState::Warning;
+                                    logs::append(package, err);
+                                    compile_warnings.push_str(err);
+                                }
+                                Some(Ok(None)) => {
+                                    if let Some(interface) = source_file.interface.as_mut() {
+                                        interface.compile_state = CompileState::Success;
                                     }
+                                }
-                                    Some(Err(err)) => {
-                                        source_file.interface.as_mut().unwrap().compile_state =
-                                            CompileState::Error;
-                                        logs::append(package, err);
-                                        compile_errors.push_str(err);
-                                    }
-                                    _ => (),
-                                };
-                                match (result, interface_result) {
-                                    // successfull compilation
-                                    (Ok(None), Some(Ok(None))) | (Ok(None), None) => {
-                                        module.compile_dirty = false;
-                                        module.last_compiled_cmi = Some(SystemTime::now());
-                                        module.last_compiled_cmt = Some(SystemTime::now());
-                                    }
-                                    // some error or warning
-                                    (Err(_), _)
-                                    | (_, Some(Err(_)))
-                                    | (Ok(Some(_)), _)
-                                    | (_, Some(Ok(Some(_)))) => {
-                                        module.compile_dirty = true;
-                                    }
+                                Some(Err(err)) => {
+                                    source_file.interface.as_mut().unwrap().compile_state =
+                                        CompileState::Error;
+                                    logs::append(package, err);
+                                    compile_errors.push_str(err);
+                                }
+                                _ => (),
+                            };
+                            match (result, interface_result) {
+                                // successfull compilation
+                                (Ok(None), Some(Ok(None))) | (Ok(None), None) => {
+                                    module.compile_dirty = false;
+                                    module.last_compiled_cmi = Some(SystemTime::now());
+                                    module.last_compiled_cmt = Some(SystemTime::now());
+                                }
+                                // some error or warning
+                                (Err(_), _)
+                                | (_, Some(Err(_)))
+                                | (Ok(Some(_)), _)
+                                | (_, Some(Ok(Some(_)))) => {
+                                    module.compile_dirty = true;
                                 }
                             }
                         }
                     }
-                    None => (),
                 });
 
         files_total_count += files_current_loop_count;
 
@@ -388,7 +384,7 @@ pub fn compiler_args(
     //     .unwrap_or(&vec![])
     //     .to_owned();
 
-    let deps = vec![normal_deps]
+    let deps = [normal_deps]
         .concat()
         .par_iter()
         .map(|package_name| {
@@ -455,7 +451,7 @@ pub fn compiler_args(
             _ => vec![],
         };
 
-        vec![warn_number, warn_error].concat()
+        [warn_number, warn_error].concat()
     }
 };
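
// A minimal sketch of the `Option::inspect` change above: the old `.map` only
// existed for its side effect (bumping the progress counter) and had to hand
// `res` back; `inspect` runs the closure by reference and yields the value
// unchanged. Note: `Option::inspect` requires Rust 1.76 or newer.
fn main() {
    let mut compiled = 0;
    let result: Option<&str> = Some("Module.res");

    // Before: result.map(|res| { compiled += 1; res })
    let same = result.inspect(|_res| compiled += 1);

    assert_eq!(same, Some("Module.res"));
    assert_eq!(compiled, 1);
}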
diff --git a/src/build/deps.rs b/src/build/deps.rs
index 5bd1939..2ce82b3 100644
--- a/src/build/deps.rs
+++ b/src/build/deps.rs
@@ -84,18 +84,15 @@ pub fn get_deps(build_state: &mut BuildState, deleted_modules: &AHashSet<String>) {
                         all_mod,
                     );
 
-                    match &source_file.interface {
-                        Some(interface) => {
-                            let iast_path = package.get_iast_path(&interface.path);
+                    if let Some(interface) = &source_file.interface {
+                        let iast_path = package.get_iast_path(&interface.path);
 
-                            deps.extend(get_dep_modules(
-                                &iast_path,
-                                package.namespace.to_suffix(),
-                                package.modules.as_ref().unwrap(),
-                                all_mod,
-                            ))
-                        }
-                        None => (),
+                        deps.extend(get_dep_modules(
+                            &iast_path,
+                            package.namespace.to_suffix(),
+                            package.modules.as_ref().unwrap(),
+                            all_mod,
+                        ))
                     }
                     match &package.namespace {
                         packages::Namespace::NamespaceWithEntry { namespace: _, entry }
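
// A minimal sketch of the deps.rs cleanup above: a `match` whose `None` arm is
// `()` is what clippy's `single_match` lint flags, and `if let` expresses the
// same thing with one arm and one less level of nesting. `Interface` here is a
// stand-in for the real source-file interface type.
struct Interface {
    path: String,
}

fn main() {
    let interface = Some(Interface { path: "Foo.resi".to_string() });

    // Before: match &interface { Some(interface) => { .. }, None => () }
    if let Some(interface) = &interface {
        println!("interface file at {}", interface.path);
    }
}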
diff --git a/src/build/packages.rs b/src/build/packages.rs
index b50c0c6..631f0b3 100644
--- a/src/build/packages.rs
+++ b/src/build/packages.rs
@@ -278,10 +278,10 @@ pub fn read_dependency(
 
 /// Given a bsconfig, recursively finds all dependencies.
 /// 1. It starts with registering dependencies and
-/// prevents the operation for the ones which are already
-/// registerd for the parent packages. Especially relevant for peerDependencies.
+///    prevents the operation for the ones which are already
+///    registerd for the parent packages. Especially relevant for peerDependencies.
 /// 2. In parallel performs IO to read the dependencies bsconfig and
-/// recursively continues operation for their dependencies as well.
+///    recursively continues operation for their dependencies as well.
 fn read_dependencies(
     registered_dependencies_set: &mut AHashSet<String>,
     parent_bsconfig: &bsconfig::Config,
@@ -292,7 +292,7 @@ fn read_dependencies(
     return parent_bsconfig
         .bs_dependencies
         .to_owned()
-        .unwrap_or(vec![])
+        .unwrap_or_default()
         .iter()
         .filter_map(|package_name| {
             if registered_dependencies_set.contains(package_name) {
@@ -517,6 +517,7 @@ fn extend_with_children(
 /// 1. Get all the packages parsed, and take all the source folders from the bsconfig
 /// 2. Take the (by then deduplicated) packages, and find all the '.re', '.res', '.ml' and
 /// interface files.
+///
 /// The two step process is there to reduce IO overhead
 pub fn make(
     filter: &Option<regex::Regex>,
@@ -528,12 +529,9 @@ pub fn make(
     /* Once we have the deduplicated packages, we can add the source files for each - to minimize
      * the IO */
     let result = extend_with_children(filter, map);
 
-    result.values().for_each(|package| match &package.dirs {
-        Some(dirs) => dirs.iter().for_each(|dir| {
-            let _ = std::fs::create_dir_all(std::path::Path::new(&package.get_bs_build_path()).join(dir));
-        }),
-        None => (),
-    });
+    result.values().for_each(|package| if let Some(dirs) = &package.dirs { dirs.iter().for_each(|dir| {
+        let _ = std::fs::create_dir_all(std::path::Path::new(&package.get_bs_build_path()).join(dir));
+    }) });
 
     result
 }
@@ -778,11 +776,9 @@ pub fn validate_packages_dependencies(packages: &AHashMap<String, Package>) -> b
         let pinned_dependencies = &package.bsconfig.pinned_dependencies.to_owned().unwrap_or(vec![]);
         let dev_dependencies = &package.bsconfig.bs_dev_dependencies.to_owned().unwrap_or(vec![]);
 
-        vec![
-            ("bs-dependencies", bs_dependencies),
+        [("bs-dependencies", bs_dependencies),
             ("pinned-dependencies", pinned_dependencies),
-            ("bs-dev-dependencies", dev_dependencies),
-        ]
+            ("bs-dev-dependencies", dev_dependencies)]
         .iter()
         .for_each(|(dependency_type, dependencies)| {
             if let Some(unallowed_dependency_name) =
@@ -812,11 +808,9 @@
                 console::style(package_name).bold()
             );
 
-            vec![
-                ("bs-dependencies", unallowed_deps.bs_deps.to_owned()),
+            [("bs-dependencies", unallowed_deps.bs_deps.to_owned()),
                 ("pinned-dependencies", unallowed_deps.pinned_deps.to_owned()),
-                ("bs-dev-dependencies", unallowed_deps.bs_dev_deps.to_owned()),
-            ]
+                ("bs-dev-dependencies", unallowed_deps.bs_dev_deps.to_owned())]
             .iter()
             .for_each(|(deps_type, map)| {
                 if !map.is_empty() {
@@ -854,7 +848,7 @@ mod test {
         dev_deps: Vec<String>,
         allowed_dependents: Option<Vec<String>>,
     ) -> Package {
-        return Package {
+        Package {
            name: name.clone(),
            bsconfig: crate::bsconfig::Config {
                name: name.clone(),
@@ -883,7 +877,7 @@ mod test {
            dirs: None,
            is_pinned_dep: false,
            is_root: false,
-        };
+        }
     }
     #[test]
     fn should_return_false_with_invalid_parents_as_bs_dependencies() {
@@ -910,7 +904,7 @@ mod test {
         );
 
         let is_valid = super::validate_packages_dependencies(&packages);
-        assert_eq!(is_valid, false)
+        assert!(!is_valid)
     }
 
     #[test]
@@ -938,7 +932,7 @@ mod test {
         );
 
         let is_valid = super::validate_packages_dependencies(&packages);
-        assert_eq!(is_valid, false)
+        assert!(!is_valid)
     }
 
     #[test]
@@ -966,7 +960,7 @@ mod test {
         );
 
         let is_valid = super::validate_packages_dependencies(&packages);
-        assert_eq!(is_valid, false)
+        assert!(!is_valid)
     }
 
     #[test]
@@ -994,6 +988,6 @@ mod test {
         );
 
         let is_valid = super::validate_packages_dependencies(&packages);
-        assert_eq!(is_valid, true)
+        assert!(is_valid)
     }
 }
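
// Minimal sketches of the three clippy-driven cleanups in packages.rs above,
// with plain `Vec`s standing in for the real bsconfig types.
fn main() {
    // 1. `unwrap_or(vec![])` builds an empty Vec eagerly even when the Option
    //    is `Some`; `unwrap_or_default()` expresses the same fallback.
    let bs_dependencies: Option<Vec<String>> = None;
    let deps = bs_dependencies.unwrap_or_default();

    // 2. A fixed-size array concatenates just like `vec![..]` but skips one
    //    heap allocation (clippy: `useless_vec`).
    let grouped = [deps, vec!["dev-dep".to_string()]].concat();

    // 3. `assert!(!x)` replaces `assert_eq!(x, false)` in the tests
    //    (clippy: `bool_assert_comparison`).
    let is_valid = grouped.is_empty();
    assert!(!is_valid);
}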
diff --git a/src/build/parse.rs b/src/build/parse.rs
index 4ca3c18..2452fa6 100644
--- a/src/build/parse.rs
+++ b/src/build/parse.rs
@@ -172,7 +172,6 @@ pub fn generate_asts(
                 }
                 Ok(None) => {
                     // The file had no interface file associated
-                    ()
                 }
             }
         };
@@ -279,7 +278,7 @@ pub fn parser_args(
     let file = "../../".to_string() + file;
     (
         ast_path.to_string(),
-        vec![
+        [
             vec!["-bs-v".to_string(), format!("{}", version)],
             ppx_flags,
             jsx_args,
@@ -347,17 +346,14 @@ fn generate_ast(
             filename, package.name
         ))
     };
-    match &result {
-        Ok((ast_path, _)) => {
-            let dir = std::path::Path::new(filename).parent().unwrap();
-            let _ = std::fs::copy(
-                build_path_abs.to_string() + "/" + ast_path,
-                std::path::Path::new(&package.get_bs_build_path())
-                    .join(dir)
-                    .join(ast_path),
-            );
-        }
-        Err(_) => (),
+    if let Ok((ast_path, _)) = &result {
+        let dir = std::path::Path::new(filename).parent().unwrap();
+        let _ = std::fs::copy(
+            build_path_abs.to_string() + "/" + ast_path,
+            std::path::Path::new(&package.get_bs_build_path())
+                .join(dir)
+                .join(ast_path),
+        );
     }
     result
 }
@@ -374,17 +370,17 @@ fn path_to_ast_extension(path: &Path) -> &str {
 fn include_ppx(flag: &str, contents: &str) -> bool {
     if flag.contains("bisect") {
         return std::env::var("BISECT_ENABLE").is_ok();
-    } else if (flag.contains("graphql-ppx") || flag.contains("graphql_ppx")) && !contents.contains("%graphql")
+    }
+
+    if ((flag.contains("graphql-ppx") || flag.contains("graphql_ppx")) && !contents.contains("%graphql"))
+        || (flag.contains("spice") && !contents.contains("@spice"))
+        || (flag.contains("rescript-relay") && !contents.contains("%relay"))
+        || (flag.contains("re-formality") && !contents.contains("%form"))
     {
         return false;
-    } else if flag.contains("spice") && !contents.contains("@spice") {
-        return false;
-    } else if flag.contains("rescript-relay") && !contents.contains("%relay") {
-        return false;
-    } else if flag.contains("re-formality") && !contents.contains("%form") {
-        return false;
-    }
-    return true;
+    };
+
+    true
 }
 
 fn filter_ppx_flags(
diff --git a/src/sourcedirs.rs b/src/sourcedirs.rs
index d50f563..41eafb5 100644
--- a/src/sourcedirs.rs
+++ b/src/sourcedirs.rs
@@ -20,7 +20,7 @@ pub struct SourceDirs<'a> {
     pub generated: &'a Vec<String>,
 }
 
-fn package_to_dirs<'a>(package: &'a Package, root_package_path: &String) -> AHashSet<String> {
+fn package_to_dirs(package: &Package, root_package_path: &String) -> AHashSet<String> {
     let relative_path = PathBuf::from(&package.path)
         .strip_prefix(PathBuf::from(&root_package_path))
         .unwrap()
@@ -72,7 +72,7 @@ pub fn print(buildstate: &BuildState) {
         .filter(|(_name, package)| !package.is_root)
         .map(|(_name, package)| {
             // Extract Directories
-            let dirs = package_to_dirs(&package, &root_package.path);
+            let dirs = package_to_dirs(package, &root_package.path);
 
             // Extract Pkgs
             let pkgs = [
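
// A minimal sketch of the sourcedirs.rs signature change above: the explicit
// `'a` tied the borrow of `package` to nothing else in the signature, so
// clippy's `needless_lifetimes` suggests eliding it. `Package` is a stand-in
// for the real type, and a `Vec` stands in for the `AHashSet` return value.
struct Package {
    path: String,
}

// Before: fn package_to_dirs<'a>(package: &'a Package, ..) -> AHashSet<String>
// After:  the elided form below, with identical borrow-checking behavior.
fn package_to_dirs(package: &Package) -> Vec<String> {
    vec![package.path.clone()]
}

fn main() {
    let package = Package { path: "packages/app".to_string() };
    assert_eq!(package_to_dirs(&package), ["packages/app".to_string()]);
}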