diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 27ff45c9e..30d79a159 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,7 +13,7 @@ jobs: fetch-depth: 0 - uses: actions/setup-node@v2 with: - node-version: 18 + node-version: 20 - uses: actions/setup-python@v4 with: python-version: '3.x' @@ -22,7 +22,7 @@ jobs: run: | npm install @semantic-release/git @semantic-release/exec --no-save python3 -m pip install wheel twine - python3 -m pip install git+https://github.com/pypa/hatch + python3 -m pip install git+https://github.com/pypa/hatch@hatch-v1.9.2 - name: Create Release env: diff --git a/.gitignore b/.gitignore index 35826d3c8..8d81a51f1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ # MAC OS .DS_Store +# VS Code +.vscode/ + # Vim Gitignore ## Swap [._]*.s[a-v][a-z] diff --git a/bench/app.py b/bench/app.py index f07251e4c..934a100b0 100755 --- a/bench/app.py +++ b/bench/app.py @@ -6,15 +6,19 @@ import shutil import subprocess import sys +import tarfile import typing from collections import OrderedDict from datetime import date from functools import lru_cache +from pathlib import Path +from typing import Optional from urllib.parse import urlparse # imports - third party imports import click import git +import semantic_version as sv # imports - module imports import bench @@ -22,7 +26,9 @@ from bench.utils import ( UNSET_ARG, fetch_details_from_tag, + get_app_cache_extract_filter, get_available_folder_name, + get_bench_cache_path, is_bench_directory, is_git_url, is_valid_frappe_branch, @@ -121,7 +127,8 @@ def _setup_details_from_mounted_disk(self): self.tag = self.branch = None def _setup_details_from_name_tag(self): - self.org, self.repo, self.tag = fetch_details_from_tag(self.name) + using_cached = bool(self.cache_key) + self.org, self.repo, self.tag = fetch_details_from_tag(self.name, using_cached) self.tag = self.tag or self.branch def _setup_details_from_git_url(self, url=None): @@ -166,6 +173,7 @@ def __init__( branch: str = None, bench: "Bench" = None, soft_link: bool = False, + cache_key=None, *args, **kwargs, ): @@ -173,6 +181,8 @@ def __init__( self.soft_link = soft_link self.required_by = None self.local_resolution = [] + self.cache_key = cache_key + self.pyproject = None super().__init__(name, branch, *args, **kwargs) @step(title="Fetching App {repo}", success="App {repo} Fetched") @@ -227,10 +237,13 @@ def install( resolved=False, restart_bench=True, ignore_resolution=False, + using_cached=False, ): import bench.cli from bench.utils.app import get_app_name + self.validate_app_dependencies() + verbose = bench.cli.verbose or verbose app_name = get_app_name(self.bench.name, self.app_name) if not resolved and self.app_name != "frappe" and not ignore_resolution: @@ -247,6 +260,7 @@ def install( skip_assets=skip_assets, restart_bench=restart_bench, resolution=self.local_resolution, + using_cached=using_cached, ) @step(title="Cloning and installing {repo}", success="App {repo} Installed") @@ -284,6 +298,304 @@ def update_app_state(self): required=self.local_resolution, ) + def get_pyproject(self) -> Optional[dict]: + from bench.utils.app import get_pyproject + + if self.pyproject: + return self.pyproject + + apps_path = os.path.join(os.path.abspath(self.bench.name), "apps") + pyproject_path = os.path.join(apps_path, self.app_name, "pyproject.toml") + self.pyproject = get_pyproject(pyproject_path) + return self.pyproject + + def validate_app_dependencies(self, throw=False) -> None: + pyproject = 
self.get_pyproject() or {} + deps: Optional[dict] = ( + pyproject.get("tool", {}).get("bench", {}).get("frappe-dependencies") + ) + if not deps: + return + + for dep, version in deps.items(): + validate_dependency(self, dep, version, throw=throw) + + """ + Get App Cache + + Since get-app affects only the `apps`, `env`, and `sites` + bench sub directories. If we assume deterministic builds + when get-app is called, the `apps/app_name` sub dir can be + cached. + + In subsequent builds this would save time by not having to: + - clone repository + - install frontend dependencies + - building frontend assets + as all of this is contained in the `apps/app_name` sub dir. + + Code that updates the `env` and `sites` subdirs still need + to be run. + """ + + def get_app_path(self) -> Path: + return Path(self.bench.name) / "apps" / self.app_name + + def get_app_cache_path(self, is_compressed=False) -> Path: + assert self.cache_key is not None + + cache_path = get_bench_cache_path("apps") + tarfile_name = get_cache_filename( + self.app_name, + self.cache_key, + is_compressed, + ) + return cache_path / tarfile_name + + def get_cached(self) -> bool: + if not self.cache_key: + return False + + cache_path = self.get_app_cache_path(False) + mode = "r" + + # Check if cache exists without gzip + if not cache_path.is_file(): + cache_path = self.get_app_cache_path(True) + mode = "r:gz" + + # Check if cache exists with gzip + if not cache_path.is_file(): + return False + + app_path = self.get_app_path() + if app_path.is_dir(): + shutil.rmtree(app_path) + + click.secho(f"Getting {self.app_name} from cache", fg="yellow") + with tarfile.open(cache_path, mode) as tar: + extraction_filter = get_app_cache_extract_filter(count_threshold=150_000) + try: + tar.extractall(app_path.parent, filter=extraction_filter) + except Exception: + message = f"Cache extraction failed for {self.app_name}, skipping cache" + click.secho(message, fg="yellow") + logger.exception(message) + shutil.rmtree(app_path) + return False + + return True + + def set_cache(self, compress_artifacts=False) -> bool: + if not self.cache_key: + return False + + app_path = self.get_app_path() + if not app_path.is_dir(): + return False + + cwd = os.getcwd() + cache_path = self.get_app_cache_path(compress_artifacts) + mode = "w:gz" if compress_artifacts else "w" + + message = f"Caching {self.app_name} app directory" + if compress_artifacts: + message += " (compressed)" + click.secho(message) + + self.prune_app_directory() + + success = False + os.chdir(app_path.parent) + try: + with tarfile.open(cache_path, mode) as tar: + tar.add(app_path.name) + success = True + except Exception: + log(f"Failed to cache {app_path}", level=3) + success = False + finally: + os.chdir(cwd) + return success + + def prune_app_directory(self): + app_path = self.get_app_path() + if can_frappe_use_cached(self): + remove_unused_node_modules(app_path) + + +def coerce_url_to_name_if_possible(git_url: str, cache_key: str) -> str: + app_name = os.path.basename(git_url) + if can_get_cached(app_name, cache_key): + return app_name + return git_url + + +def can_get_cached(app_name: str, cache_key: str) -> bool: + """ + Used before App is initialized if passed `git_url` is a + file URL as opposed to the app name. + + If True then `git_url` can be coerced into the `app_name` and + checking local remote and fetching can be skipped while keeping + get-app command params the same. 
+ """ + cache_path = get_bench_cache_path("apps") + tarfile_path = cache_path / get_cache_filename( + app_name, + cache_key, + True, + ) + + if tarfile_path.is_file(): + return True + + tarfile_path = cache_path / get_cache_filename( + app_name, + cache_key, + False, + ) + + return tarfile_path.is_file() + + +def get_cache_filename(app_name: str, cache_key: str, is_compressed=False): + ext = "tgz" if is_compressed else "tar" + return f"{app_name}-{cache_key[:10]}.{ext}" + + +def can_frappe_use_cached(app: App) -> bool: + min_frappe = get_required_frappe_version(app) + if not min_frappe: + return False + + try: + return sv.Version(min_frappe) in sv.SimpleSpec(">=15.12.0") + except ValueError: + # Passed value is not a version string, it's an expression + pass + + try: + """ + 15.12.0 is the first version to support USING_CACHED, + but there is no way to check the last version without + support. So it's not possible to have a ">" filter. + + Hence this excludes the first supported version. + """ + return sv.Version("15.12.0") not in sv.SimpleSpec(min_frappe) + except ValueError: + click.secho(f"Invalid value found for frappe version '{min_frappe}'", fg="yellow") + # Invalid expression + return False + + +def validate_dependency(app: App, dep: str, req_version: str, throw=False) -> None: + dep_path = Path(app.bench.name) / "apps" / dep + if not dep_path.is_dir(): + click.secho(f"Required frappe-dependency '{dep}' not found.", fg="yellow") + if throw: + sys.exit(1) + return + + dep_version = get_dep_version(dep, dep_path) + if not dep_version: + return + + if sv.Version(dep_version) not in sv.SimpleSpec(req_version): + click.secho( + f"Installed frappe-dependency '{dep}' version '{dep_version}' " + f"does not satisfy required version '{req_version}'. " + f"App '{app.name}' might not work as expected.", + fg="yellow", + ) + if throw: + click.secho(f"Please install '{dep}{req_version}' first and retry", fg="red") + sys.exit(1) + + +def get_dep_version(dep: str, dep_path: Path) -> Optional[str]: + from bench.utils.app import get_pyproject + + dep_pp = get_pyproject(str(dep_path / "pyproject.toml")) + version = dep_pp.get("project", {}).get("version") + if version: + return version + + dinit_path = dep_path / dep / "__init__.py" + if not dinit_path.is_file(): + return None + + with dinit_path.open("r", encoding="utf-8") as dinit: + for line in dinit: + if not line.startswith("__version__ =") and not line.startswith("VERSION ="): + continue + + version = line.split("=")[1].strip().strip("\"'") + if version: + return version + else: + break + + return None + + +def get_required_frappe_version(app: App) -> Optional[str]: + pyproject = app.get_pyproject() or {} + + # Reference: https://github.com/frappe/bench/issues/1524 + req_frappe = ( + pyproject.get("tool", {}) + .get("bench", {}) + .get("frappe-dependencies", {}) + .get("frappe") + ) + + if not req_frappe: + click.secho( + "Required frappe version not set in pyproject.toml, " + "please refer: https://github.com/frappe/bench/issues/1524", + fg="yellow", + ) + + return req_frappe + + +def remove_unused_node_modules(app_path: Path) -> None: + """ + Erring a bit the side of caution; since there is no explicit way + to check if node_modules are utilized, this function checks if Vite + is being used to build the frontend code. + + Since most popular Frappe apps use Vite to build their frontends, + this method should suffice. 
+ + Note: root package.json is ignored cause those usually belong to + apps that do not have a build step and so their node_modules are + utilized during runtime. + """ + + for p in app_path.iterdir(): + if not p.is_dir(): + continue + + package_json = p / "package.json" + if not package_json.is_file(): + continue + + node_modules = p / "node_modules" + if not node_modules.is_dir(): + continue + + can_delete = False + with package_json.open("r", encoding="utf-8") as f: + package_json = json.loads(f.read()) + build_script = package_json.get("scripts", {}).get("build", "") + can_delete = "vite build" in build_script + + if can_delete: + shutil.rmtree(node_modules) + def make_resolution_plan(app: App, bench: "Bench"): """ @@ -346,6 +658,8 @@ def get_app( soft_link=False, init_bench=False, resolve_deps=False, + cache_key=None, + compress_artifacts=False, ): """bench get-app clones a Frappe App from remote (GitHub or any other git server), and installs it on the current bench. This also resolves dependencies based on the @@ -359,8 +673,13 @@ def get_app( from bench.bench import Bench from bench.utils.app import check_existing_dir + if urlparse(git_url).scheme == "file" and cache_key: + git_url = coerce_url_to_name_if_possible(git_url, cache_key) + bench = Bench(bench_path) - app = App(git_url, branch=branch, bench=bench, soft_link=soft_link) + app = App( + git_url, branch=branch, bench=bench, soft_link=soft_link, cache_key=cache_key + ) git_url = app.url repo_name = app.repo branch = app.tag @@ -418,6 +737,15 @@ def get_app( ) return + if app.get_cached(): + app.install( + verbose=verbose, + skip_assets=skip_assets, + restart_bench=restart_bench, + using_cached=True, + ) + return + dir_already_exists, cloned_path = check_existing_dir(bench_path, repo_name) to_clone = not dir_already_exists @@ -443,6 +771,8 @@ def get_app( ): app.install(verbose=verbose, skip_assets=skip_assets, restart_bench=restart_bench) + app.set_cache(compress_artifacts) + def install_resolved_deps( bench, @@ -550,6 +880,7 @@ def install_app( restart_bench=True, skip_assets=False, resolution=UNSET_ARG, + using_cached=False, ): import bench.cli as bench_cli from bench.bench import Bench @@ -577,14 +908,16 @@ def install_app( if conf.get("developer_mode"): install_python_dev_dependencies(apps=app, bench_path=bench_path, verbose=verbose) - if os.path.exists(os.path.join(app_path, "package.json")): - yarn_install = "yarn install --verbose" if verbose else "yarn install" + if not using_cached and os.path.exists(os.path.join(app_path, "package.json")): + yarn_install = "yarn install --check-files" + if verbose: + yarn_install += " --verbose" bench.run(yarn_install, cwd=app_path) bench.apps.sync(app_name=app, required=resolution, branch=tag, app_dir=app_path) if not skip_assets: - build_assets(bench_path=bench_path, app=app) + build_assets(bench_path=bench_path, app=app, using_cached=using_cached) if restart_bench: # Avoiding exceptions here as production might not be set-up @@ -621,9 +954,9 @@ def pull_apps(apps=None, bench_path=".", reset=False): Here are your choices: 1. Merge the {app} app manually with "git pull" / "git pull --rebase" and fix conflicts. -1. Temporarily remove your changes with "git stash" or discard them completely +2. Temporarily remove your changes with "git stash" or discard them completely with "bench update --reset" or for individual repositries "git reset --hard" -2. If your changes are helpful for others, send in a pull request via GitHub and +3. 
If your changes are helpful for others, send in a pull request via GitHub and
 wait for them to be merged in the core."""
 					)
 					sys.exit(1)
diff --git a/bench/commands/__init__.py b/bench/commands/__init__.py
index 5ef142121..40ac8d5db 100755
--- a/bench/commands/__init__.py
+++ b/bench/commands/__init__.py
@@ -46,6 +46,7 @@ def bench_command(bench_path="."):
 	new_app,
 	pip,
 	remove_app,
+	validate_dependencies,
 )
 
 bench_command.add_command(init)
@@ -56,6 +57,7 @@ def bench_command(bench_path="."):
 bench_command.add_command(exclude_app_for_update)
 bench_command.add_command(include_app_for_update)
 bench_command.add_command(pip)
+bench_command.add_command(validate_dependencies)
 
 
 from bench.commands.update import (
@@ -72,6 +74,7 @@ def bench_command(bench_path="."):
 
 
 from bench.commands.utils import (
+	app_cache_helper,
 	backup_all_sites,
 	bench_src,
 	disable_production,
@@ -108,6 +111,7 @@ def bench_command(bench_path="."):
 bench_command.add_command(bench_src)
 bench_command.add_command(find_benches)
 bench_command.add_command(migrate_env)
+bench_command.add_command(app_cache_helper)
 
 
 from bench.commands.setup import setup
diff --git a/bench/commands/install.py b/bench/commands/install.py
index 31ad59b66..a0f1fd417 100644
--- a/bench/commands/install.py
+++ b/bench/commands/install.py
@@ -77,7 +77,7 @@ def install_nginx(user=None):
 	setup_sudoers(user)
 
 
-@click.command("virtualbox", help="Installs supervisor")
+@click.command("virtualbox", help="Installs virtualbox")
 def install_virtualbox():
 	run_playbook("vm_build.yml", tag="virtualbox")
 
diff --git a/bench/commands/make.py b/bench/commands/make.py
index 7369e9c86..682b7bb60 100755
--- a/bench/commands/make.py
+++ b/bench/commands/make.py
@@ -151,6 +151,18 @@ def drop(path):
 	default=False,
 	help="Resolve dependencies before installing app",
 )
+@click.option(
+	"--cache-key",
+	type=str,
+	default=None,
+	help="Caches get-app artifacts if provided (only the first 10 chars are used)",
+)
+@click.option(
+	"--compress-artifacts",
+	is_flag=True,
+	default=False,
+	help="Whether to gzip get-app artifacts that are to be cached",
+)
 def get_app(
 	git_url,
 	branch,
@@ -160,6 +172,8 @@ def get_app(
 	soft_link=False,
 	init_bench=False,
 	resolve_deps=False,
+	cache_key=None,
+	compress_artifacts=False,
 ):
 	"clone an app from the internet and set it up in your bench"
 	from bench.app import get_app
@@ -172,6 +186,8 @@ def get_app(
 		soft_link=soft_link,
 		init_bench=init_bench,
 		resolve_deps=resolve_deps,
+		cache_key=cache_key,
+		compress_artifacts=compress_artifacts,
 	)
 
 
@@ -237,3 +253,20 @@ def pip(ctx, args):
 
 	env_py = get_env_cmd("python")
 	os.execv(env_py, (env_py, "-m", "pip") + args)
+
+
+@click.command(
+	"validate-dependencies",
+	help="Validates that all requirements specified in frappe-dependencies are currently met.",
+)
+@click.pass_context
+def validate_dependencies(ctx):
+	"Validate all specified frappe-dependencies."
+	from bench.bench import Bench
+	from bench.app import App
+
+	bench = Bench(".")
+
+	for app_name in bench.apps:
+		app = App(app_name, bench=bench)
+		app.validate_app_dependencies(throw=True)
diff --git a/bench/commands/setup.py b/bench/commands/setup.py
index 9b13c269a..e291f86ae 100755
--- a/bench/commands/setup.py
+++ b/bench/commands/setup.py
@@ -73,7 +73,9 @@ def setup_supervisor(user=None, yes=False, skip_redis=False, skip_supervisord=Fa
 		generate_supervisor_config,
 	)
 
-	which("supervisorctl", raise_err=True)
+	if which("supervisorctl") is None:
+		click.secho("Please install `supervisor` to proceed", fg="red")
+		sys.exit(1)
 
 	if not skip_supervisord and "Permission denied" in get_cmd_output(
 		"supervisorctl status"
diff --git a/bench/commands/utils.py b/bench/commands/utils.py
index 9882e8f01..0a7d97c5c 100644
--- a/bench/commands/utils.py
+++ b/bench/commands/utils.py
@@ -176,3 +176,21 @@ def migrate_env(python, backup=True):
 	from bench.utils.bench import migrate_env
 
 	migrate_env(python=python, backup=backup)
+
+
+@click.command("app-cache", help="View or remove items belonging to bench get-app cache")
+@click.option("--clear", is_flag=True, default=False, help="Remove all items")
+@click.option(
+	"--remove-app",
+	default="",
+	help="Removes all items that match the provided app name",
+)
+@click.option(
+	"--remove-key",
+	default="",
+	help="Removes all items that match the provided cache key",
+)
+def app_cache_helper(clear=False, remove_app="", remove_key=""):
+	from bench.utils.bench import cache_helper
+
+	cache_helper(clear, remove_app, remove_key)
diff --git a/bench/config/procfile.py b/bench/config/procfile.py
index 38ee5c60b..7feaab722 100755
--- a/bench/config/procfile.py
+++ b/bench/config/procfile.py
@@ -1,19 +1,19 @@
-# imports - standard imports
 import os
+import platform
 
-# imports - third party imports
 import click
 
-# imports - module imports
 import bench
 from bench.app import use_rq
-from bench.utils import which
 from bench.bench import Bench
+from bench.utils import which
 
 
 def setup_procfile(bench_path, yes=False, skip_redis=False):
 	config = Bench(bench_path).conf
 	procfile_path = os.path.join(bench_path, "Procfile")
+
+	is_mac = platform.system() == "Darwin"
 	if not yes and os.path.exists(procfile_path):
 		click.confirm(
 			"A Procfile already exists and this will overwrite it. 
Do you want to continue?", @@ -30,6 +30,7 @@ def setup_procfile(bench_path, yes=False, skip_redis=False): CI=os.environ.get("CI"), skip_redis=skip_redis, workers=config.get("workers", {}), + is_mac=is_mac, ) ) diff --git a/bench/config/supervisor.py b/bench/config/supervisor.py index 1055d3baa..0eb6a8485 100644 --- a/bench/config/supervisor.py +++ b/bench/config/supervisor.py @@ -59,6 +59,7 @@ def generate_supervisor_config(bench_path, user=None, yes=False, skip_redis=Fals "skip_redis": skip_redis, "workers": config.get("workers", {}), "multi_queue_consumption": can_enable_multi_queue_consumption(bench_path), + "supervisor_startretries": 10, } ) diff --git a/bench/config/templates/Procfile b/bench/config/templates/Procfile index 4ef4b8428..1f2440fb5 100644 --- a/bench/config/templates/Procfile +++ b/bench/config/templates/Procfile @@ -5,18 +5,14 @@ redis_queue: redis-server config/redis_queue.conf web: bench serve {% if webserver_port -%} --port {{ webserver_port }} {%- endif %} socketio: {{ node }} apps/frappe/socketio.js + {% if not CI %} watch: bench watch {% endif %} -{% if use_rq -%} + schedule: bench schedule -worker: bench worker 1>> logs/worker.log 2>> logs/worker.error.log +worker: {{ 'OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES NO_PROXY=*' if is_mac else '' }} bench worker 1>> logs/worker.log 2>> logs/worker.error.log {% for worker_name, worker_details in workers.items() %} -worker_{{ worker_name }}: bench worker --queue {{ worker_name }} 1>> logs/worker.log 2>> logs/worker.error.log +worker_{{ worker_name }}: {{ 'OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES NO_PROXY=*' if is_mac else '' }} bench worker --queue {{ worker_name }} 1>> logs/worker.log 2>> logs/worker.error.log {% endfor %} -{% else %} -workerbeat: sh -c 'cd sites && exec ../env/bin/python -m frappe.celery_app beat -s scheduler.schedule' -worker: sh -c 'cd sites && exec ../env/bin/python -m frappe.celery_app worker -n jobs@%h -Ofair --soft-time-limit 360 --time-limit 390' -longjob_worker: sh -c 'cd sites && exec ../env/bin/python -m frappe.celery_app worker -n longjobs@%h -Ofair --soft-time-limit 1500 --time-limit 1530' -async_worker: sh -c 'cd sites && exec ../env/bin/python -m frappe.celery_app worker -n async@%h -Ofair --soft-time-limit 1500 --time-limit 1530' -{%- endif %} + diff --git a/bench/config/templates/nginx.conf b/bench/config/templates/nginx.conf index 5cba57828..cd6a67014 100644 --- a/bench/config/templates/nginx.conf +++ b/bench/config/templates/nginx.conf @@ -58,6 +58,7 @@ server { location /assets { try_files $uri =404; + add_header Cache-Control "max-age=31536000"; } location ~ ^/protected/(.*) { diff --git a/bench/config/templates/supervisor.conf b/bench/config/templates/supervisor.conf index 57fd85748..9303edb76 100644 --- a/bench/config/templates/supervisor.conf +++ b/bench/config/templates/supervisor.conf @@ -14,6 +14,7 @@ stopwaitsecs=40 killasgroup=true user={{ user }} directory={{ sites_dir }} +startretries={{ supervisor_startretries }} [program:{{ bench_name }}-frappe-schedule] command={{ bench_cmd }} schedule @@ -24,6 +25,7 @@ stdout_logfile={{ bench_dir }}/logs/schedule.log stderr_logfile={{ bench_dir }}/logs/schedule.error.log user={{ user }} directory={{ bench_dir }} +startretries={{ supervisor_startretries }} {% if not multi_queue_consumption %} [program:{{ bench_name }}-frappe-default-worker] @@ -39,6 +41,7 @@ directory={{ bench_dir }} killasgroup=true numprocs={{ background_workers }} process_name=%(program_name)s-%(process_num)d +startretries={{ supervisor_startretries }} {% endif %} 
[program:{{ bench_name }}-frappe-short-worker] @@ -54,6 +57,7 @@ directory={{ bench_dir }} killasgroup=true numprocs={{ background_workers }} process_name=%(program_name)s-%(process_num)d +startretries={{ supervisor_startretries }} [program:{{ bench_name }}-frappe-long-worker] command={{ bench_cmd }} worker --queue long{{',default,short' if multi_queue_consumption else ''}} @@ -68,6 +72,7 @@ directory={{ bench_dir }} killasgroup=true numprocs={{ background_workers }} process_name=%(program_name)s-%(process_num)d +startretries={{ supervisor_startretries }} {% for worker_name, worker_details in workers.items() %} [program:{{ bench_name }}-frappe-{{ worker_name }}-worker] @@ -83,6 +88,7 @@ directory={{ bench_dir }} killasgroup=true numprocs={{ worker_details["background_workers"] or background_workers }} process_name=%(program_name)s-%(process_num)d +startretries={{ supervisor_startretries }} {% endfor %} @@ -96,6 +102,7 @@ stdout_logfile={{ bench_dir }}/logs/redis-cache.log stderr_logfile={{ bench_dir }}/logs/redis-cache.error.log user={{ user }} directory={{ sites_dir }} +startretries={{ supervisor_startretries }} [program:{{ bench_name }}-redis-queue] command={{ redis_server }} {{ redis_queue_config }} @@ -106,6 +113,7 @@ stdout_logfile={{ bench_dir }}/logs/redis-queue.log stderr_logfile={{ bench_dir }}/logs/redis-queue.error.log user={{ user }} directory={{ sites_dir }} +startretries={{ supervisor_startretries }} {% endif %} {% if node %} @@ -118,6 +126,7 @@ stdout_logfile={{ bench_dir }}/logs/node-socketio.log stderr_logfile={{ bench_dir }}/logs/node-socketio.error.log user={{ user }} directory={{ bench_dir }} +startretries={{ supervisor_startretries }} {% endif %} [group:{{ bench_name }}-web] diff --git a/bench/utils/__init__.py b/bench/utils/__init__.py index 3fe17ad2e..4b9e65b62 100644 --- a/bench/utils/__init__.py +++ b/bench/utils/__init__.py @@ -7,8 +7,10 @@ import sys from functools import lru_cache from glob import glob +from pathlib import Path from shlex import split -from typing import List, Tuple +from tarfile import TarInfo +from typing import List, Optional, Tuple # imports - third party imports import click @@ -50,6 +52,15 @@ def is_frappe_app(directory: str) -> bool: return bool(is_frappe_app) +def get_bench_cache_path(sub_dir: Optional[str]) -> Path: + relative_path = "~/.cache/bench" + if sub_dir and not sub_dir.startswith("/"): + relative_path += f"/{sub_dir}" + + cache_path = os.path.expanduser(relative_path) + cache_path = Path(cache_path) + cache_path.mkdir(parents=True, exist_ok=True) + return cache_path @lru_cache(maxsize=None) def is_valid_frappe_branch(frappe_path: str, frappe_branch: str): @@ -406,7 +417,7 @@ def get_env_frappe_commands(bench_path=".") -> List: return [] -def find_org(org_repo): +def find_org(org_repo, using_cached: bool=False): import requests org_repo = org_repo[0] @@ -418,10 +429,13 @@ def find_org(org_repo): if res.ok: return org, org_repo - raise InvalidRemoteException(f"{org_repo} not found in frappe or erpnext") + if using_cached: + return "", org_repo + + raise InvalidRemoteException(f"{org_repo} not found under frappe or erpnext GitHub accounts") -def fetch_details_from_tag(_tag: str) -> Tuple[str, str, str]: +def fetch_details_from_tag(_tag: str, using_cached: bool=False) -> Tuple[str, str, str]: if not _tag: raise Exception("Tag is not provided") @@ -436,7 +450,7 @@ def fetch_details_from_tag(_tag: str) -> Tuple[str, str, str]: try: org, repo = org_repo except Exception: - org, repo = find_org(org_repo) + org, repo = 
find_org(org_repo, using_cached) return org, repo, tag @@ -559,3 +573,35 @@ def get_cmd_from_sysargv(): break return cmd_from_ctx + + +def get_app_cache_extract_filter( + count_threshold: int = 10_000, + size_threshold: int = 1_000_000_000, +): # -> Callable[[TarInfo, str], TarInfo | None] + state = dict(count=0, size=0) + + AbsoluteLinkError = Exception + def data_filter(m: TarInfo, _:str) -> TarInfo: + return m + + if (sys.version_info.major == 3 and sys.version_info.minor > 7) or sys.version_info.major > 3: + from tarfile import data_filter, AbsoluteLinkError + + def filter_function(member: TarInfo, dest_path: str) -> Optional[TarInfo]: + state["count"] += 1 + state["size"] += member.size + + if state["count"] > count_threshold: + raise RuntimeError(f"Number of entries exceeds threshold ({state['count']})") + + if state["size"] > size_threshold: + raise RuntimeError(f"Extracted size exceeds threshold ({state['size']})") + + try: + return data_filter(member, dest_path) + except AbsoluteLinkError: + # Links created by `frappe` after extraction + return None + + return filter_function diff --git a/bench/utils/app.py b/bench/utils/app.py index 3a18010da..7e1ecc810 100644 --- a/bench/utils/app.py +++ b/bench/utils/app.py @@ -4,7 +4,7 @@ import re import sys import subprocess -from typing import List +from typing import List, Optional from functools import lru_cache # imports - module imports @@ -230,18 +230,13 @@ def get_app_name(bench_path: str, folder_name: str) -> str: app_name = None apps_path = os.path.join(os.path.abspath(bench_path), "apps") - pyproject_path = os.path.join(apps_path, folder_name, "pyproject.toml") config_py_path = os.path.join(apps_path, folder_name, "setup.cfg") setup_py_path = os.path.join(apps_path, folder_name, "setup.py") - - if os.path.exists(pyproject_path): - try: - from tomli import load - except ImportError: - from tomllib import load - - with open(pyproject_path, "rb") as f: - app_name = load(f).get("project", {}).get("name") + + pyproject_path = os.path.join(apps_path, folder_name, "pyproject.toml") + pyproject = get_pyproject(pyproject_path) + if pyproject: + app_name = pyproject.get("project", {}).get("name") if not app_name and os.path.exists(config_py_path): from setuptools.config import read_configuration @@ -261,6 +256,19 @@ def get_app_name(bench_path: str, folder_name: str) -> str: return folder_name +def get_pyproject(pyproject_path: str) -> Optional[dict]: + if not os.path.exists(pyproject_path): + return None + + try: + from tomli import load + except ImportError: + from tomllib import load + + with open(pyproject_path, "rb") as f: + return load(f) + + def check_existing_dir(bench_path, repo_name): cloned_path = os.path.join(bench_path, "apps", repo_name) dir_already_exists = os.path.isdir(cloned_path) diff --git a/bench/utils/bench.py b/bench/utils/bench.py index 44a1c457a..701e37804 100644 --- a/bench/utils/bench.py +++ b/bench/utils/bench.py @@ -4,11 +4,13 @@ import logging import os import re +import shutil import subprocess import sys from functools import lru_cache from glob import glob from json.decoder import JSONDecodeError +from pathlib import Path # imports - third party imports import click @@ -16,7 +18,14 @@ # imports - module imports import bench from bench.exceptions import PatchError, ValidationError -from bench.utils import exec_cmd, get_bench_name, get_cmd_output, log, which +from bench.utils import ( + exec_cmd, + get_bench_cache_path, + get_bench_name, + get_cmd_output, + log, + which, +) logger = 
logging.getLogger(bench.PROJECT_NAME) @@ -129,7 +138,9 @@ def update_yarn_packages(bench_path=".", apps=None, verbose=None): app_path = os.path.join(apps_dir, app) if os.path.exists(os.path.join(app_path, "package.json")): click.secho(f"\nInstalling node dependencies for {app}", fg="yellow") - yarn_install = "yarn install --verbose" if verbose else "yarn install" + yarn_install = "yarn install --check-files" + if verbose: + yarn_install += " --verbose" bench.run(yarn_install, cwd=app_path) @@ -310,22 +321,26 @@ def restart_supervisor_processes(bench_path=".", web_workers=False, _raise=False supervisor_status = get_cmd_output("sudo supervisorctl status", cwd=bench_path) if web_workers and f"{bench_name}-web:" in supervisor_status: - group = f"{bench_name}-web:\t" + groups = [f"{bench_name}-web:\t"] elif f"{bench_name}-workers:" in supervisor_status: - group = f"{bench_name}-workers: {bench_name}-web:" + groups = [f"{bench_name}-web:", f"{bench_name}-workers:"] # backward compatibility elif f"{bench_name}-processes:" in supervisor_status: - group = f"{bench_name}-processes:" + groups = [f"{bench_name}-processes:"] # backward compatibility else: - group = "frappe:" + groups = ["frappe:"] - failure = bench.run(f"{sudo}supervisorctl restart {group}", _raise=_raise) - if failure: - log("restarting supervisor failed. Use `bench restart` to retry.", level=3) + for group in groups: + failure = bench.run(f"{sudo}supervisorctl restart {group}", _raise=_raise) + if failure: + log( + f"restarting supervisor group `{group}` failed. Use `bench restart` to retry.", + level=3, + ) def restart_systemd_processes(bench_path=".", web_workers=False, _raise=True): @@ -349,11 +364,16 @@ def restart_process_manager(bench_path=".", web_workers=False): exec_cmd(f"overmind restart {worker}", cwd=bench_path) -def build_assets(bench_path=".", app=None): +def build_assets(bench_path=".", app=None, using_cached=False): command = "bench build" if app: command += f" --app {app}" - exec_cmd(command, cwd=bench_path, env={"BENCH_DEVELOPER": "1"}) + + env = {"BENCH_DEVELOPER": "1"} + if using_cached: + env["USING_CACHED"] = "1" + + exec_cmd(command, cwd=bench_path, env=env) def handle_version_upgrade(version_upgrade, bench_path, force, reset, conf): @@ -634,3 +654,115 @@ def validate_branch(): ) sys.exit(1) + + +def cache_helper(clear=False, remove_app="", remove_key="") -> None: + can_remove = bool(remove_key or remove_app) + if not clear and not can_remove: + cache_list() + elif can_remove: + cache_remove(remove_app, remove_key) + elif clear: + cache_clear() + else: + pass # unreachable + + +def cache_list() -> None: + from datetime import datetime + + tot_size = 0 + tot_items = 0 + + printed_header = False + for item in get_bench_cache_path("apps").iterdir(): + if item.suffix not in [".tar", ".tgz"]: + continue + + stat = item.stat() + size_mb = stat.st_size / 1_000_000 + created = datetime.fromtimestamp(stat.st_ctime) + accessed = datetime.fromtimestamp(stat.st_atime) + + app = item.name.split("-")[0] + tot_items += 1 + tot_size += stat.st_size + compressed = item.suffix == ".tgz" + + if not printed_header: + click.echo( + f"{'APP':15} " + f"{'FILE':25} " + f"{'SIZE':>13} " + f"{'COMPRESSED'} " + f"{'CREATED':19} " + f"{'ACCESSED':19} " + ) + printed_header = True + + click.echo( + f"{app:15} " + f"{item.name:25} " + f"{size_mb:10.3f} MB " + f"{str(compressed):10} " + f"{created:%Y-%m-%d %H:%M:%S} " + f"{accessed:%Y-%m-%d %H:%M:%S} " + ) + + if tot_items: + click.echo(f"Total size {tot_size / 1_000_000:.3f} MB belonging to 
{tot_items} items") + else: + click.echo("No cached items") + + +def cache_remove(app: str = "", key: str = "") -> None: + rem_items = 0 + rem_size = 0 + for item in get_bench_cache_path("apps").iterdir(): + if not should_remove_item(item, app, key): + continue + + rem_items += 1 + rem_size += item.stat().st_size + item.unlink(True) + click.echo(f"Removed {item.name}") + + if rem_items: + click.echo(f"Cleared {rem_size / 1_000_000:.3f} MB belonging to {rem_items} items") + else: + click.echo("No items removed") + + +def should_remove_item(item: Path, app: str, key: str) -> bool: + if item.suffix not in [".tar", ".tgz"]: + return False + + name = item.name + if app and key and name.startswith(f"{app}-{key[:10]}."): + return True + + if app and name.startswith(f"{app}-"): + return True + + if key and f"-{key[:10]}." in name: + return True + + return False + + +def cache_clear() -> None: + cache_path = get_bench_cache_path("apps") + tot_items = len(os.listdir(cache_path)) + if not tot_items: + click.echo("No cached items") + return + + tot_size = get_dir_size(cache_path) + shutil.rmtree(cache_path) + + if tot_items: + click.echo(f"Cleared {tot_size / 1_000_000:.3f} MB belonging to {tot_items} items") + + +def get_dir_size(p: Path) -> int: + return sum(i.stat(follow_symlinks=False).st_size for i in p.iterdir()) diff --git a/pyproject.toml b/pyproject.toml index d1011ff18..01db07b02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "Click>=7.0", "GitPython~=3.1.30", "honcho", - "Jinja2~=3.0.3", + "Jinja2~=3.1.3", "python-crontab~=2.6.0", "requests", "semantic-version~=2.8.2", @@ -44,7 +44,7 @@ Source = "https://github.com/frappe/bench" [build-system] requires = [ - "hatchling>=1.6.0", + "hatchling>=1.6.0,<=1.21.0", ] build-backend = "hatchling.build"
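
The diff above introduces `frappe-dependencies`: a table under `[tool.bench]` in an app's pyproject.toml that `validate_dependency()` checks with `semantic_version`, surfaced through the new `bench validate-dependencies` command. Below is a minimal sketch of that check; the spec string and version numbers are illustrative, not taken from any real app.

```python
# Minimal sketch of the frappe-dependencies check added in this diff.
# It mirrors validate_dependency(): a requirement declared under
# [tool.bench.frappe-dependencies] in an app's pyproject.toml is matched
# against the installed version using semantic_version.
import semantic_version as sv

# Illustrative pyproject.toml content, as returned by get_pyproject()
pyproject = {
	"tool": {"bench": {"frappe-dependencies": {"frappe": ">=15.12.0,<16.0.0"}}}
}
installed = {"frappe": "15.20.0"}  # assumed installed version, for illustration only

deps = pyproject["tool"]["bench"]["frappe-dependencies"]
for dep, spec in deps.items():
	# Same membership test used by validate_dependency() above
	ok = sv.Version(installed[dep]) in sv.SimpleSpec(spec)
	print(f"{dep} {installed[dep]} satisfies '{spec}': {ok}")
```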
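
The second addition is the get-app cache, driven by the new `--cache-key` and `--compress-artifacts` options and managed with `bench app-cache`. The sketch below exercises the same flow through the Python entry points added in this diff; the app URL, branch and cache key are placeholders, and the calls assume the current working directory is an existing bench.

```python
# Hedged sketch: reusing the get-app cache from Python instead of the
# `bench get-app --cache-key ... --compress-artifacts` CLI. Only the first
# 10 characters of the key end up in the cache file name.
from bench.app import get_app
from bench.utils.bench import cache_helper

cache_key = "0123456789abcdef"  # placeholder, e.g. the app's commit hash

# The first run clones, builds and installs the app, then stores the
# apps/<app> directory as a tarball under ~/.cache/bench/apps/. A later
# run with the same key extracts that tarball instead of cloning, skips
# `yarn install`, and runs `bench build` with USING_CACHED=1 so prebuilt
# assets can be reused. node_modules are pruned from the tarball only
# when the app's declared frappe requirement admits a version >= 15.12.0
# (see can_frappe_use_cached above).
get_app(
	"https://github.com/frappe/hrms",  # illustrative app URL
	branch="version-15",
	cache_key=cache_key,
	compress_artifacts=True,
)

cache_helper()                   # list cached tarballs (as `bench app-cache` does)
cache_helper(remove_app="hrms")  # drop the cached tarballs for one app
```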