From 84d3b7fdea6846bf1541ee63e66362fbfb77648f Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Tue, 23 Jul 2024 07:14:22 -0400 Subject: [PATCH 01/39] Fixed bug associated with rename without title field populated Where's was an (if not) condition which should have been an (if) condition. This caused issues when renaming a file without populating the title field and with an associated performer. --- plugins/RenameFile/README.md | 6 +- plugins/RenameFile/renamefile.py | 99 ++++++++++++++++++++----------- plugins/RenameFile/renamefile.yml | 2 +- 3 files changed, 68 insertions(+), 39 deletions(-) diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md index 86cb5fb3..e373aea8 100644 --- a/plugins/RenameFile/README.md +++ b/plugins/RenameFile/README.md @@ -37,10 +37,12 @@ To avoid this error, refresh the URL before changing the Title field. ### Installation - Follow **Requirements** instructions. - In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **RenameFile**. -- Copy all the plugin files to this folder.(**C:\Users\MyUserName\.stash\plugins\RenameFile**). +- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\RenameFile**). - Restart Stash. That's it!!! ### Options -To change options, see **renamefile_settings.py** file. After making changes, go to http://localhost:9999/settings?tab=plugins, and click [Reload Plugins]. +- Main options are accessible in the GUI via Settings->Plugins->Plugins->[RenameFile]. +- Advanced options are avialable in the **renamefile_settings.py** file. After making changes, go to http://localhost:9999/settings?tab=plugins, and click [Reload Plugins]. + diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index be044561..f131a44d 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -27,6 +27,9 @@ DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint DEFAULT_FIELD_KEY_LIST = "title, performers, tags" # Default Field Key List with the desired order DEFAULT_SEPERATOR = "-" +PLUGIN_ARGS = False + + # ------------------------------------------ # ------------------------------------------ @@ -58,7 +61,11 @@ # Extract dry_run setting from settings dry_run = settings["dryRun"] dry_run_prefix = '' -logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run})************************************************") +try: + PLUGIN_ARGS = json_input['args']["mode"] +except: + pass +logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") if debugTracing: logger.info("settings: %s " % (settings,)) if dry_run: logger.info("Dry run mode is enabled.") @@ -96,6 +103,7 @@ double_separator = separator + separator + # GraphQL query to fetch all scenes query_all_scenes = """ query AllScenes { @@ -152,7 +160,7 @@ def form_filename(original_file_stem, scene_details, wrapper_styles): title = default_title # ................... 
- if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info(f"Debug Tracing (title=\"{title}\")................") # Function to add tag to filename def add_tag(tag_name): @@ -195,7 +203,9 @@ def add_tag(tag_name): if settings["performerAppend"]: performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])]) if performers: - if not include_performer_if_in_name or performers.lower() not in title.lower(): + if debugTracing: logger.info(f"Debug Tracing (include_performer_if_in_name={include_performer_if_in_name})................") + if include_performer_if_in_name or performers.lower() not in title.lower(): + if debugTracing: logger.info(f"Debug Tracing (performers={performers})................") if wrapper_styles.get('performers'): filename_parts.append(f"{wrapper_styles['performers'][0]}{performers}{wrapper_styles['performers'][1]}") else: @@ -237,7 +247,7 @@ def add_tag(tag_name): if debugTracing: logger.info(f"Debug Tracing (include_tag_if_in_name={include_tag_if_in_name})................") if include_tag_if_in_name or tag_name.lower() not in title.lower(): add_tag(tag_name) - if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") new_filename = separator.join(filename_parts).replace(double_separator, separator) @@ -408,49 +418,66 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): return new_filename, original_path_info, new_path_info -if debugTracing: logger.info("Debug Tracing................") -# Execute the GraphQL query to fetch all scenes -scene_result = graphql_request(query_all_scenes) -if debugTracing: logger.info("Debug Tracing................") -all_scenes = scene_result.get('data', {}).get('allScenes', []) -if debugTracing: logger.info("Debug Tracing................") -if not all_scenes: +# Main default function for rename scene +def rename_files_task(): + if debugTracing: logger.info("Debug Tracing................") + # Execute the GraphQL query to fetch all scenes + scene_result = graphql_request(query_all_scenes) + if debugTracing: logger.info("Debug Tracing................") + all_scenes = scene_result.get('data', {}).get('allScenes', []) + if debugTracing: logger.info("Debug Tracing................") + if not all_scenes: + if debugTracing: logger.info("Debug Tracing................") + log.error("No scenes found.") + logger.error("No scenes found.") + exit() if debugTracing: logger.info("Debug Tracing................") - log.error("No scenes found.") - logger.error("No scenes found.") - exit() -if debugTracing: logger.info("Debug Tracing................") -# Find the scene with the latest updated_at timestamp -latest_scene = max(all_scenes, key=lambda scene: scene['updated_at']) + # Find the scene with the latest updated_at timestamp + latest_scene = max(all_scenes, key=lambda scene: scene['updated_at']) -# Extract the ID of the latest scene -latest_scene_id = latest_scene.get('id') + # Extract the ID of the latest scene + latest_scene_id = latest_scene.get('id') -# Extract wrapper styles -wrapper_styles = config["wrapper_styles"] + # Extract wrapper styles + wrapper_styles = config["wrapper_styles"] -# Read stash directory from renamefile_settings.py -stash_directory = config.get('stash_directory', '') -if debugTracing: logger.info("Debug Tracing................") + # Read stash directory from renamefile_settings.py + stash_directory = config.get('stash_directory', '') + if debugTracing: 
logger.info("Debug Tracing................") -if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info("Debug Tracing................") -# Rename the latest scene and trigger metadata scan -new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory) -if debugTracing: logger.info("Debug Tracing................") + # Rename the latest scene and trigger metadata scan + new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory) + if debugTracing: logger.info("Debug Tracing................") -# Log dry run state and indicate if no changes were made -if dry_run: - log.info("Dry run: Script executed in dry run mode. No changes were made.") - logger.info("Dry run: Script executed in dry run mode. No changes were made.") -elif not new_filename: - logger.info("No changes were made.") + # Log dry run state and indicate if no changes were made + if dry_run: + log.info("Dry run: Script executed in dry run mode. No changes were made.") + logger.info("Dry run: Script executed in dry run mode. No changes were made.") + elif not new_filename: + logger.info("No changes were made.") + else: + logger.info("Change success!") + return + +def fetch_dup_filename_tags(): # Place holder for new implementation + return + +if PLUGIN_ARGS == "fetch_dup_filename_tags": + fetch_dup_filename_tags() +elif PLUGIN_ARGS == "rename_files_task": + rename_files_task() else: - logger.info("Change success!") + rename_files_task() + if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************") + + # ToDo List # Add logic to max_filename_length code so it checks base file length and checks folder length, instead of lumping them altogether. # Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan. - # Get variables from the Plugins Settings UI instead of from renamefile_settings.py \ No newline at end of file + # Get variables from the Plugins Settings UI instead of from renamefile_settings.py + # Add code to get tags from duplicate filenames \ No newline at end of file diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index 75570699..b838025c 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,6 +1,6 @@ name: RenameFile description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. -version: 0.2.5 +version: 0.2.6 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: dryRun: From 1cee24801f26e5a363f07b9d5e2204957548f73e Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 03:30:21 -0400 Subject: [PATCH 02/39] Added ChangeFileMonitor and added more features to RenameFile Added new plugin ChangeFileMonitor. Made following changes to RenameFile plugin. Added fields galleries, resolution, and width. Fixed bug associated with studio. Added logic to limit the log file size. Added logic to only get [Change success] logging when no errors occurs. Change default fields to include studio. Added postfix styles, which was mainly needed to properly format resolution field, but can be used for the other fields. Consolidated [Include Existing Key Field] options into one option. 
Cleaned up code and updated version --- plugins/ChangeFileMonitor | 1 + plugins/RenameFile/README.md | 27 ++- plugins/RenameFile/manifest | 14 ++ plugins/RenameFile/renamefile.py | 276 +++++++++++++--------- plugins/RenameFile/renamefile.yml | 57 +++-- plugins/RenameFile/renamefile_settings.py | 40 +++- 6 files changed, 263 insertions(+), 152 deletions(-) create mode 120000 plugins/ChangeFileMonitor create mode 100644 plugins/RenameFile/manifest diff --git a/plugins/ChangeFileMonitor b/plugins/ChangeFileMonitor new file mode 120000 index 00000000..8ca902f7 --- /dev/null +++ b/plugins/ChangeFileMonitor @@ -0,0 +1 @@ +../../Axter-Stash/plugins/ChangeFileMonitor \ No newline at end of file diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md index e373aea8..7ea05101 100644 --- a/plugins/RenameFile/README.md +++ b/plugins/RenameFile/README.md @@ -1,5 +1,5 @@ -# RenameFile: Ver 0.2.5 -RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following two main task. +# RenameFile: Ver 0.4.0 (By David Maisonave) +RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks. - **Rename Scene File Name** (On-The-Fly) - **Append tag names** to file name - **Append Performer names** to file name @@ -21,6 +21,15 @@ Note: This script is **largely** based on the [Renamer](https://github.com/Serec - To add these fields see the [Key Fields] option in Settings->Plugins->Plugins->[RenameFile]. - The [Key Fields] can also be used to change the order for the file name format. - There are many options in Plugins->[RenameFile] UI, and all the options have detailed descriptions. Please advise us if any of the options need further clarification, and provide example details. + - **[Key Fields]**: (This option may require more detail than could be provided in the GUI) + - Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. (Default=title,performers,studio,tags) + - For example, if the user wants the performers name before the title, set the performers name first. + - Example:"performers,title,tags". + - This is an example of user adding height:"title,performers,tags,height" + - Here's an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date". + - The **resolution** field equals width + height. + - The date field is **not** populated by default unless the user explicitly adds the date value to a scene. + - If **[Key Fields]** is empty, the default value is used. (Default=title,performers,studio,tags) - There are additional options in renamefile_settings.py, but these options should only be changed by advanced users, and any changes should be tested first with the [Dry-Run] option enabled. **Note:** On Windows 10/11, the file can not be renamed while it's playing. It will result in following error: @@ -30,15 +39,17 @@ Error: [WinError 32] The process cannot access the file because it is being used To avoid this error, refresh the URL before changing the Title field. ### Requirements -`pip install stashapp-tools` - -`pip install pyYAML` +pip install -r requirements.txt +- Or manually install each requirement: + - `pip install stashapp-tools` + - `pip install pyYAML` + - `pip install requests` ### Installation - Follow **Requirements** instructions. -- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **RenameFile**. 
-- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\RenameFile**). -- Restart Stash. +- Create a folder named **RenameFile**, in the stash plugin directory (C:\Users\MyUserName\.stash\plugins). +- Download the latest version from the following link: [RenameFile](https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile), and copy the plugin files to folder.(**C:\Users\MyUserName\\.stash\plugins\RenameFile**). +- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins. That's it!!! diff --git a/plugins/RenameFile/manifest b/plugins/RenameFile/manifest new file mode 100644 index 00000000..a98d0dcf --- /dev/null +++ b/plugins/RenameFile/manifest @@ -0,0 +1,14 @@ +id: renamefile +name: RenameFile +metadata: + description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. +version: 0.4.0 +date: "2024-07-26 08:00:00" +requires: [pip install stashapp-tools, pip install pyYAML] +source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile +files: +- README.md +- renamefile.yml +- renamefile.py +- renamefile_settings.py +- requirements.txt diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index f131a44d..d7c55889 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -1,71 +1,98 @@ -import requests +# Description: This is a Stash plugin which allows users to rename the video (scene) file name by editing the [Title] field located in the scene [Edit] tab. +# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile +# Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer import os -import logging +import sys import shutil -from pathlib import Path import hashlib import json -import sys +from pathlib import Path +import requests +import logging +from logging.handlers import RotatingFileHandler +import stashapi.log as log # Importing stashapi.log as log for critical events ONLY from stashapi.stashapp import StashInterface +from renamefile_settings import config # Import settings from renamefile_settings.py -# This is a Stash plugin which allows users to rename the video (scene) file name by editing the [Title] field located in the scene [Edit] tab. 
- -# Importing stashapi.log as log for critical events -import stashapi.log as log - -# Import settings from renamefile_settings.py -from renamefile_settings import config - -# Get the directory of the script -script_dir = Path(__file__).resolve().parent - -# Configure logging for your script -log_file_path = script_dir / 'renamefile.log' +# ********************************************************************** +# Constant global variables -------------------------------------------- +LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" -logging.basicConfig(filename=log_file_path, level=logging.INFO, format=FORMAT) -logger = logging.getLogger('renamefile') DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint -DEFAULT_FIELD_KEY_LIST = "title, performers, tags" # Default Field Key List with the desired order +DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order DEFAULT_SEPERATOR = "-" PLUGIN_ARGS = False +PLUGIN_ARGS_MODE = False +WRAPPER_STYLES = config["wrapper_styles"] +POSTFIX_STYLES = config["postfix_styles"] +# GraphQL query to fetch all scenes +QUERY_ALL_SCENES = """ + query AllScenes { + allScenes { + id + updated_at + } + } +""" +RFH = RotatingFileHandler( + filename=LOG_FILE_PATH, + mode='a', + maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K + backupCount=2, + encoding=None, + delay=0 +) + +# ********************************************************************** +# Global variables -------------------------------------------- +inputToUpdateScenePost = False +exitMsg = "Change success!!" + +# Configure local log file for plugin within plugin folder having a limited max log file size +logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) +logger = logging.getLogger('renamefile') - - -# ------------------------------------------ -# ------------------------------------------ -# Code to fetch variables from Plugin UI +# ********************************************************************** +# ---------------------------------------------------------------------- +# Code section to fetch variables from Plugin UI and from renamefile_settings.py json_input = json.loads(sys.stdin.read()) FRAGMENT_SERVER = json_input["server_connection"] stash = StashInterface(FRAGMENT_SERVER) pluginConfiguration = stash.get_configuration()["plugins"] settings = { - "dryRun": False, - "fileRenameViaMove": False, "performerAppend": False, - "performerIncludeInFileName": False, + "studioAppend": False, "tagAppend": False, - "tagIncludeInFileName": False, - "zFieldKeyList": DEFAULT_FIELD_KEY_LIST, + "z_keyFIeldsIncludeInFileName": False, + "zafileRenameViaMove": False, + "zfieldKeyList": DEFAULT_FIELD_KEY_LIST, "zgraphqlEndpoint": DEFAULT_ENDPOINT, "zmaximumTagKeys": 12, "zpathToExclude": "", "zseparators": DEFAULT_SEPERATOR, "ztagWhitelist": "", "zzdebugTracing": False, + "zzdryRun": False, } if "renamefile" in pluginConfiguration: settings.update(pluginConfiguration["renamefile"]) -# ------------------------------------------ +# ---------------------------------------------------------------------- debugTracing = settings["zzdebugTracing"] # Extract dry_run setting from settings -dry_run = settings["dryRun"] +dry_run = settings["zzdryRun"] dry_run_prefix = '' try: - PLUGIN_ARGS = json_input['args']["mode"] + PLUGIN_ARGS = json_input['args'] + 
PLUGIN_ARGS_MODE = json_input['args']["mode"] except: pass -logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") +try: + if json_input['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice +except: + pass +logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************") if debugTracing: logger.info("settings: %s " % (settings,)) if dry_run: logger.info("Dry run mode is enabled.") @@ -82,15 +109,16 @@ if debugTracing: logger.info("Debug Tracing................") if not tag_whitelist: tag_whitelist = "" +if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................") endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint -if debugTracing: logger.info("Debug Tracing................") if not endpoint or endpoint == "": endpoint = DEFAULT_ENDPOINT +if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................") # Extract rename_files and move_files settings from renamefile_settings.py rename_files = config["rename_files"] -move_files = settings["fileRenameViaMove"] +move_files = settings["zafileRenameViaMove"] if debugTracing: logger.info("Debug Tracing................") -fieldKeyList = settings["zFieldKeyList"] # Default Field Key List with the desired order +fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order if not fieldKeyList or fieldKeyList == "": fieldKeyList = DEFAULT_FIELD_KEY_LIST fieldKeyList = fieldKeyList.replace(" ", "") @@ -98,22 +126,11 @@ fieldKeyList = fieldKeyList.split(",") if debugTracing: logger.info(f"Debug Tracing (fieldKeyList={fieldKeyList})................") separator = settings["zseparators"] -# ------------------------------------------ -# ------------------------------------------ -double_separator = separator + separator - - +# ---------------------------------------------------------------------- +# ********************************************************************** -# GraphQL query to fetch all scenes -query_all_scenes = """ - query AllScenes { - allScenes { - id - updated_at - } - } -""" -if debugTracing: logger.info("Debug Tracing................") +double_separator = separator + separator +if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................") # Function to make GraphQL requests def graphql_request(query, variables=None): @@ -142,14 +159,13 @@ def should_exclude_path(scene_details): return False # Function to form the new filename based on scene details and user settings -def form_filename(original_file_stem, scene_details, wrapper_styles): +def form_filename(original_file_stem, scene_details): if debugTracing: logger.info("Debug Tracing................") filename_parts = [] tag_keys_added = 0 default_title = '' if_notitle_use_org_filename = config["if_notitle_use_org_filename"] - include_tag_if_in_name = settings["tagIncludeInFileName"] - include_performer_if_in_name = settings["performerIncludeInFileName"] + include_keyField_if_in_name = settings["z_keyFIeldsIncludeInFileName"] if if_notitle_use_org_filename: default_title = original_file_stem # ................... 
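# ----------------------------------------------------------------------
# A minimal, self-contained sketch of the plugin-input dispatch pattern the
# hunks above introduce: Stash pipes one JSON object to the plugin on stdin;
# a [Plugin Tasks] button run populates args["mode"], while a hook-triggered
# run populates args["hookContext"], which the patch uses to run the rename
# logic exactly once. This is illustrative only; the pass bodies stand in
# for the plugin's real entry points.
import json
import sys

json_input = json.loads(sys.stdin.read())      # Stash sends plugin input as JSON on stdin
args = json_input.get("args", {})
mode = args.get("mode")                        # set for [Plugin Tasks] button runs
hook_context = args.get("hookContext", {})     # set for hook-triggered runs

if mode == "rename_files_task":
    pass  # task run: rename the most recently updated scene
elif hook_context.get("input"):
    pass  # Scene.Update.Post hook run: rename once, avoiding a duplicate pass
# ----------------------------------------------------------------------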
@@ -166,15 +182,14 @@ def form_filename(original_file_stem, scene_details, wrapper_styles): def add_tag(tag_name): nonlocal tag_keys_added nonlocal filename_parts - nonlocal wrapper_styles if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)): return # Skip adding more tags if the maximum limit is reached # Check if the tag name is in the whitelist if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist): - if wrapper_styles.get('tag'): - filename_parts.append(f"{wrapper_styles['tag'][0]}{tag_name}{wrapper_styles['tag'][1]}") + if WRAPPER_STYLES.get('tag'): + filename_parts.append(f"{WRAPPER_STYLES['tag'][0]}{tag_name}{WRAPPER_STYLES['tag'][1]}") if debugTracing: logger.info("Debug Tracing................") else: filename_parts.append(tag_name) @@ -187,69 +202,120 @@ def add_tag(tag_name): for key in fieldKeyList: if key == 'studio': - studio_name = scene_details.get('studio', {}).get('name', '') - if studio_name: - if wrapper_styles.get('studio'): - filename_parts.append(f"{wrapper_styles['studio'][0]}{studio_name}{wrapper_styles['studio'][1]}") - else: - filename_parts.append(studio_name) + if settings["studioAppend"]: + if debugTracing: logger.info("Debug Tracing................") + studio_name = scene_details.get('studio', {}) + if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................") + if studio_name: + studio_name = scene_details.get('studio', {}).get('name', '') + if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................") + if studio_name: + studio_name += POSTFIX_STYLES.get('studio') + if debugTracing: logger.info("Debug Tracing................") + if include_keyField_if_in_name or studio_name.lower() not in title.lower(): + if WRAPPER_STYLES.get('studio'): + filename_parts.append(f"{WRAPPER_STYLES['studio'][0]}{studio_name}{WRAPPER_STYLES['studio'][1]}") + else: + filename_parts.append(studio_name) elif key == 'title': if title: # This value has already been fetch in start of function because it needs to be defined before tags and performers - if wrapper_styles.get('title'): - filename_parts.append(f"{wrapper_styles['title'][0]}{title}{wrapper_styles['title'][1]}") + title += POSTFIX_STYLES.get('title') + if WRAPPER_STYLES.get('title'): + filename_parts.append(f"{WRAPPER_STYLES['title'][0]}{title}{WRAPPER_STYLES['title'][1]}") else: filename_parts.append(title) elif key == 'performers': if settings["performerAppend"]: performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])]) if performers: - if debugTracing: logger.info(f"Debug Tracing (include_performer_if_in_name={include_performer_if_in_name})................") - if include_performer_if_in_name or performers.lower() not in title.lower(): + performers += POSTFIX_STYLES.get('performers') + if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name})................") + if include_keyField_if_in_name or performers.lower() not in title.lower(): if debugTracing: logger.info(f"Debug Tracing (performers={performers})................") - if wrapper_styles.get('performers'): - filename_parts.append(f"{wrapper_styles['performers'][0]}{performers}{wrapper_styles['performers'][1]}") + if WRAPPER_STYLES.get('performers'): + filename_parts.append(f"{WRAPPER_STYLES['performers'][0]}{performers}{WRAPPER_STYLES['performers'][1]}") 
else: filename_parts.append(performers) elif key == 'date': scene_date = scene_details.get('date', '') + if debugTracing: logger.info("Debug Tracing................") if scene_date: - if wrapper_styles.get('date'): - filename_parts.append(f"{wrapper_styles['date'][0]}{scene_date}{wrapper_styles['date'][1]}") + scene_date += POSTFIX_STYLES.get('date') + if debugTracing: logger.info("Debug Tracing................") + if WRAPPER_STYLES.get('date'): + filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}") else: filename_parts.append(scene_date) + elif key == 'resolution': + width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string + height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string + if width and height: + resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution') + if WRAPPER_STYLES.get('resolution'): + filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}") + else: + filename_parts.append(resolution) + elif key == 'width': + width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string + if width: + width += POSTFIX_STYLES.get('width') + if WRAPPER_STYLES.get('width'): + filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}") + else: + filename_parts.append(width) elif key == 'height': height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string if height: - height += 'p' - if wrapper_styles.get('height'): - filename_parts.append(f"{wrapper_styles['height'][0]}{height}{wrapper_styles['height'][1]}") + height += POSTFIX_STYLES.get('height') + if WRAPPER_STYLES.get('height'): + filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}") else: filename_parts.append(height) elif key == 'video_codec': video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase if video_codec: - if wrapper_styles.get('video_codec'): - filename_parts.append(f"{wrapper_styles['video_codec'][0]}{video_codec}{wrapper_styles['video_codec'][1]}") + video_codec += POSTFIX_STYLES.get('video_codec') + if WRAPPER_STYLES.get('video_codec'): + filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}") else: filename_parts.append(video_codec) elif key == 'frame_rate': - frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + ' FPS' # Convert to string and append ' FPS' + frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS' if frame_rate: - if wrapper_styles.get('frame_rate'): - filename_parts.append(f"{wrapper_styles['frame_rate'][0]}{frame_rate}{wrapper_styles['frame_rate'][1]}") + frame_rate += POSTFIX_STYLES.get('frame_rate') + if WRAPPER_STYLES.get('frame_rate'): + filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}") else: filename_parts.append(frame_rate) + elif key == 'galleries': + galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])] + if debugTracing: logger.info("Debug Tracing................") + for gallery_name in galleries: + if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})................") + if include_keyField_if_in_name or 
gallery_name.lower() not in title.lower(): + gallery_name += POSTFIX_STYLES.get('galleries') + if WRAPPER_STYLES.get('galleries'): + filename_parts.append(f"{WRAPPER_STYLES['galleries'][0]}{gallery_name}{WRAPPER_STYLES['galleries'][1]}") + if debugTracing: logger.info("Debug Tracing................") + else: + filename_parts.append(gallery_name) + if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info(f"Debug Tracing (gallery_name={gallery_name})................") + if debugTracing: logger.info("Debug Tracing................") elif key == 'tags': if settings["tagAppend"]: tags = [tag.get('name', '') for tag in scene_details.get('tags', [])] if debugTracing: logger.info("Debug Tracing................") for tag_name in tags: - if debugTracing: logger.info(f"Debug Tracing (include_tag_if_in_name={include_tag_if_in_name})................") - if include_tag_if_in_name or tag_name.lower() not in title.lower(): - add_tag(tag_name) + if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})................") + if include_keyField_if_in_name or tag_name.lower() not in title.lower(): + add_tag(tag_name + POSTFIX_STYLES.get('tag')) if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") + if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info(f"Debug Tracing (filename_parts={filename_parts})................") new_filename = separator.join(filename_parts).replace(double_separator, separator) + if debugTracing: logger.info(f"Debug Tracing (new_filename={new_filename})................") # Check if the scene's path matches any of the excluded paths if exclude_paths and should_exclude_path(scene_details): @@ -267,10 +333,14 @@ def find_scene_by_id(scene_id): date files { path + width height video_codec frame_rate } + galleries { + title + } studio { name } @@ -287,6 +357,7 @@ def find_scene_by_id(scene_id): return scene_result.get('data', {}).get('findScene') def move_or_rename_files(scene_details, new_filename, original_parent_directory): + global exitMsg studio_directory = None for file_info in scene_details['files']: path = file_info['path'] @@ -325,12 +396,13 @@ def move_or_rename_files(scene_details, new_filename, original_parent_directory) except FileNotFoundError: log.error(f"File not found: {path}. Skipping...") logger.error(f"File not found: {path}. Skipping...") + exitMsg = "File not found" continue except OSError as e: log.error(f"Failed to move or rename file: {path}. Error: {e}") logger.error(f"Failed to move or rename file: {path}. 
Error: {e}") + exitMsg = "Failed to move or rename file" continue - return new_path # Return the new_path variable after the loop def perform_metadata_scan(metadata_scan_path): @@ -345,7 +417,8 @@ def perform_metadata_scan(metadata_scan_path): logger.info(f"Mutation string: {mutation_metadata_scan}") graphql_request(mutation_metadata_scan) -def rename_scene(scene_id, wrapper_styles, stash_directory): +def rename_scene(scene_id, stash_directory): + global exitMsg scene_details = find_scene_by_id(scene_id) if debugTracing: logger.info(f"Debug Tracing (scene_details={scene_details})................") if not scene_details: @@ -372,7 +445,7 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): original_file_stem = Path(original_file_path).stem original_file_name = Path(original_file_path).name - new_filename = form_filename(original_file_stem, scene_details, wrapper_styles) + new_filename = form_filename(original_file_stem, scene_details) newFilenameWithExt = new_filename + Path(original_file_path).suffix if debugTracing: logger.info(f"Debug Tracing (original_file_name={original_file_name})(newFilenameWithExt={newFilenameWithExt})................") if original_file_name == newFilenameWithExt: @@ -400,14 +473,13 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): os.rename(original_file_path, new_file_path) logger.info(f"{dry_run_prefix}Renamed file: {original_file_path} -> {new_file_path}") except Exception as e: + exitMsg = "Failed to rename file" log.error(f"Failed to rename file: {original_file_path}. Error: {e}") logger.error(f"Failed to rename file: {original_file_path}. Error: {e}") metadata_scan_path = original_parent_directory perform_metadata_scan(metadata_scan_path) - # ToDo: Add logic to the below code section so it checks base file length and checks folder length, instead of lumping them altogether. 
- # Current DB schema allows file folder max length to be 255, and max base filename to be 255 max_filename_length = int(config["max_filename_length"]) if len(new_filename) > max_filename_length: extension_length = len(Path(original_file_path).suffix) @@ -415,14 +487,15 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): truncated_filename = new_filename[:max_base_filename_length] hash_suffix = hashlib.md5(new_filename.encode()).hexdigest() new_filename = truncated_filename + '_' + hash_suffix + Path(original_file_path).suffix - + + if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................") return new_filename, original_path_info, new_path_info # Main default function for rename scene def rename_files_task(): if debugTracing: logger.info("Debug Tracing................") # Execute the GraphQL query to fetch all scenes - scene_result = graphql_request(query_all_scenes) + scene_result = graphql_request(QUERY_ALL_SCENES) if debugTracing: logger.info("Debug Tracing................") all_scenes = scene_result.get('data', {}).get('allScenes', []) if debugTracing: logger.info("Debug Tracing................") @@ -439,19 +512,13 @@ def rename_files_task(): # Extract the ID of the latest scene latest_scene_id = latest_scene.get('id') - - # Extract wrapper styles - wrapper_styles = config["wrapper_styles"] - # Read stash directory from renamefile_settings.py stash_directory = config.get('stash_directory', '') if debugTracing: logger.info("Debug Tracing................") - if debugTracing: logger.info("Debug Tracing................") - # Rename the latest scene and trigger metadata scan - new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory) - if debugTracing: logger.info("Debug Tracing................") + new_filename = rename_scene(latest_scene_id, stash_directory) + if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................") # Log dry run state and indicate if no changes were made if dry_run: @@ -460,24 +527,21 @@ def rename_files_task(): elif not new_filename: logger.info("No changes were made.") else: - logger.info("Change success!") + logger.info(f"{exitMsg}") return def fetch_dup_filename_tags(): # Place holder for new implementation return -if PLUGIN_ARGS == "fetch_dup_filename_tags": +if PLUGIN_ARGS_MODE == "fetch_dup_filename_tags": fetch_dup_filename_tags() -elif PLUGIN_ARGS == "rename_files_task": +elif PLUGIN_ARGS_MODE == "rename_files_task": rename_files_task() -else: +elif inputToUpdateScenePost: rename_files_task() if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************") - -# ToDo List - # Add logic to max_filename_length code so it checks base file length and checks folder length, instead of lumping them altogether. +# ToDo: Wish List # Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan. - # Get variables from the Plugins Settings UI instead of from renamefile_settings.py # Add code to get tags from duplicate filenames \ No newline at end of file diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index b838025c..14006c3a 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,37 +1,34 @@ name: RenameFile -description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. 
-version: 0.2.6 +description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. +# By David Maisonave (aka Axter) 2024 +version: 0.4.0 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: - dryRun: - displayName: Dry Run - description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified. - type: BOOLEAN - fileRenameViaMove: - displayName: Rename Using Move - description: Enable to have file moved when renaming file. - type: BOOLEAN performerAppend: displayName: Append Performers description: Enable to append performers name to file name when renaming a file. Requires performers to be included in [Key Fields] list, which by default it is included. - type: BOOLEAN - performerIncludeInFileName: - displayName: Include Existing Performers - description: Enable to append performer even if performers name already exists in the original file name. - type: BOOLEAN + type: BOOLEAN + studioAppend: + displayName: Append Studio + description: Enable to append studio name to file name when renaming a file. Requires studio to be included in [Key Fields] list, which by default it is included. + type: BOOLEAN tagAppend: displayName: Append Tags description: Enable to append tag names to file name when renaming a file. Requires tags to be included in [Key Fields] list, which by default it is included. - type: BOOLEAN - tagIncludeInFileName: - displayName: Include Existing Tags - description: Enable to append tag name even if tag already exists in original file name. - type: BOOLEAN - zFieldKeyList: + type: BOOLEAN + z_keyFIeldsIncludeInFileName: # Prefixing z_ to variable names so that the GUI will place these fields after above fields (alphabatically listed) + displayName: Include Existing Key Field + description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name. + type: BOOLEAN + zafileRenameViaMove: + displayName: Rename Using Move + description: Enable to have file moved when renaming file. + type: BOOLEAN + zfieldKeyList: displayName: Key Fields - description: '(Default=title,performers,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,date,height,video_codec,frame_rate".' + description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".' 
type: STRING - zgraphqlEndpoint: # Prefixing z_ to variable names so that the GUI will place these fields after above fields (alphabatically listed) + zgraphqlEndpoint: displayName: GraphQL Endpoint description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default. type: STRING @@ -54,7 +51,11 @@ settings: zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log - type: BOOLEAN + type: BOOLEAN + zzdryRun: + displayName: Dry Run + description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified. + type: BOOLEAN exec: - python - "{pluginDir}/renamefile.py" @@ -65,7 +66,11 @@ hooks: triggeredBy: - Scene.Update.Post tasks: - - name: Rename Files Task - description: Renames scene files. + # - name: Fetch Tags + # description: Get tags from duplicate file names. + # defaultArgs: + # mode: fetch_dup_filename_tags + - name: Rename Last Scene + description: Renames file of last updated scene. defaultArgs: mode: rename_files_task diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py index a027bd01..c4eeab9b 100644 --- a/plugins/RenameFile/renamefile_settings.py +++ b/plugins/RenameFile/renamefile_settings.py @@ -1,4 +1,4 @@ -# Importing config dictionary +# By David Maisonave (aka Axter) 2024 # RenameFile plugin main configuration options are available on the Stash GUI under Settings->Plugins->Plugins->[RenameFile]. # Most users should only use the GUI options. # The configuration options in this file are for advanced users ONLY!!! @@ -9,15 +9,33 @@ config = { # Define wrapper styles for different parts of the filename. # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "wrapper_styles": { - "studio": '[]', # Modify these values to change how each part of the filename is wrapped. - "title": '', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "performers": '()', # Modify these values to change how each part of the filename is wrapped. - "date": '[]', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "height": '()', # Modify these values to change how each part of the filename is wrapped. - "video_codec": '[]', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "frame_rate": '[]', # Modify these values to change how each part of the filename is wrapped. - "tag": '[]' # Modify these values to change how each tag part of the filename is wrapped. + "wrapper_styles": { # Modify these values to change how each part of the filename is wrapped. + "title": '', + "performers": '()', + "tag": '[]', + "studio": '{}', + "galleries": '()', + "resolution": '', # Contains both WITH and HEIGHT + "width": '', + "height": '', + "video_codec": '', + "frame_rate": '', + "date": '()', # This field is not populated in the DB by default. It's usually empty. 
+ }, + # Define the field postfix + "postfix_styles": { + "title": '', + "performers": '', + "tag": '', + "studio": '', + "galleries": '', + "resolution": 'P', # Contains both WITH and HEIGHT + "width": 'W', + "height": 'P', + "width_height_seperator": 'x', # Used in RESOLUTION field as the string seperating WITH and HEIGHT. Example: 720x480 or 1280X720 + "video_codec": '', + "frame_rate": 'FR', + "date": '', }, # Define whether files should be renamed when moved "rename_files": True, @@ -25,6 +43,4 @@ "if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False. # Current Stash DB schema only allows maximum base file name length to be 255 "max_filename_length": 255, - # "max_filefolder_length": 255, # For future useage - # "max_filebase_length": 255, # For future useage } From c99d35905a4bd18d8bdca715c6ef1cc4e3fbfeb9 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 03:37:49 -0400 Subject: [PATCH 03/39] Added plugin ChangeFileMonitor and updated RenameFile plugin Added new plugin ChangeFileMonitor. Made following changes to RenameFile plugin. Added fields galleries, resolution, and width. Fixed bug associated with studio. Added logic to limit the log file size. Added logic to only get [Change success] logging when no errors occurs. Change default fields to include studio. Added postfix styles, which was mainly needed to properly format resolution field, but can be used for the other fields. Consolidated [Include Existing Key Field] options into one option. Cleaned up code and updated version --- plugins/ChangeFileMonitor | 1 - plugins/ChangeFileMonitor/.gitignore | 525 ++++++++++++++++++ plugins/ChangeFileMonitor/README.md | 27 + .../ChangeFileMonitor/changefilemonitor.py | 246 ++++++++ .../ChangeFileMonitor/changefilemonitor.yml | 35 ++ plugins/ChangeFileMonitor/manifest | 13 + plugins/ChangeFileMonitor/requirements.txt | 4 + plugins/RenameFile/.gitignore | 525 ++++++++++++++++++ 8 files changed, 1375 insertions(+), 1 deletion(-) delete mode 120000 plugins/ChangeFileMonitor create mode 100644 plugins/ChangeFileMonitor/.gitignore create mode 100644 plugins/ChangeFileMonitor/README.md create mode 100644 plugins/ChangeFileMonitor/changefilemonitor.py create mode 100644 plugins/ChangeFileMonitor/changefilemonitor.yml create mode 100644 plugins/ChangeFileMonitor/manifest create mode 100644 plugins/ChangeFileMonitor/requirements.txt create mode 100644 plugins/RenameFile/.gitignore diff --git a/plugins/ChangeFileMonitor b/plugins/ChangeFileMonitor deleted file mode 120000 index 8ca902f7..00000000 --- a/plugins/ChangeFileMonitor +++ /dev/null @@ -1 +0,0 @@ -../../Axter-Stash/plugins/ChangeFileMonitor \ No newline at end of file diff --git a/plugins/ChangeFileMonitor/.gitignore b/plugins/ChangeFileMonitor/.gitignore new file mode 100644 index 00000000..dd93ef78 --- /dev/null +++ b/plugins/ChangeFileMonitor/.gitignore @@ -0,0 +1,525 @@ +$ cat .gitignore + +# Ignore these patterns +desktop.ini +~AutoRecover*.* +*.aps +*.exe +*.idb +*.ipch +*.lib +*.log +*.log.1 +*.log.2 +*.manifest +*.obj +*.pch +*.pdb +*.sdf +*.suo +*.tlog +*.user +*.7z +*.swp +*.zip +data.csv +/boost +/scintilla +/bin +/SQL +/__pycache__ +__pycache__/ +renamefile_settings.cpython-310.pyc + +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# Tye +.tye/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*_i.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +## +## Visual studio for Mac +## + + +# globs +Makefile.in +*.userprefs +*.usertasks +config.make +config.status +aclocal.m4 +install-sh +autom4te.cache/ +*.tar.gz +tarballs/ +test-results/ + +# Mac bundle stuff +*.dmg +*.app + +# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# JetBrains Rider +.idea/ +*.sln.iml + +## +## Visual Studio Code +## +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json + +# Other miscellaneous folders +zzMiscellaneous/ +zzExcludeFromGithub/ +FromAnotherLanuageKit/ +_BadLanguages/ + +# Exclude test data and temp files +Test_Data/ +*__ExcludeFromRepo__*.* +*__DoNotAddToRepo__*.* +deleteme/ +RelatedProjects/ +obj/ + +# Exclude temp and backup files +*.bak + +# ########################################### +# Unique to this project +# ########################################### +# Exclude reparsepoint files which are used to help view file using VS +*.xaml.xml +gitignore.txt + +GTranslate/obj/ diff --git a/plugins/ChangeFileMonitor/README.md b/plugins/ChangeFileMonitor/README.md new file mode 100644 index 00000000..ca09e59e --- /dev/null +++ b/plugins/ChangeFileMonitor/README.md @@ -0,0 +1,27 @@ +# ChangeFileMonitor: Ver 0.1.0 (By David Maisonave) +ChangeFileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any 
changes occur in the Stash library paths.
+
+### Using ChangeFileMonitor
+- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->ChangeFileMonitor**, and click on the [Start Library Monitor] button.
+  - ![ChangeFileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
+- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
+  - ![Kill_ChangeFileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
+
+
+### Requirements
+`pip install stashapp-tools`
+`pip install pyYAML`
+`pip install watchdog`
+
+### Installation
+- Follow **Requirements** instructions.
+- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **ChangeFileMonitor**.
+- Copy all the plugin files to this folder (**C:\Users\MyUserName\\.stash\plugins\ChangeFileMonitor**).
+- Restart Stash.
+
+That's it!!!
+
+### Options
+- All options are accessible in the GUI via Settings->Plugins->Plugins->[ChangeFileMonitor].
+
+
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
new file mode 100644
index 00000000..41918a5c
--- /dev/null
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -0,0 +1,246 @@
+# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developer's version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+import os
+import sys
+import shutil
+import hashlib
+import json
+from pathlib import Path
+import requests
+import logging
+from logging.handlers import RotatingFileHandler
+import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
+from stashapi.stashapp import StashInterface
+from watchdog.observers import Observer # This is also needed for event attributes
+import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
+from threading import Lock, Condition
+from multiprocessing import shared_memory
+
+# **********************************************************************
+# Constant global variables --------------------------------------------
+LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
+FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
+DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
+PLUGIN_ARGS = False
+PLUGIN_ARGS_MODE = False
+# GraphQL query to fetch all scenes
+QUERY_ALL_SCENES = """
+    query AllScenes {
+        allScenes {
+            id
+            updated_at
+        }
+    }
+"""
+RFH = RotatingFileHandler(
+    filename=LOG_FILE_PATH,
+    mode='a',
+    maxBytes=2*1024*1024, # Configure logging for this script with a max log file size of 2 MB
+    backupCount=2,
+    encoding=None,
+    delay=0
+)
+TIMEOUT = 5
+CONTINUE_RUNNING_SIG = 99
+
+# **********************************************************************
+# Global variables --------------------------------------------
+exitMsg = "Change success!!"
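+# The Lock/Condition pair below coordinates the watchdog callback threads with
+# the main loop: each callback appends the changed path to TargetPaths, sets
+# shouldUpdate, and notifies, and the main loop then batches the collected
+# paths into a single metadata scan.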
+mutex = Lock()
+signal = Condition(mutex)
+shouldUpdate = False
+TargetPaths = []
+
+# Configure local log file for plugin within plugin folder having a limited max log file size
+logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
+logger = logging.getLogger(Path(__file__).stem)
+
+# **********************************************************************
+# ----------------------------------------------------------------------
+# Code section to fetch variables from Plugin UI and from changefilemonitor_settings.py
+json_input = json.loads(sys.stdin.read())
+FRAGMENT_SERVER = json_input["server_connection"]
+stash = StashInterface(FRAGMENT_SERVER)
+PLUGINCONFIGURATION = stash.get_configuration()["plugins"]
+STASHCONFIGURATION = stash.get_configuration()["general"]
+STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
+stashPaths = []
+settings = {
+    "scanModified": False,
+    "recursiveDisabled": False,
+    "zgraphqlEndpoint": DEFAULT_ENDPOINT,
+    "zzdebugTracing": False,
+    "zzdryRun": False,
+}
+PLUGIN_ID = "changefilemonitor"
+if PLUGIN_ID in PLUGINCONFIGURATION:
+    settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
+# ----------------------------------------------------------------------
+debugTracing = settings["zzdebugTracing"]
+RECURSIVE = settings["recursiveDisabled"] == False
+SCAN_MODIFIED = settings["scanModified"]
+
+for item in STASHPATHSCONFIG:
+    stashPaths.append(item["path"])
+
+# Extract dry_run setting from settings
+dry_run = settings["zzdryRun"]
+dry_run_prefix = ''
+try:
+    PLUGIN_ARGS = json_input['args']
+    PLUGIN_ARGS_MODE = json_input['args']["mode"]
+except:
+    pass
+logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
+if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
+if debugTracing: logger.info("settings: %s " % (settings,))
+if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
+if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")
+
+if dry_run:
+    logger.info("Dry run mode is enabled.")
+    dry_run_prefix = "Would've "
+if debugTracing: logger.info("Debug Tracing................")
+# ToDo: Add split logic here to split a possible string array into an array
+endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint
+if not endpoint or endpoint == "":
+    endpoint = DEFAULT_ENDPOINT
+if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
+# ----------------------------------------------------------------------
+# **********************************************************************
+if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................")
+
+def start_library_monitor():
+    global shouldUpdate
+    global TargetPaths
+    try:
+        # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
+        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=True, size=4)
+    except:
+        pass
+        logger.info("Could not open shared memory map. Change File Monitor must be running. 
Cannot run multiple instances of Change File Monitor.")
+        return
+    type(shm_a.buf)
+    shm_buffer = shm_a.buf
+    len(shm_buffer)
+    shm_buffer[0] = CONTINUE_RUNNING_SIG
+    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+
+    event_handler = watchdog.events.FileSystemEventHandler()
+    def on_created(event):
+        global shouldUpdate
+        global TargetPaths
+        TargetPaths.append(event.src_path)
+        logger.info(f"CREATE *** '{event.src_path}'")
+        with mutex:
+            shouldUpdate = True
+            signal.notify()
+
+    def on_deleted(event):
+        global shouldUpdate
+        global TargetPaths
+        TargetPaths.append(event.src_path)
+        logger.info(f"DELETE *** '{event.src_path}'")
+        with mutex:
+            shouldUpdate = True
+            signal.notify()
+
+    def on_modified(event):
+        global shouldUpdate
+        global TargetPaths
+        if SCAN_MODIFIED:
+            TargetPaths.append(event.src_path)
+            logger.info(f"MODIFIED *** '{event.src_path}'")
+            with mutex:
+                shouldUpdate = True
+                signal.notify()
+        else:
+            if debugTracing: logger.info(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'")
+
+    def on_moved(event):
+        global shouldUpdate
+        global TargetPaths
+        TargetPaths.append(event.src_path)
+        TargetPaths.append(event.dest_path)
+        logger.info(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
+        with mutex:
+            shouldUpdate = True
+            signal.notify()
+
+    event_handler.on_created = on_created
+    event_handler.on_deleted = on_deleted
+    event_handler.on_modified = on_modified
+    event_handler.on_moved = on_moved
+
+    observer = Observer()
+    # Iterate through stashPaths
+    for path in stashPaths:
+        observer.schedule(event_handler, path, recursive=RECURSIVE)
+        if debugTracing: logger.info(f"Observing {path}")
+    observer.start()
+    if debugTracing: logger.info("Starting loop................")
+    try:
+        while True:
+            TmpTargetPaths = []
+            with mutex:
+                while not shouldUpdate:
+                    if debugTracing: logger.info("Wait start................")
+                    signal.wait()
+                    if debugTracing: logger.info("Wait end................")
+                shouldUpdate = False
+                TmpTargetPaths = []
+                for TargetPath in TargetPaths:
+                    TmpTargetPaths.append(os.path.dirname(TargetPath))
+                TargetPaths = []
+                TmpTargetPaths = list(set(TmpTargetPaths))
+            if TmpTargetPaths != []:
+                logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}")
+                if not dry_run:
+                    stash.metadata_scan(paths=TmpTargetPaths)
+                stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
+                if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
+                return
+            else:
+                if debugTracing: logger.info("Nothing to scan.")
+            if shm_buffer[0] != CONTINUE_RUNNING_SIG:
+                logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
+                shm_a.close()
+                shm_a.unlink() # Call unlink only once to release the shared memory
+                time.sleep(1)
+                break
+    except KeyboardInterrupt:
+        observer.stop()
+        if debugTracing: logger.info("Stopping observer................")
+    observer.join()
+    if debugTracing: logger.info("Exiting function................")
+
+# stop_library_monitor does not work because only one task can run at a time.
+# def stop_library_monitor():
+    # if debugTracing: logger.info("Opening shared memory map.")
+    # try:
+        # shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
+    # except:
+        # pass
+        # logger.info("Could not open shared memory map. 
Change File Monitor must not be running.")
+        # return
+    # type(shm_a.buf)
+    # shm_buffer = shm_a.buf
+    # len(shm_buffer)
+    # shm_buffer[0] = 123
+    # if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+    # shm_a.close()
+    # shm_a.unlink() # Call unlink only once to release the shared memory
+    # time.sleep(1)
+    # return
+
+if PLUGIN_ARGS_MODE == "start_library_monitor":
+    start_library_monitor()
+    if debugTracing: logger.info(f"start_library_monitor EXIT................")
+# elif PLUGIN_ARGS_MODE == "stop_library_monitor":
+    # stop_library_monitor()
+    # if debugTracing: logger.info(f"stop_library_monitor EXIT................")
+else:
+    logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
+
+if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
new file mode 100644
index 00000000..0150513d
--- /dev/null
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -0,0 +1,35 @@
+# By David Maisonave (aka Axter) 2024
+name: ChangeFileMonitor
+description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
+version: 0.1.0
+url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+settings:
+  scanModified:
+    displayName: Scan Modifications
+    description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
+    type: BOOLEAN
+  recursiveDisabled:
+    displayName: No Recursive
+    description: Enable to stop monitoring paths recursively.
+    type: BOOLEAN
+  zgraphqlEndpoint:
+    displayName: GraphQL Endpoint
+    description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default.
+    type: STRING
+  zzdebugTracing:
+    displayName: Debug Tracing
+    description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log
+    type: BOOLEAN
+  zzdryRun:
+    displayName: Dry Run
+    description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call metadata_scan, and only logs the action it would have taken.
+    type: BOOLEAN
+exec:
+  - python
+  - "{pluginDir}/changefilemonitor.py"
+interface: raw
+tasks:
+  - name: Start Library Monitor
+    description: Monitors paths in Stash library for media file changes, and updates Stash.
+    defaultArgs:
+      mode: start_library_monitor
diff --git a/plugins/ChangeFileMonitor/manifest b/plugins/ChangeFileMonitor/manifest
new file mode 100644
index 00000000..4a03c5f4
--- /dev/null
+++ b/plugins/ChangeFileMonitor/manifest
@@ -0,0 +1,13 @@
+id: changefilemonitor
+name: ChangeFileMonitor
+metadata:
+  description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths. 
+version: 0.1.0 +date: "2024-07-26 08:00:00" +requires: [pip install stashapp-tools, pip install pyYAML, pip install watchdog] +source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor +files: +- README.md +- changefilemonitor.yml +- changefilemonitor.py +- requirements.txt diff --git a/plugins/ChangeFileMonitor/requirements.txt b/plugins/ChangeFileMonitor/requirements.txt new file mode 100644 index 00000000..aa553701 --- /dev/null +++ b/plugins/ChangeFileMonitor/requirements.txt @@ -0,0 +1,4 @@ +stashapp-tools +pyYAML +watchdog +requests \ No newline at end of file diff --git a/plugins/RenameFile/.gitignore b/plugins/RenameFile/.gitignore new file mode 100644 index 00000000..dd93ef78 --- /dev/null +++ b/plugins/RenameFile/.gitignore @@ -0,0 +1,525 @@ +$ cat .gitignore + +# Ignore these patterns +desktop.ini +~AutoRecover*.* +*.aps +*.exe +*.idb +*.ipch +*.lib +*.log +*.log.1 +*.log.2 +*.manifest +*.obj +*.pch +*.pdb +*.sdf +*.suo +*.tlog +*.user +*.7z +*.swp +*.zip +data.csv +/boost +/scintilla +/bin +/SQL +/__pycache__ +__pycache__/ +renamefile_settings.cpython-310.pyc + +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# Tye +.tye/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*_i.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench 
(sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. +!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +## +## Visual studio for Mac +## + + +# globs +Makefile.in +*.userprefs +*.usertasks +config.make +config.status +aclocal.m4 +install-sh +autom4te.cache/ +*.tar.gz +tarballs/ +test-results/ + +# Mac bundle stuff +*.dmg +*.app + +# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# JetBrains Rider +.idea/ +*.sln.iml + +## +## Visual Studio Code +## +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json + +# Other miscellaneous folders +zzMiscellaneous/ +zzExcludeFromGithub/ +FromAnotherLanuageKit/ +_BadLanguages/ + +# Exclude test data and temp files +Test_Data/ +*__ExcludeFromRepo__*.* +*__DoNotAddToRepo__*.* +deleteme/ +RelatedProjects/ +obj/ + +# Exclude temp and backup files +*.bak + +# ########################################### +# Unique to this project +# ########################################### +# Exclude reparsepoint files which are used to help view file using VS +*.xaml.xml +gitignore.txt + +GTranslate/obj/ From 81a83bb10b4c6b1aab5bcf833ff2406ccdfc9e56 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 04:58:17 -0400 Subject: [PATCH 04/39] Added clean option --- .../ChangeFileMonitor/changefilemonitor.py | 19 ++++++++++++++----- .../ChangeFileMonitor/changefilemonitor.yml | 12 ++++++++---- 2 
files changed, 22 insertions(+), 9 deletions(-)

diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
index 41918a5c..ab81e793 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.py
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -67,8 +67,9 @@
 STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
 stashPaths = []
 settings = {
-    "scanModified": False,
     "recursiveDisabled": False,
+    "runCleanAfterDelete": False,
+    "scanModified": False,
     "zgraphqlEndpoint": DEFAULT_ENDPOINT,
     "zzdebugTracing": False,
     "zzdryRun": False,
@@ -80,25 +81,26 @@
 debugTracing = settings["zzdebugTracing"]
 RECURSIVE = settings["recursiveDisabled"] == False
 SCAN_MODIFIED = settings["scanModified"]
+RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"]
 
 for item in STASHPATHSCONFIG:
     stashPaths.append(item["path"])
 
 # Extract dry_run setting from settings
-dry_run = settings["zzdryRun"]
+DRY_RUN = settings["zzdryRun"]
 dry_run_prefix = ''
 try:
     PLUGIN_ARGS = json_input['args']
     PLUGIN_ARGS_MODE = json_input['args']["mode"]
 except:
     pass
-logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
+logger.info(f"\nStarting (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
 if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
 if debugTracing: logger.info("settings: %s " % (settings,))
 if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
 if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")
 
-if dry_run:
+if DRY_RUN:
     logger.info("Dry run mode is enabled.")
     dry_run_prefix = "Would've "
 if debugTracing: logger.info("Debug Tracing................")
@@ -126,6 +128,7 @@ def start_library_monitor():
     len(shm_buffer)
     shm_buffer[0] = CONTINUE_RUNNING_SIG
     if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+    RunCleanMetadata = False
 
     event_handler = watchdog.events.FileSystemEventHandler()
     def on_created(event):
@@ -140,10 +143,12 @@ def on_created(event):
     def on_deleted(event):
         global shouldUpdate
         global TargetPaths
+        nonlocal RunCleanMetadata
         TargetPaths.append(event.src_path)
         logger.info(f"DELETE *** '{event.src_path}'")
         with mutex:
             shouldUpdate = True
+            RunCleanMetadata = True
             signal.notify()
 
     def on_modified(event):
@@ -168,12 +173,14 @@ def on_moved(event):
             shouldUpdate = True
             signal.notify()
 
+    if debugTracing: logger.info("Debug Trace........")
     event_handler.on_created = on_created
     event_handler.on_deleted = on_deleted
     event_handler.on_modified = on_modified
     event_handler.on_moved = on_moved
 
     observer = Observer()
+
     # Iterate through stashPaths
     for path in stashPaths:
         observer.schedule(event_handler, path, recursive=RECURSIVE)
@@ -196,8 +203,10 @@ def on_moved(event):
         TmpTargetPaths = list(set(TmpTargetPaths))
         if TmpTargetPaths != []:
             logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}")
-            if not dry_run:
+            if not DRY_RUN:
                 stash.metadata_scan(paths=TmpTargetPaths)
+                if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
+                    stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN)
             stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
             if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.") 
                return
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 0150513d..501e0e32 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -4,13 +4,17 @@ description: Monitors the Stash library folders, and updates Stash if any chan
 version: 0.1.0
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
 settings:
-  scanModified:
-    displayName: Scan Modifications
-    description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
-    type: BOOLEAN
   recursiveDisabled:
     displayName: No Recursive
     description: Enable to stop monitoring paths recursively.
+    type: BOOLEAN
+  runCleanAfterDelete:
+    displayName: Run Clean
+    description: Enable to run metadata clean task after file deletion.
+    type: BOOLEAN
+  scanModified:
+    displayName: Scan Modifications
+    description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
     type: BOOLEAN
   zgraphqlEndpoint:
     displayName: GraphQL Endpoint

From f34a382b9131d6f325b836382e5a4cf52ca3e0a6 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:04:04 -0400
Subject: [PATCH 05/39] Update changefilemonitor.yml

---
 plugins/ChangeFileMonitor/changefilemonitor.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 501e0e32..95c00f5d 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -14,7 +14,7 @@ settings:
     type: BOOLEAN
   scanModified:
     displayName: Scan Modifications
-    description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
+    description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
     type: BOOLEAN
   zgraphqlEndpoint:
     displayName: GraphQL Endpoint

From e1133dc65b908d45882a8eb73713eb30606bc7c2 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:10:41 -0400
Subject: [PATCH 06/39] Update changefilemonitor.yml

---
 plugins/ChangeFileMonitor/changefilemonitor.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 95c00f5d..818c4f7d 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -1,4 +1,3 @@
-# By David Maisonave (aka Axter) 2024
 name: ChangeFileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths. 
version: 0.1.0 From 789e5d05bd0f4dfb795f7b78554f5df185769b97 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 05:12:04 -0400 Subject: [PATCH 07/39] Update renamefile.yml --- plugins/RenameFile/renamefile.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index 14006c3a..e85c0d81 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,6 +1,5 @@ name: RenameFile description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. -# By David Maisonave (aka Axter) 2024 version: 0.4.0 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: @@ -66,10 +65,6 @@ hooks: triggeredBy: - Scene.Update.Post tasks: - # - name: Fetch Tags - # description: Get tags from duplicate file names. - # defaultArgs: - # mode: fetch_dup_filename_tags - name: Rename Last Scene description: Renames file of last updated scene. defaultArgs: From ac708add88085a98deb9596cd961e1fa2469d4e5 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 05:15:38 -0400 Subject: [PATCH 08/39] Fixing format --- plugins/ChangeFileMonitor/changefilemonitor.yml | 2 +- plugins/RenameFile/renamefile.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml index 818c4f7d..0990e327 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor.yml +++ b/plugins/ChangeFileMonitor/changefilemonitor.yml @@ -35,4 +35,4 @@ tasks: - name: Start Library Monitor description: Monitors paths in Stash library for media file changes, and updates Stash. defaultArgs: - mode: start_library_monitor + mode: start_library_monitor \ No newline at end of file diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index e85c0d81..820f7edb 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -68,4 +68,4 @@ tasks: - name: Rename Last Scene description: Renames file of last updated scene. 
defaultArgs: - mode: rename_files_task + mode: rename_files_task \ No newline at end of file From 0bd49ca4548dcd8bbc32e0f72828032636f63061 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 05:19:37 -0400 Subject: [PATCH 09/39] Create .prettierignore --- plugins/ChangeFileMonitor/.prettierignore | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 plugins/ChangeFileMonitor/.prettierignore diff --git a/plugins/ChangeFileMonitor/.prettierignore b/plugins/ChangeFileMonitor/.prettierignore new file mode 100644 index 00000000..951dc9a3 --- /dev/null +++ b/plugins/ChangeFileMonitor/.prettierignore @@ -0,0 +1,14 @@ +## Please check .eslintignore and .gitignore when changing this file + +## file extensions +*.* +!*.css +!*.js +!*.json +!*.jsx +!*.less +!*.md +!*.mdx +!*.ts +!*.tsx +!*.yml \ No newline at end of file From 1e4f9d37b72b536514ce9203f102a87eddc76f8f Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 05:23:27 -0400 Subject: [PATCH 10/39] Delete .prettierignore --- plugins/ChangeFileMonitor/.prettierignore | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 plugins/ChangeFileMonitor/.prettierignore diff --git a/plugins/ChangeFileMonitor/.prettierignore b/plugins/ChangeFileMonitor/.prettierignore deleted file mode 100644 index 951dc9a3..00000000 --- a/plugins/ChangeFileMonitor/.prettierignore +++ /dev/null @@ -1,14 +0,0 @@ -## Please check .eslintignore and .gitignore when changing this file - -## file extensions -*.* -!*.css -!*.js -!*.json -!*.jsx -!*.less -!*.md -!*.mdx -!*.ts -!*.tsx -!*.yml \ No newline at end of file From a9b73d6c3d824e326115fa0d1a88759c4e5f2d6b Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sun, 28 Jul 2024 05:33:53 -0400 Subject: [PATCH 11/39] Fixed format via prettier --- .../ChangeFileMonitor/changefilemonitor.yml | 10 +++++----- plugins/RenameFile/renamefile.yml | 18 +++++++++--------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml index 0990e327..b522cab8 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor.yml +++ b/plugins/ChangeFileMonitor/changefilemonitor.yml @@ -1,5 +1,5 @@ name: ChangeFileMonitor -description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. +description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. version: 0.1.0 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor settings: @@ -14,7 +14,7 @@ settings: scanModified: displayName: Scan Modifications description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ. - type: BOOLEAN + type: BOOLEAN zgraphqlEndpoint: displayName: GraphQL Endpoint description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default. @@ -22,11 +22,11 @@ settings: zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. 
When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log - type: BOOLEAN + type: BOOLEAN zzdryRun: displayName: Dry Run description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call meta_scan, and only logs the action it would have taken. - type: BOOLEAN + type: BOOLEAN exec: - python - "{pluginDir}/changefilemonitor.py" @@ -35,4 +35,4 @@ tasks: - name: Start Library Monitor description: Monitors paths in Stash library for media file changes, and updates Stash. defaultArgs: - mode: start_library_monitor \ No newline at end of file + mode: start_library_monitor diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index 820f7edb..4bc81ac6 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,28 +1,28 @@ name: RenameFile -description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. +description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. version: 0.4.0 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: performerAppend: displayName: Append Performers description: Enable to append performers name to file name when renaming a file. Requires performers to be included in [Key Fields] list, which by default it is included. - type: BOOLEAN + type: BOOLEAN studioAppend: displayName: Append Studio description: Enable to append studio name to file name when renaming a file. Requires studio to be included in [Key Fields] list, which by default it is included. - type: BOOLEAN + type: BOOLEAN tagAppend: displayName: Append Tags description: Enable to append tag names to file name when renaming a file. Requires tags to be included in [Key Fields] list, which by default it is included. - type: BOOLEAN + type: BOOLEAN z_keyFIeldsIncludeInFileName: # Prefixing z_ to variable names so that the GUI will place these fields after above fields (alphabatically listed) displayName: Include Existing Key Field description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name. - type: BOOLEAN + type: BOOLEAN zafileRenameViaMove: displayName: Rename Using Move description: Enable to have file moved when renaming file. - type: BOOLEAN + type: BOOLEAN zfieldKeyList: displayName: Key Fields description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".' @@ -50,11 +50,11 @@ settings: zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log - type: BOOLEAN + type: BOOLEAN zzdryRun: displayName: Dry Run description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. 
This should always be enabled on the first run after renamefile_settings.py has been modified.
-    type: BOOLEAN
+    type: BOOLEAN
 exec:
   - python
   - "{pluginDir}/renamefile.py"
 hooks:
   - name: RenameFile_Hook
     triggeredBy:
       - Scene.Update.Post
 tasks:
@@ -68,4 +68,4 @@ tasks:
   - name: Rename Last Scene
     description: Renames file of last updated scene.
     defaultArgs:
-      mode: rename_files_task
+      mode: rename_files_task

From ec379fef07a57d1b205b5e1e10d6bc6bc2ad9399 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 15:30:49 -0400
Subject: [PATCH 12/39] Removed unused UI variable endpoint

Removed unused UI variable endpoint.
Added logic that allows ChangeFileMonitor to run as a script.
Updated README.md file to explain how to run it as a script.
---
 plugins/ChangeFileMonitor/README.md           |  10 +-
 .../ChangeFileMonitor/changefilemonitor.py    | 112 ++++++++++++------
 .../ChangeFileMonitor/changefilemonitor.yml   |   8 +-
 plugins/RenameFile/renamefile.py              |  16 ++-
 4 files changed, 99 insertions(+), 47 deletions(-)

diff --git a/plugins/ChangeFileMonitor/README.md b/plugins/ChangeFileMonitor/README.md
index ca09e59e..30cd2412 100644
--- a/plugins/ChangeFileMonitor/README.md
+++ b/plugins/ChangeFileMonitor/README.md
@@ -1,12 +1,20 @@
 # ChangeFileMonitor: Ver 0.1.0 (By David Maisonave)
 ChangeFileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occur in the Stash library paths.
 
-### Using ChangeFileMonitor
+### Using ChangeFileMonitor as a plugin
 - To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->ChangeFileMonitor**, and click on the [Start Library Monitor] button.
   - ![ChangeFileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
 - To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
   - ![Kill_ChangeFileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
 
+### Using ChangeFileMonitor as a script
+**ChangeFileMonitor** can be called as a standalone script.
+- To start monitoring, call the script and pass any argument.
+  - python changefilemonitor.py **foofoo**
+- To stop **ChangeFileMonitor**, pass the argument **stop**.
+  - python changefilemonitor.py **stop**
+  - After running the above command line, **ChangeFileMonitor** will stop after the next file change occurs.
+  - The stop command stops both the standalone job and the Stash plugin task job.
 
 ### Requirements
 `pip install stashapp-tools`
 `pip install pyYAML`
 `pip install watchdog`
 
 ### Installation
 - Follow **Requirements** instructions.
 - In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **ChangeFileMonitor**.
 - Copy all the plugin files to this folder (**C:\Users\MyUserName\\.stash\plugins\ChangeFileMonitor**).
 - Restart Stash.
 
 That's it!!!
 
 ### Options
 - All options are accessible in the GUI via Settings->Plugins->Plugins->[ChangeFileMonitor].
 
 
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
index 41918a5c..77462398 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.py
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -1,9 +1,13 @@
 # Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
 # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
 # Get the latest developer's version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+# Note: To call this script outside of Stash, pass any argument. 
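+# To stop a monitor that is already running, pass the argument stop.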
+# Example: python changefilemonitor.py foofoo import os import sys +import time import shutil +import fileinput import hashlib import json from pathlib import Path @@ -21,9 +25,9 @@ # Constant global variables -------------------------------------------- LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" -DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint PLUGIN_ARGS = False PLUGIN_ARGS_MODE = False +PLUGIN_ID = Path(__file__).stem.lower() # GraphQL query to fetch all scenes QUERY_ALL_SCENES = """ query AllScenes { @@ -51,16 +55,45 @@ signal = Condition(mutex) shouldUpdate = False TargetPaths = [] +runningInPluginMode = False # Configure local log file for plugin within plugin folder having a limited max log file size logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) logger = logging.getLogger(Path(__file__).stem) - + # ********************************************************************** # ---------------------------------------------------------------------- # Code section to fetch variables from Plugin UI and from changefilemonitor_settings.py -json_input = json.loads(sys.stdin.read()) -FRAGMENT_SERVER = json_input["server_connection"] +# Check if being called as Stash plugin +gettingCalledAsStashPlugin = True +stopLibraryMonitoring = False +StdInRead = None +try: + if len(sys.argv) == 1: + print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr) + StdInRead = sys.stdin.read() + # for line in fileinput.input(): + # StdInRead = line + # break + else: + if len(sys.argv) > 1 and sys.argv[1].lower() == "stop": + stopLibraryMonitoring = True + raise Exception("Not called in plugin mode.") +except: + gettingCalledAsStashPlugin = False + print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! 
(stopLibraryMonitoring={stopLibraryMonitoring}) (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr) + pass + +if gettingCalledAsStashPlugin and StdInRead: + print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr) + runningInPluginMode = True + json_input = json.loads(StdInRead) + FRAGMENT_SERVER = json_input["server_connection"] +else: + runningInPluginMode = False + FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': 9999, 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent} + print("Running in non-plugin mode!", file=sys.stderr) + stash = StashInterface(FRAGMENT_SERVER) PLUGINCONFIGURATION = stash.get_configuration()["plugins"] STASHCONFIGURATION = stash.get_configuration()["general"] @@ -69,12 +102,12 @@ settings = { "recursiveDisabled": False, "runCleanAfterDelete": False, + "runGenerateContent": False, "scanModified": False, - "zgraphqlEndpoint": DEFAULT_ENDPOINT, "zzdebugTracing": False, "zzdryRun": False, } -PLUGIN_ID = "changefilemonitor" + if PLUGIN_ID in PLUGINCONFIGURATION: settings.update(PLUGINCONFIGURATION[PLUGIN_ID]) # ---------------------------------------------------------------------- @@ -82,6 +115,7 @@ RECURSIVE = settings["recursiveDisabled"] == False SCAN_MODIFIED = settings["scanModified"] RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"] +RUN_GENERATE_CONTENT = settings["runGenerateContent"] for item in STASHPATHSCONFIG: stashPaths.append(item["path"]) @@ -94,7 +128,7 @@ PLUGIN_ARGS_MODE = json_input['args']["mode"] except: pass -logger.info(f"\nStarting (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") +logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................") if debugTracing: logger.info("settings: %s " % (settings,)) if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................") @@ -104,11 +138,6 @@ logger.info("Dry run mode is enabled.") dry_run_prefix = "Would've " if debugTracing: logger.info("Debug Tracing................") -# ToDo: Add split logic here to slpit possible string array into an array -endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint -if not endpoint or endpoint == "": - endpoint = DEFAULT_ENDPOINT -if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................") # ---------------------------------------------------------------------- # ********************************************************************** if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................") @@ -207,48 +236,53 @@ def on_moved(event): stash.metadata_scan(paths=TmpTargetPaths) if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata: stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN) - stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor") - if debugTracing: logger.info("Exiting plugin 
so that metadata_scan task can run.")
-                return
+                    if RUN_GENERATE_CONTENT:
+                        stash.metadata_generate()
+                    if gettingCalledAsStashPlugin and shm_buffer[0] == CONTINUE_RUNNING_SIG:
+                        stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
+                        if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
+                    return
             else:
                 if debugTracing: logger.info("Nothing to scan.")
             if shm_buffer[0] != CONTINUE_RUNNING_SIG:
                 logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
                 shm_a.close()
                 shm_a.unlink() # Call unlink only once to release the shared memory
-                time.sleep(1)
-                break
+                raise KeyboardInterrupt
     except KeyboardInterrupt:
         observer.stop()
         if debugTracing: logger.info("Stopping observer................")
     observer.join()
     if debugTracing: logger.info("Exiting function................")
 
-# stop_library_monitor does not work because only one task can run at a time.
-# def stop_library_monitor():
-    # if debugTracing: logger.info("Opening shared memory map.")
-    # try:
-        # shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
-    # except:
-        # pass
-        # logger.info("Could not open shared memory map. Change File Monitor must not be running.")
-        # return
-    # type(shm_a.buf)
-    # shm_buffer = shm_a.buf
-    # len(shm_buffer)
-    # shm_buffer[0] = 123
-    # if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
-    # shm_a.close()
-    # shm_a.unlink() # Call unlink only once to release the shared memory
-    # time.sleep(1)
-    # return
+# This function is only useful when called outside of Stash.
+# Example: python changefilemonitor.py stop
+# Stops monitoring when the next file change is triggered.
+# ToDo: Add logic so it doesn't have to wait until the next file change
+def stop_library_monitor():
+    if debugTracing: logger.info("Opening shared memory map.")
+    try:
+        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
+    except:
+        pass
+        logger.info("Could not open shared memory map. Change File Monitor must not be running.")
+        return
+    type(shm_a.buf)
+    shm_buffer = shm_a.buf
+    len(shm_buffer)
+    shm_buffer[0] = 123
+    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+    shm_a.close()
+    shm_a.unlink() # Call unlink only once to release the shared memory
+    time.sleep(1)
+    return
 
-if PLUGIN_ARGS_MODE == "start_library_monitor":
+if stopLibraryMonitoring:
+    stop_library_monitor()
+    if debugTracing: logger.info(f"stop_library_monitor EXIT................")
+elif PLUGIN_ARGS_MODE == "start_library_monitor" or not gettingCalledAsStashPlugin:
     start_library_monitor()
     if debugTracing: logger.info(f"start_library_monitor EXIT................")
-# elif PLUGIN_ARGS_MODE == "stop_library_monitor":
-    # stop_library_monitor()
-    # if debugTracing: logger.info(f"stop_library_monitor EXIT................")
 else:
     logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
 
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index b522cab8..3336bc47 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -11,14 +11,14 @@ settings:
     displayName: Run Clean
     description: Enable to run metadata clean task after file deletion.
     type: BOOLEAN
+  runGenerateContent:
+    displayName: Run Generate Content
+    description: Enable to run metadata_generate (Generate Content) after metadata scan. 
+ type: BOOLEAN scanModified: displayName: Scan Modifications description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ. type: BOOLEAN - zgraphqlEndpoint: - displayName: GraphQL Endpoint - description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default. - type: STRING zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index d7c55889..a434970a 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -21,6 +21,7 @@ FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order +PLUGIN_ID = Path(__file__).stem.lower() DEFAULT_SEPERATOR = "-" PLUGIN_ARGS = False PLUGIN_ARGS_MODE = False @@ -51,7 +52,7 @@ # Configure local log file for plugin within plugin folder having a limited max log file size logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) -logger = logging.getLogger('renamefile') +logger = logging.getLogger(PLUGIN_ID) # ********************************************************************** # ---------------------------------------------------------------------- @@ -75,8 +76,8 @@ "zzdebugTracing": False, "zzdryRun": False, } -if "renamefile" in pluginConfiguration: - settings.update(pluginConfiguration["renamefile"]) +if PLUGIN_ID in pluginConfiguration: + settings.update(pluginConfiguration[PLUGIN_ID]) # ---------------------------------------------------------------------- debugTracing = settings["zzdebugTracing"] @@ -94,6 +95,13 @@ pass logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************") if debugTracing: logger.info("settings: %s " % (settings,)) +# if PLUGIN_ID in pluginConfiguration: + # if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................") + # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]: + # if debugTracing: logger.info("Debug Tracing................") + # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True + # if debugTracing: logger.info("Debug Tracing................") + if dry_run: logger.info("Dry run mode is enabled.") dry_run_prefix = "Would've " @@ -131,6 +139,8 @@ double_separator = separator + separator if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................") +if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID=\"{PLUGIN_ID}\")................") +if debugTracing: logger.info("Debug Tracing................") # Function to make GraphQL requests def graphql_request(query, variables=None): From bf3bfe4a269188e076a0b4d5413669348cb99ab2 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Mon, 29 Jul 2024 03:21:39 -0400 Subject: [PATCH 13/39] Moved rarely used 
fields to config file --- .../ChangeFileMonitor/changefilemonitor.py | 6 ++-- .../ChangeFileMonitor/changefilemonitor.yml | 4 --- .../changefilemonitor_config.py | 12 +++++++ plugins/RenameFile/renamefile.py | 34 ++++++++++++------- plugins/RenameFile/renamefile.yml | 12 ------- plugins/RenameFile/renamefile_settings.py | 4 +++ 6 files changed, 40 insertions(+), 32 deletions(-) create mode 100644 plugins/ChangeFileMonitor/changefilemonitor_config.py diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py index 77462398..c8992c01 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor.py +++ b/plugins/ChangeFileMonitor/changefilemonitor.py @@ -20,6 +20,7 @@ import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/ from threading import Lock, Condition from multiprocessing import shared_memory +from changefilemonitor_config import config # Import settings from changefilemonitor_config.py # ********************************************************************** # Constant global variables -------------------------------------------- @@ -91,7 +92,7 @@ FRAGMENT_SERVER = json_input["server_connection"] else: runningInPluginMode = False - FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': 9999, 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent} + FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent} print("Running in non-plugin mode!", file=sys.stderr) stash = StashInterface(FRAGMENT_SERVER) @@ -102,7 +103,6 @@ settings = { "recursiveDisabled": False, "runCleanAfterDelete": False, - "runGenerateContent": False, "scanModified": False, "zzdebugTracing": False, "zzdryRun": False, @@ -115,7 +115,7 @@ RECURSIVE = settings["recursiveDisabled"] == False SCAN_MODIFIED = settings["scanModified"] RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"] -RUN_GENERATE_CONTENT = settings["runGenerateContent"] +RUN_GENERATE_CONTENT = config['runGenerateContent'] for item in STASHPATHSCONFIG: stashPaths.append(item["path"]) diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml index 3336bc47..41e91fdb 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor.yml +++ b/plugins/ChangeFileMonitor/changefilemonitor.yml @@ -11,10 +11,6 @@ settings: displayName: Run Clean description: Enable to run metadata clean task after file deletion. type: BOOLEAN - runGenerateContent: - displayName: Run Generate Content - description: Enable to run metadata_generate (Generate Content) after metadata scan. - type: BOOLEAN scanModified: displayName: Scan Modifications description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ. 
diff --git a/plugins/ChangeFileMonitor/changefilemonitor_config.py b/plugins/ChangeFileMonitor/changefilemonitor_config.py new file mode 100644 index 00000000..4c09f403 --- /dev/null +++ b/plugins/ChangeFileMonitor/changefilemonitor_config.py @@ -0,0 +1,12 @@ +# Description: This is a Stash plugin which updates Stash if any changes occurs in the Stash library paths. +# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor +config = { + # Enable to run metadata_generate (Generate Content) after metadata scan. + "runGenerateContent": False, + + # The following fields are ONLY used when running ChangeFileMonitor in script mode + "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server + "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server + "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server +} diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index a434970a..ccf7b27e 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -19,7 +19,6 @@ # Constant global variables -------------------------------------------- LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" -DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order PLUGIN_ID = Path(__file__).stem.lower() DEFAULT_SEPERATOR = "-" @@ -58,9 +57,10 @@ # ---------------------------------------------------------------------- # Code section to fetch variables from Plugin UI and from renamefile_settings.py json_input = json.loads(sys.stdin.read()) -FRAGMENT_SERVER = json_input["server_connection"] +FRAGMENT_SERVER = json_input['server_connection'] stash = StashInterface(FRAGMENT_SERVER) pluginConfiguration = stash.get_configuration()["plugins"] + settings = { "performerAppend": False, "studioAppend": False, @@ -68,11 +68,8 @@ "z_keyFIeldsIncludeInFileName": False, "zafileRenameViaMove": False, "zfieldKeyList": DEFAULT_FIELD_KEY_LIST, - "zgraphqlEndpoint": DEFAULT_ENDPOINT, "zmaximumTagKeys": 12, - "zpathToExclude": "", "zseparators": DEFAULT_SEPERATOR, - "ztagWhitelist": "", "zzdebugTracing": False, "zzdryRun": False, } @@ -95,11 +92,19 @@ pass logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************") if debugTracing: logger.info("settings: %s " % (settings,)) -# if PLUGIN_ID in pluginConfiguration: - # if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................") + +if PLUGIN_ID in pluginConfiguration: + if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................") # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]: # if debugTracing: logger.info("Debug Tracing................") - # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True + # try: + # stash.configure_plugin(PLUGIN_ID, settings) + # stash.configure_plugin("renamefile", {"zmaximumTagKeys": 12}) + # except Exception as e: + # logger.error(f"configure_plugin 
failed!!! Error: {e}") + # logger.exception('Got exception on main handler') + # pass + # # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True # if debugTracing: logger.info("Debug Tracing................") if dry_run: @@ -109,18 +114,21 @@ max_tag_keys = settings["zmaximumTagKeys"] if settings["zmaximumTagKeys"] != 0 else 12 # Need this incase use explicitly sets value to zero in UI if debugTracing: logger.info("Debug Tracing................") # ToDo: Add split logic here to slpit possible string array into an array -exclude_paths = settings["zpathToExclude"] +exclude_paths = config["pathToExclude"] exclude_paths = exclude_paths.split() if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................") # Extract tag whitelist from settings -tag_whitelist = settings["ztagWhitelist"] +tag_whitelist = config["tagWhitelist"] if debugTracing: logger.info("Debug Tracing................") if not tag_whitelist: tag_whitelist = "" if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................") -endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint -if not endpoint or endpoint == "": - endpoint = DEFAULT_ENDPOINT + +endpointHost = json_input['server_connection']['Host'] +if endpointHost == "0.0.0.0": + endpointHost = "localhost" +endpoint = f"{json_input['server_connection']['Scheme']}://{endpointHost}:{json_input['server_connection']['Port']}/graphql" + if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................") # Extract rename_files and move_files settings from renamefile_settings.py rename_files = config["rename_files"] diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index 4bc81ac6..e5d2a0f0 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -27,26 +27,14 @@ settings: displayName: Key Fields description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".' type: STRING - zgraphqlEndpoint: - displayName: GraphQL Endpoint - description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default. - type: STRING zmaximumTagKeys: displayName: Max Tag Keys description: (Default=12) Maximum quantity of tag keys to append to file name. 0=Default(12); -1=No tags appended. type: NUMBER - zpathToExclude: - displayName: Exclude Path - description: 'Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"' - type: STRING zseparators: displayName: Separator description: '(Default=-) Define the separator to use between different parts of the filename. Example Usage: ","' type: STRING - ztagWhitelist: - displayName: Tag Whitelist - description: 'Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"' - type: STRING zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. 
When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py index c4eeab9b..24052f8a 100644 --- a/plugins/RenameFile/renamefile_settings.py +++ b/plugins/RenameFile/renamefile_settings.py @@ -37,6 +37,10 @@ "frame_rate": 'FR', "date": '', }, + # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" + "pathToExclude": "", + # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3" + "tagWhitelist": "", # Define whether files should be renamed when moved "rename_files": True, # Define whether the original file name should be used if title is empty From ff65f8ce192d06a758f8352e6fa032b14c94ff88 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Mon, 29 Jul 2024 23:18:26 -0400 Subject: [PATCH 14/39] Change plugin name from ChangeFileMonitor to FileMonitor --- plugins/ChangeFileMonitor/README.md | 35 ------------------- .../.gitignore | 0 plugins/FileMonitor/README.md | 35 +++++++++++++++++++ .../filemonitor.py} | 14 ++++---- .../filemonitor.yml} | 10 +++--- .../filemonitor_config.py} | 4 +-- .../manifest | 12 +++---- .../requirements.txt | 0 8 files changed, 55 insertions(+), 55 deletions(-) delete mode 100644 plugins/ChangeFileMonitor/README.md rename plugins/{ChangeFileMonitor => FileMonitor}/.gitignore (100%) create mode 100644 plugins/FileMonitor/README.md rename plugins/{ChangeFileMonitor/changefilemonitor.py => FileMonitor/filemonitor.py} (96%) rename plugins/{ChangeFileMonitor/changefilemonitor.yml => FileMonitor/filemonitor.yml} (89%) rename plugins/{ChangeFileMonitor/changefilemonitor_config.py => FileMonitor/filemonitor_config.py} (79%) rename plugins/{ChangeFileMonitor => FileMonitor}/manifest (69%) rename plugins/{ChangeFileMonitor => FileMonitor}/requirements.txt (100%) diff --git a/plugins/ChangeFileMonitor/README.md b/plugins/ChangeFileMonitor/README.md deleted file mode 100644 index 30cd2412..00000000 --- a/plugins/ChangeFileMonitor/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# ChangeFileMonitor: Ver 0.1.0 (By David Maisonave) -ChangeFileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occurs in the Stash library paths. - -### Using ChangeFileMonitor as a plugin -- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->ChangeFileMonitor**, and click on the [Start Library Monitor] button. - - ![ChangeFileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334) -- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**. - - ![Kill_ChangeFileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1) - -### Using ChangeFileMonitor as a script -**ChangeFileMonitor** can be called as a standalone script. -- To start monitoring call the script and pass any argument. - - python changefilemonitor.py **foofoo** -- To stop **ChangeFileMonitor**, pass argument **stop**. - - python changefilemonitor.py **stop** - - After running above command line, **ChangeFileMonitor** will stop after the next file change occurs. - - The stop command works to stop the standalone job and the Stash plugin task job. 
- -### Requirements -`pip install stashapp-tools` -`pip install pyYAML` -`pip install watchdog` - -### Installation -- Follow **Requirements** instructions. -- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **ChangeFileMonitor**. -- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\ChangeFileMonitor**). -- Restart Stash. - -That's it!!! - -### Options -- All options are accessible in the GUI via Settings->Plugins->Plugins->[ChangeFileMonitor]. - - diff --git a/plugins/ChangeFileMonitor/.gitignore b/plugins/FileMonitor/.gitignore similarity index 100% rename from plugins/ChangeFileMonitor/.gitignore rename to plugins/FileMonitor/.gitignore diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md new file mode 100644 index 00000000..dcf32fdb --- /dev/null +++ b/plugins/FileMonitor/README.md @@ -0,0 +1,35 @@ +# FileMonitor: Ver 0.1.0 (By David Maisonave) +FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occurs in the Stash library paths. + +### Using FileMonitor as a plugin +- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor] button. + - ![FileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334) +- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**. + - ![Kill_FileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1) + +### Using FileMonitor as a script +**FileMonitor** can be called as a standalone script. +- To start monitoring call the script and pass any argument. + - python filemonitor.py **start** +- To stop **FileMonitor**, pass argument **stop**. + - python filemonitor.py **stop** + - After running above command line, **FileMonitor** will stop after the next file change occurs. + - The stop command works to stop the standalone job and the Stash plugin task job. + +### Requirements +`pip install stashapp-tools` +`pip install pyYAML` +`pip install watchdog` + +### Installation +- Follow **Requirements** instructions. +- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**. +- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\FileMonitor**). +- Restart Stash. + +That's it!!! + +### Options +- All options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor]. + + diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/FileMonitor/filemonitor.py similarity index 96% rename from plugins/ChangeFileMonitor/changefilemonitor.py rename to plugins/FileMonitor/filemonitor.py index c8992c01..ccfe0388 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -1,8 +1,8 @@ # Description: This is a Stash plugin which updates Stash if any changes occurs in the Stash library paths. # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) -# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor # Note: To call this script outside of Stash, pass any argument. 
-# Example: python changefilemonitor.py foofoo +# Example: python filemonitor.py foofoo import os import sys import time @@ -20,7 +20,7 @@ import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/ from threading import Lock, Condition from multiprocessing import shared_memory -from changefilemonitor_config import config # Import settings from changefilemonitor_config.py +from filemonitor_config import config # Import settings from filemonitor_config.py # ********************************************************************** # Constant global variables -------------------------------------------- @@ -64,7 +64,7 @@ # ********************************************************************** # ---------------------------------------------------------------------- -# Code section to fetch variables from Plugin UI and from changefilemonitor_settings.py +# Code section to fetch variables from Plugin UI and from filemonitor_settings.py # Check if being called as Stash plugin gettingCalledAsStashPlugin = True stopLibraryMonitoring = False @@ -147,7 +147,7 @@ def start_library_monitor(): global TargetPaths try: # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script - shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=True, size=4) + shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4) except: pass logger.info("Could not open shared memory map. Change File Monitor must be running. Can not run multiple instance of Change File Monitor.") @@ -256,13 +256,13 @@ def on_moved(event): if debugTracing: logger.info("Exiting function................") # This function is only useful when called outside of Stash. -# Example: python changefilemonitor.py stop +# Example: python filemonitor.py stop # Stops monitoring after triggered by the next file change. # ToDo: Add logic so it doesn't have to wait until the next file change def stop_library_monitor(): if debugTracing: logger.info("Opening shared memory map.") try: - shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4) + shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4) except: pass logger.info("Could not open shared memory map. Change File Monitor must not be running.") diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/FileMonitor/filemonitor.yml similarity index 89% rename from plugins/ChangeFileMonitor/changefilemonitor.yml rename to plugins/FileMonitor/filemonitor.yml index 41e91fdb..14a41783 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor.yml +++ b/plugins/FileMonitor/filemonitor.yml @@ -1,7 +1,7 @@ -name: ChangeFileMonitor +name: FileMonitor description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. -version: 0.1.0 -url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor +version: 0.2.0 +url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor settings: recursiveDisabled: displayName: No Recursive @@ -17,7 +17,7 @@ settings: type: BOOLEAN zzdebugTracing: displayName: Debug Tracing - description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log + description: (Default=false) [***For Advanced Users***] Enable debug tracing. 
When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log type: BOOLEAN zzdryRun: displayName: Dry Run @@ -25,7 +25,7 @@ settings: type: BOOLEAN exec: - python - - "{pluginDir}/changefilemonitor.py" + - "{pluginDir}/filemonitor.py" interface: raw tasks: - name: Start Library Monitor diff --git a/plugins/ChangeFileMonitor/changefilemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py similarity index 79% rename from plugins/ChangeFileMonitor/changefilemonitor_config.py rename to plugins/FileMonitor/filemonitor_config.py index 4c09f403..de0210b6 100644 --- a/plugins/ChangeFileMonitor/changefilemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -1,11 +1,11 @@ # Description: This is a Stash plugin which updates Stash if any changes occurs in the Stash library paths. # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) -# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor config = { # Enable to run metadata_generate (Generate Content) after metadata scan. "runGenerateContent": False, - # The following fields are ONLY used when running ChangeFileMonitor in script mode + # The following fields are ONLY used when running FileMonitor in script mode "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server diff --git a/plugins/ChangeFileMonitor/manifest b/plugins/FileMonitor/manifest similarity index 69% rename from plugins/ChangeFileMonitor/manifest rename to plugins/FileMonitor/manifest index 4a03c5f4..a6d00ee2 100644 --- a/plugins/ChangeFileMonitor/manifest +++ b/plugins/FileMonitor/manifest @@ -1,13 +1,13 @@ -id: changefilemonitor -name: ChangeFileMonitor +id: filemonitor +name: FileMonitor metadata: description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. 
-version: 0.1.0 +version: 0.2.0 date: "2024-07-26 08:00:00" requires: [pip install stashapp-tools, pip install pyYAML, pip install watchdog] -source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor +source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor files: - README.md -- changefilemonitor.yml -- changefilemonitor.py +- filemonitor.yml +- filemonitor.py - requirements.txt diff --git a/plugins/ChangeFileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt similarity index 100% rename from plugins/ChangeFileMonitor/requirements.txt rename to plugins/FileMonitor/requirements.txt From 2bff74c7cf63107870513865ea9468214019d829 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Mon, 29 Jul 2024 23:32:45 -0400 Subject: [PATCH 15/39] Removed files per review --- plugins/FileMonitor/.gitignore | 525 --------------------------------- plugins/FileMonitor/manifest | 13 - plugins/RenameFile/.gitignore | 525 --------------------------------- plugins/RenameFile/manifest | 14 - 4 files changed, 1077 deletions(-) delete mode 100644 plugins/FileMonitor/.gitignore delete mode 100644 plugins/FileMonitor/manifest delete mode 100644 plugins/RenameFile/.gitignore delete mode 100644 plugins/RenameFile/manifest diff --git a/plugins/FileMonitor/.gitignore b/plugins/FileMonitor/.gitignore deleted file mode 100644 index dd93ef78..00000000 --- a/plugins/FileMonitor/.gitignore +++ /dev/null @@ -1,525 +0,0 @@ -$ cat .gitignore - -# Ignore these patterns -desktop.ini -~AutoRecover*.* -*.aps -*.exe -*.idb -*.ipch -*.lib -*.log -*.log.1 -*.log.2 -*.manifest -*.obj -*.pch -*.pdb -*.sdf -*.suo -*.tlog -*.user -*.7z -*.swp -*.zip -data.csv -/boost -/scintilla -/bin -/SQL -/__pycache__ -__pycache__/ -renamefile_settings.cpython-310.pyc - -## Ignore Visual Studio temporary files, build results, and -## files generated by popular Visual Studio add-ons. 
-## -## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore - -# User-specific files -*.rsuser -*.suo -*.user -*.userosscache -*.sln.docstates - -# User-specific files (MonoDevelop/Xamarin Studio) -*.userprefs - -# Mono auto generated files -mono_crash.* - -# Build results -[Dd]ebug/ -[Dd]ebugPublic/ -[Rr]elease/ -[Rr]eleases/ -x64/ -x86/ -[Ww][Ii][Nn]32/ -[Aa][Rr][Mm]/ -[Aa][Rr][Mm]64/ -bld/ -[Bb]in/ -[Oo]bj/ -[Ll]og/ -[Ll]ogs/ - - -# Visual Studio 2015/2017 cache/options directory -.vs/ -# Uncomment if you have tasks that create the project's static files in wwwroot -#wwwroot/ - -# Visual Studio 2017 auto generated files -Generated\ Files/ - -# MSTest test Results -[Tt]est[Rr]esult*/ -[Bb]uild[Ll]og.* - -# NUnit -*.VisualState.xml -TestResult.xml -nunit-*.xml - -# Build Results of an ATL Project -[Dd]ebugPS/ -[Rr]eleasePS/ -dlldata.c - -# Benchmark Results -BenchmarkDotNet.Artifacts/ - -# .NET Core -project.lock.json -project.fragment.lock.json -artifacts/ - -# Tye -.tye/ - -# ASP.NET Scaffolding -ScaffoldingReadMe.txt - -# StyleCop -StyleCopReport.xml - -# Files built by Visual Studio -*_i.c -*_p.c -*_h.h -*_i.h -*.ilk -*.meta -*.obj -*.iobj -*.pch -*.pdb -*.ipdb -*.pgc -*.pgd -*.rsp -*.sbr -*.tlb -*.tli -*.tlh -*.tmp -*.tmp_proj -*_wpftmp.csproj -*.log -*.vspscc -*.vssscc -.builds -*.pidb -*.svclog -*.scc - -# Chutzpah Test files -_Chutzpah* - -# Visual C++ cache files -ipch/ -*.aps -*.ncb -*.opendb -*.opensdf -*.sdf -*.cachefile -*.VC.db -*.VC.VC.opendb - -# Visual Studio profiler -*.psess -*.vsp -*.vspx -*.sap - -# Visual Studio Trace Files -*.e2e - -# TFS 2012 Local Workspace -$tf/ - -# Guidance Automation Toolkit -*.gpState - -# ReSharper is a .NET coding add-in -_ReSharper*/ -*.[Rr]e[Ss]harper -*.DotSettings.user -# JustCode is a .NET coding add-in -.JustCode - -# TeamCity is a build add-in -_TeamCity* - -# DotCover is a Code Coverage Tool -*.dotCover - -# AxoCover is a Code Coverage Tool -.axoCover/* -!.axoCover/settings.json - -# Coverlet is a free, cross platform Code Coverage Tool -coverage*.json -coverage*.xml -coverage*.info - -# Visual Studio code coverage results -*.coverage -*.coveragexml - -# NCrunch -_NCrunch_* -.*crunch*.local.xml -nCrunchTemp_* - -# MightyMoose -*.mm.* -AutoTest.Net/ - -# Web workbench (sass) -.sass-cache/ - -# Installshield output folder -[Ee]xpress/ - -# DocProject is a documentation generator add-in -DocProject/buildhelp/ -DocProject/Help/*.HxT -DocProject/Help/*.HxC -DocProject/Help/*.hhc -DocProject/Help/*.hhk -DocProject/Help/*.hhp -DocProject/Help/Html2 -DocProject/Help/html - -# Click-Once directory -publish/ - -# Publish Web Output -*.[Pp]ublish.xml -*.azurePubxml -# Note: Comment the next line if you want to checkin your web deploy settings, -# but database connection strings (with potential passwords) will be unencrypted -*.pubxml -*.publishproj - -# Microsoft Azure Web App publish settings. Comment the next line if you want to -# checkin your Azure Web App publish settings, but sensitive information contained -# in these scripts will be unencrypted -PublishScripts/ - -# NuGet Packages -*.nupkg -# NuGet Symbol Packages -*.snupkg -# The packages folder can be ignored because of Package Restore -**/[Pp]ackages/* -# except build/, which is used as an MSBuild target. 
-!**/[Pp]ackages/build/ -# Uncomment if necessary however generally it will be regenerated when needed -#!**/[Pp]ackages/repositories.config -# NuGet v3's project.json files produces more ignorable files -*.nuget.props -*.nuget.targets - -# Microsoft Azure Build Output -csx/ -*.build.csdef - -# Microsoft Azure Emulator -ecf/ -rcf/ - -# Windows Store app package directories and files -AppPackages/ -BundleArtifacts/ -Package.StoreAssociation.xml -_pkginfo.txt -*.appx -*.appxbundle -*.appxupload - -# Visual Studio cache files -# files ending in .cache can be ignored -*.[Cc]ache -# but keep track of directories ending in .cache -!?*.[Cc]ache/ - -# Others -ClientBin/ -~$* -*~ -*.dbmdl -*.dbproj.schemaview -*.jfm -*.pfx -*.publishsettings -node_modules/ -orleans.codegen.cs - -# Including strong name files can present a security risk -# (https://github.com/github/gitignore/pull/2483#issue-259490424) -#*.snk - -# Since there are multiple workflows, uncomment next line to ignore bower_components -# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) -#bower_components/ - -# RIA/Silverlight projects -Generated_Code/ - -# Backup & report files from converting an old project file -# to a newer Visual Studio version. Backup files are not needed, -# because we have git ;-) -_UpgradeReport_Files/ -Backup*/ -UpgradeLog*.XML -UpgradeLog*.htm -ServiceFabricBackup/ -*.rptproj.bak - -# SQL Server files -*.mdf -*.ldf -*.ndf - -# Business Intelligence projects -*.rdl.data -*.bim.layout -*.bim_*.settings -*.rptproj.rsuser -*- [Bb]ackup.rdl -*- [Bb]ackup ([0-9]).rdl -*- [Bb]ackup ([0-9][0-9]).rdl - -# Microsoft Fakes -FakesAssemblies/ - -# GhostDoc plugin setting file -*.GhostDoc.xml - -# Node.js Tools for Visual Studio -.ntvs_analysis.dat -node_modules/ - -# Visual Studio 6 build log -*.plg - -# Visual Studio 6 workspace options file -*.opt - -# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
-*.vbw - -# Visual Studio LightSwitch build output -**/*.HTMLClient/GeneratedArtifacts -**/*.DesktopClient/GeneratedArtifacts -**/*.DesktopClient/ModelManifest.xml -**/*.Server/GeneratedArtifacts -**/*.Server/ModelManifest.xml -_Pvt_Extensions - -# Paket dependency manager -.paket/paket.exe -paket-files/ - -# FAKE - F# Make -.fake/ - -# JetBrains Rider -.idea/ -*.sln.iml - -# CodeRush -.cr/ - -# CodeRush personal settings -.cr/personal - -# Python Tools for Visual Studio (PTVS) -__pycache__/ -*.pyc - -# Cake - Uncomment if you are using it -# tools/** -# !tools/packages.config - -# Tabs Studio -*.tss - -# Telerik's JustMock configuration file -*.jmconfig - -# BizTalk build output -*.btp.cs -*.btm.cs -*.odx.cs -*.xsd.cs - -# OpenCover UI analysis results -OpenCover/ - -# Azure Stream Analytics local run output -ASALocalRun/ - -# MSBuild Binary and Structured Log -*.binlog - -# NVidia Nsight GPU debugger configuration file -*.nvuser - -# MFractors (Xamarin productivity tool) working folder -.mfractor/ - -# Local History for Visual Studio -.localhistory/ - -# BeatPulse healthcheck temp database -healthchecksdb - -# Backup folder for Package Reference Convert tool in Visual Studio 2017 -MigrationBackup/ - -# Ionide (cross platform F# VS Code tools) working folder -.ionide/ - -# Fody - auto-generated XML schema -FodyWeavers.xsd - -## -## Visual studio for Mac -## - - -# globs -Makefile.in -*.userprefs -*.usertasks -config.make -config.status -aclocal.m4 -install-sh -autom4te.cache/ -*.tar.gz -tarballs/ -test-results/ - -# Mac bundle stuff -*.dmg -*.app - -# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore -# Windows thumbnail cache files -Thumbs.db -ehthumbs.db -ehthumbs_vista.db - -# Dump file -*.stackdump - -# Folder config file -[Dd]esktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msix -*.msm -*.msp - -# Windows shortcuts -*.lnk - -# JetBrains Rider -.idea/ -*.sln.iml - -## -## Visual Studio Code -## -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json - -# Other miscellaneous folders -zzMiscellaneous/ -zzExcludeFromGithub/ -FromAnotherLanuageKit/ -_BadLanguages/ - -# Exclude test data and temp files -Test_Data/ -*__ExcludeFromRepo__*.* -*__DoNotAddToRepo__*.* -deleteme/ -RelatedProjects/ -obj/ - -# Exclude temp and backup files -*.bak - -# ########################################### -# Unique to this project -# ########################################### -# Exclude reparsepoint files which are used to help view file using VS -*.xaml.xml -gitignore.txt - -GTranslate/obj/ diff --git a/plugins/FileMonitor/manifest b/plugins/FileMonitor/manifest deleted file mode 100644 index a6d00ee2..00000000 --- a/plugins/FileMonitor/manifest +++ /dev/null @@ -1,13 +0,0 @@ -id: filemonitor -name: FileMonitor -metadata: - description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. 
-version: 0.2.0 -date: "2024-07-26 08:00:00" -requires: [pip install stashapp-tools, pip install pyYAML, pip install watchdog] -source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor -files: -- README.md -- filemonitor.yml -- filemonitor.py -- requirements.txt diff --git a/plugins/RenameFile/.gitignore b/plugins/RenameFile/.gitignore deleted file mode 100644 index dd93ef78..00000000 --- a/plugins/RenameFile/.gitignore +++ /dev/null @@ -1,525 +0,0 @@ -$ cat .gitignore - -# Ignore these patterns -desktop.ini -~AutoRecover*.* -*.aps -*.exe -*.idb -*.ipch -*.lib -*.log -*.log.1 -*.log.2 -*.manifest -*.obj -*.pch -*.pdb -*.sdf -*.suo -*.tlog -*.user -*.7z -*.swp -*.zip -data.csv -/boost -/scintilla -/bin -/SQL -/__pycache__ -__pycache__/ -renamefile_settings.cpython-310.pyc - -## Ignore Visual Studio temporary files, build results, and -## files generated by popular Visual Studio add-ons. -## -## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore - -# User-specific files -*.rsuser -*.suo -*.user -*.userosscache -*.sln.docstates - -# User-specific files (MonoDevelop/Xamarin Studio) -*.userprefs - -# Mono auto generated files -mono_crash.* - -# Build results -[Dd]ebug/ -[Dd]ebugPublic/ -[Rr]elease/ -[Rr]eleases/ -x64/ -x86/ -[Ww][Ii][Nn]32/ -[Aa][Rr][Mm]/ -[Aa][Rr][Mm]64/ -bld/ -[Bb]in/ -[Oo]bj/ -[Ll]og/ -[Ll]ogs/ - - -# Visual Studio 2015/2017 cache/options directory -.vs/ -# Uncomment if you have tasks that create the project's static files in wwwroot -#wwwroot/ - -# Visual Studio 2017 auto generated files -Generated\ Files/ - -# MSTest test Results -[Tt]est[Rr]esult*/ -[Bb]uild[Ll]og.* - -# NUnit -*.VisualState.xml -TestResult.xml -nunit-*.xml - -# Build Results of an ATL Project -[Dd]ebugPS/ -[Rr]eleasePS/ -dlldata.c - -# Benchmark Results -BenchmarkDotNet.Artifacts/ - -# .NET Core -project.lock.json -project.fragment.lock.json -artifacts/ - -# Tye -.tye/ - -# ASP.NET Scaffolding -ScaffoldingReadMe.txt - -# StyleCop -StyleCopReport.xml - -# Files built by Visual Studio -*_i.c -*_p.c -*_h.h -*_i.h -*.ilk -*.meta -*.obj -*.iobj -*.pch -*.pdb -*.ipdb -*.pgc -*.pgd -*.rsp -*.sbr -*.tlb -*.tli -*.tlh -*.tmp -*.tmp_proj -*_wpftmp.csproj -*.log -*.vspscc -*.vssscc -.builds -*.pidb -*.svclog -*.scc - -# Chutzpah Test files -_Chutzpah* - -# Visual C++ cache files -ipch/ -*.aps -*.ncb -*.opendb -*.opensdf -*.sdf -*.cachefile -*.VC.db -*.VC.VC.opendb - -# Visual Studio profiler -*.psess -*.vsp -*.vspx -*.sap - -# Visual Studio Trace Files -*.e2e - -# TFS 2012 Local Workspace -$tf/ - -# Guidance Automation Toolkit -*.gpState - -# ReSharper is a .NET coding add-in -_ReSharper*/ -*.[Rr]e[Ss]harper -*.DotSettings.user -# JustCode is a .NET coding add-in -.JustCode - -# TeamCity is a build add-in -_TeamCity* - -# DotCover is a Code Coverage Tool -*.dotCover - -# AxoCover is a Code Coverage Tool -.axoCover/* -!.axoCover/settings.json - -# Coverlet is a free, cross platform Code Coverage Tool -coverage*.json -coverage*.xml -coverage*.info - -# Visual Studio code coverage results -*.coverage -*.coveragexml - -# NCrunch -_NCrunch_* -.*crunch*.local.xml -nCrunchTemp_* - -# MightyMoose -*.mm.* -AutoTest.Net/ - -# Web workbench (sass) -.sass-cache/ - -# Installshield output folder -[Ee]xpress/ - -# DocProject is a documentation generator add-in -DocProject/buildhelp/ -DocProject/Help/*.HxT -DocProject/Help/*.HxC -DocProject/Help/*.hhc -DocProject/Help/*.hhk -DocProject/Help/*.hhp -DocProject/Help/Html2 -DocProject/Help/html - -# Click-Once 
directory -publish/ - -# Publish Web Output -*.[Pp]ublish.xml -*.azurePubxml -# Note: Comment the next line if you want to checkin your web deploy settings, -# but database connection strings (with potential passwords) will be unencrypted -*.pubxml -*.publishproj - -# Microsoft Azure Web App publish settings. Comment the next line if you want to -# checkin your Azure Web App publish settings, but sensitive information contained -# in these scripts will be unencrypted -PublishScripts/ - -# NuGet Packages -*.nupkg -# NuGet Symbol Packages -*.snupkg -# The packages folder can be ignored because of Package Restore -**/[Pp]ackages/* -# except build/, which is used as an MSBuild target. -!**/[Pp]ackages/build/ -# Uncomment if necessary however generally it will be regenerated when needed -#!**/[Pp]ackages/repositories.config -# NuGet v3's project.json files produces more ignorable files -*.nuget.props -*.nuget.targets - -# Microsoft Azure Build Output -csx/ -*.build.csdef - -# Microsoft Azure Emulator -ecf/ -rcf/ - -# Windows Store app package directories and files -AppPackages/ -BundleArtifacts/ -Package.StoreAssociation.xml -_pkginfo.txt -*.appx -*.appxbundle -*.appxupload - -# Visual Studio cache files -# files ending in .cache can be ignored -*.[Cc]ache -# but keep track of directories ending in .cache -!?*.[Cc]ache/ - -# Others -ClientBin/ -~$* -*~ -*.dbmdl -*.dbproj.schemaview -*.jfm -*.pfx -*.publishsettings -node_modules/ -orleans.codegen.cs - -# Including strong name files can present a security risk -# (https://github.com/github/gitignore/pull/2483#issue-259490424) -#*.snk - -# Since there are multiple workflows, uncomment next line to ignore bower_components -# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) -#bower_components/ - -# RIA/Silverlight projects -Generated_Code/ - -# Backup & report files from converting an old project file -# to a newer Visual Studio version. Backup files are not needed, -# because we have git ;-) -_UpgradeReport_Files/ -Backup*/ -UpgradeLog*.XML -UpgradeLog*.htm -ServiceFabricBackup/ -*.rptproj.bak - -# SQL Server files -*.mdf -*.ldf -*.ndf - -# Business Intelligence projects -*.rdl.data -*.bim.layout -*.bim_*.settings -*.rptproj.rsuser -*- [Bb]ackup.rdl -*- [Bb]ackup ([0-9]).rdl -*- [Bb]ackup ([0-9][0-9]).rdl - -# Microsoft Fakes -FakesAssemblies/ - -# GhostDoc plugin setting file -*.GhostDoc.xml - -# Node.js Tools for Visual Studio -.ntvs_analysis.dat -node_modules/ - -# Visual Studio 6 build log -*.plg - -# Visual Studio 6 workspace options file -*.opt - -# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
-*.vbw - -# Visual Studio LightSwitch build output -**/*.HTMLClient/GeneratedArtifacts -**/*.DesktopClient/GeneratedArtifacts -**/*.DesktopClient/ModelManifest.xml -**/*.Server/GeneratedArtifacts -**/*.Server/ModelManifest.xml -_Pvt_Extensions - -# Paket dependency manager -.paket/paket.exe -paket-files/ - -# FAKE - F# Make -.fake/ - -# JetBrains Rider -.idea/ -*.sln.iml - -# CodeRush -.cr/ - -# CodeRush personal settings -.cr/personal - -# Python Tools for Visual Studio (PTVS) -__pycache__/ -*.pyc - -# Cake - Uncomment if you are using it -# tools/** -# !tools/packages.config - -# Tabs Studio -*.tss - -# Telerik's JustMock configuration file -*.jmconfig - -# BizTalk build output -*.btp.cs -*.btm.cs -*.odx.cs -*.xsd.cs - -# OpenCover UI analysis results -OpenCover/ - -# Azure Stream Analytics local run output -ASALocalRun/ - -# MSBuild Binary and Structured Log -*.binlog - -# NVidia Nsight GPU debugger configuration file -*.nvuser - -# MFractors (Xamarin productivity tool) working folder -.mfractor/ - -# Local History for Visual Studio -.localhistory/ - -# BeatPulse healthcheck temp database -healthchecksdb - -# Backup folder for Package Reference Convert tool in Visual Studio 2017 -MigrationBackup/ - -# Ionide (cross platform F# VS Code tools) working folder -.ionide/ - -# Fody - auto-generated XML schema -FodyWeavers.xsd - -## -## Visual studio for Mac -## - - -# globs -Makefile.in -*.userprefs -*.usertasks -config.make -config.status -aclocal.m4 -install-sh -autom4te.cache/ -*.tar.gz -tarballs/ -test-results/ - -# Mac bundle stuff -*.dmg -*.app - -# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore -# Windows thumbnail cache files -Thumbs.db -ehthumbs.db -ehthumbs_vista.db - -# Dump file -*.stackdump - -# Folder config file -[Dd]esktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msix -*.msm -*.msp - -# Windows shortcuts -*.lnk - -# JetBrains Rider -.idea/ -*.sln.iml - -## -## Visual Studio Code -## -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json - -# Other miscellaneous folders -zzMiscellaneous/ -zzExcludeFromGithub/ -FromAnotherLanuageKit/ -_BadLanguages/ - -# Exclude test data and temp files -Test_Data/ -*__ExcludeFromRepo__*.* -*__DoNotAddToRepo__*.* -deleteme/ -RelatedProjects/ -obj/ - -# Exclude temp and backup files -*.bak - -# ########################################### -# Unique to this project -# ########################################### -# Exclude reparsepoint files which are used to help view file using VS -*.xaml.xml -gitignore.txt - -GTranslate/obj/ diff --git a/plugins/RenameFile/manifest b/plugins/RenameFile/manifest deleted file mode 100644 index a98d0dcf..00000000 --- a/plugins/RenameFile/manifest +++ /dev/null @@ -1,14 +0,0 @@ -id: renamefile -name: RenameFile -metadata: - description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. 
-version: 0.4.0 -date: "2024-07-26 08:00:00" -requires: [pip install stashapp-tools, pip install pyYAML] -source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile -files: -- README.md -- renamefile.yml -- renamefile.py -- renamefile_settings.py -- requirements.txt From e5d8f82f30aa11dd6890e399022ff09d7f1d8a88 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sat, 10 Aug 2024 13:19:33 -0400 Subject: [PATCH 16/39] Added issue templates --- .github/ISSUE_TEMPLATE/bug_report.yml | 77 ++++++++++++++++++ .github/ISSUE_TEMPLATE/bug_report_plugin.yml | 84 ++++++++++++++++++++ .github/ISSUE_TEMPLATE/discussion.yml | 42 ++++++++++ .github/ISSUE_TEMPLATE/feature_request.yml | 35 ++++++++ .github/ISSUE_TEMPLATE/help.yml | 37 +++++++++ 5 files changed, 275 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 .github/ISSUE_TEMPLATE/bug_report_plugin.yml create mode 100644 .github/ISSUE_TEMPLATE/discussion.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 .github/ISSUE_TEMPLATE/help.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000..061780ac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,77 @@ +name: 🐞 Bug +description: Create a bug report +title: "🐞[Bug] Your_Short_title" +labels: [Bug] +body: + - type: markdown + attributes: + value: | + Thank you for taking the time to fill out this bug report! + Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. + Steps to reproduce the behavior: + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + - type: textarea + id: steps + attributes: + label: Please enter steps to reproduce the behavior. + validations: + required: true + - type: input + id: stash_ver + attributes: + label: Stash Version (from Settings -> About) + placeholder: e.g. v0.26.2 + validations: + required: true + - type: input + id: os + attributes: + label: What Operating System (OS)? + placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS) + validations: + required: true + - type: input + id: device + attributes: + label: Phone or tablets + placeholder: e.g. iPhone6, Galaxy Tab A9+ + validations: + required: false + - type: input + id: browser + attributes: + label: What browser and version? + placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any + validations: + required: true + - type: textarea + id: expected-behavior + attributes: + label: Expected Behavior + description: What was the expected behavior? + validations: + required: false + - type: textarea + id: logging + attributes: + label: Logging + description: Enter Stash logging. + validations: + required: false + - type: textarea + id: snapshots + attributes: + label: Snapshot(s) + description: Optionally attach snapshot(s) which displays the bug. + validations: + required: false + - type: textarea + id: additional + attributes: + label: Additional context + description: Add any other context about the problem here. 
+ validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/bug_report_plugin.yml b/.github/ISSUE_TEMPLATE/bug_report_plugin.yml new file mode 100644 index 00000000..5c03d45b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report_plugin.yml @@ -0,0 +1,84 @@ +name: 🪲 Plugin Bug +description: Create a plugin bug report +title: "🪲[EnterPluginNameHere] Your_Short_title" +labels: [Plugin_Bug] +body: + - type: markdown + attributes: + value: | + Thank you for taking the time to fill out this bug report! + Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. + Steps to reproduce the behavior: + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + - type: textarea + id: steps + attributes: + label: Please enter steps to reproduce the behavior. + validations: + required: true + - type: input + id: stash_ver + attributes: + label: Stash Version (from Settings -> About) + placeholder: e.g. v0.26.2 + validations: + required: true + - type: input + id: os + attributes: + label: What Operating System (OS)? + placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS) + validations: + required: true + - type: input + id: device + attributes: + label: Phone or tablets + placeholder: e.g. iPhone6, Galaxy Tab A9+ + validations: + required: false + - type: input + id: browser + attributes: + label: What browser and version? + placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any + validations: + required: true + - type: textarea + id: install + attributes: + label: The pip install for the plugin + description: pip install stashapp-tools --upgrade, pip install pyYAML + validations: + required: true + - type: textarea + id: expected-behavior + attributes: + label: Expected Behavior + description: What was the expected behavior? + validations: + required: false + - type: textarea + id: logging + attributes: + label: Logging + description: Enter Stash logging and plugin logging file if applicable. + validations: + required: false + - type: textarea + id: snapshots + attributes: + label: Snapshot(s) + description: Optionally attach snapshot(s) which displays the bug. + validations: + required: false + - type: textarea + id: additional + attributes: + label: Additional context + description: Add any other context about the problem here. + validations: + required: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/discussion.yml b/.github/ISSUE_TEMPLATE/discussion.yml new file mode 100644 index 00000000..177b35cd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/discussion.yml @@ -0,0 +1,42 @@ +name: 🛗Discussion / Request for Commentary [RFC] +description: This is for issues that will be discussed and won't necessarily result directly in commits or pull requests. +title: "🛗[RFC]: Your_Short_title" +labels: [RFC] +body: + - type: markdown + attributes: + value: | + This is for issues that will be discussed and won't necessarily result directly in commits or pull requests. + Please ensure that you respect people's time and attention and understand that people are volunteering their time, so concision is ideal and considerate. + Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. + - type: textarea + id: Title + attributes: + label: Optional title of the topic to discuss. 
+    validations:
+      required: false
+  - type: textarea
+    id: Summary
+    attributes:
+      label: Describe the scope of your topic and your goals, ideally within a single paragraph or a TL;DR. A summary makes it easier for people to determine at a glance if they can contribute.
+    validations:
+      required: true
+  - type: textarea
+    id: Details
+    attributes:
+      label: Only required if the summary and title don't cover everything.
+    validations:
+      required: false
+  - type: textarea
+    id: Examples
+    attributes:
+      label: If you can show picture or video examples, post them here.
+    validations:
+      required: false
+  - type: textarea
+    id: snapshots
+    attributes:
+      label: Snapshot(s)
+      description: Optionally attach additional snapshot(s) which help describe the discussion.
+    validations:
+      required: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 00000000..9593dc41
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,35 @@
+name: 💡️Feature Request
+description: Suggest an improvement for this project
+title: "💡️[Enhancement]: Your_Short_title"
+labels: [Enhancement]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Please fill out the following fields with as much detail as possible:
+        Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+  - type: textarea
+    id: problem
+    attributes:
+      label: If the feature request is related to a problem, give a clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+    validations:
+      required: false
+  - type: textarea
+    id: solution
+    attributes:
+      label: Describe the solution you'd like. A clear and concise description of what you want to happen.
+    validations:
+      required: true
+  - type: textarea
+    id: alternatives
+    attributes:
+      label: Describe alternatives you've considered. A clear and concise description of any alternative solutions or features you've considered.
+    validations:
+      required: false
+  - type: textarea
+    id: Snapshots
+    attributes:
+      label: Snapshots / Images
+      description: Add any other context or screenshots about the feature request here, which can help explain the feature, and a description of what to look for in the image(s).
+    validations:
+      required: false
diff --git a/.github/ISSUE_TEMPLATE/help.yml b/.github/ISSUE_TEMPLATE/help.yml
new file mode 100644
index 00000000..dae58e2e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/help.yml
@@ -0,0 +1,37 @@
+name: ❓Help
+description: Post your question
+title: "❓[Help]: Your_Short_title"
+labels: [Question]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Please fill out the following fields with as much detail as possible, so that we can better answer your question.
+        Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+  - type: textarea
+    id: question
+    attributes:
+      label: Please enter a clear and concise detailed question.
+    validations:
+      required: true
+  - type: input
+    id: os
+    attributes:
+      label: What Operating System (OS)?
+      placeholder: e.g. Windows, MacOS, Linux
+    validations:
+      required: false
+  - type: input
+    id: browser
+    attributes:
+      label: What browser and version?
+      placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
+    validations:
+      required: false
+  - type: textarea
+    id: snapshots
+    attributes:
+      label: Snapshot(s)
+      description: Optionally attach snapshot(s) which help describe the question. 
+ validations: + required: false From 971d0ea408d8503acc8cdcd03f9e0cd1f105da56 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Fri, 16 Aug 2024 04:03:12 -0400 Subject: [PATCH 17/39] Added run as a service UI option and scheduler option Can start FileMonitor from Stash UI as a service. Stop FileMonitor from the UI. Scheduler service for multiple UI task. Scheduler service for plugins. UI option to trim backup DB files. Config file option to exclude paths. Config file option to only include paths. Config file option to only include specified file types. Very minor changes to RenameFile plugin. --- plugins/FileMonitor/README.md | 171 +++++- plugins/FileMonitor/StashPluginHelper.py | 363 +++++++++++++ plugins/FileMonitor/filemonitor.py | 625 +++++++++++++++------- plugins/FileMonitor/filemonitor.yml | 36 +- plugins/FileMonitor/filemonitor_config.py | 126 ++++- plugins/FileMonitor/requirements.txt | 5 +- plugins/RenameFile/README.md | 4 +- plugins/RenameFile/renamefile.py | 2 +- plugins/RenameFile/requirements.txt | 2 +- 9 files changed, 1106 insertions(+), 228 deletions(-) create mode 100644 plugins/FileMonitor/StashPluginHelper.py diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md index dcf32fdb..a196509c 100644 --- a/plugins/FileMonitor/README.md +++ b/plugins/FileMonitor/README.md @@ -1,35 +1,164 @@ -# FileMonitor: Ver 0.1.0 (By David Maisonave) -FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occurs in the Stash library paths. +# FileMonitor: Ver 0.8.2 (By David Maisonave) +FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features: +- Updates Stash when any file changes occurs in the Stash library. +- **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**. -### Using FileMonitor as a plugin -- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor] button. - - ![FileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334) -- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**. - - ![Kill_FileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1) +## Starting FileMonitor from the UI +From the GUI, FileMonitor can be started as a service or as a plugin. The recommended method is to start it as a service. When started as a service, it will jump on the Task Queue momentarily, and then disappear as it starts running in the background. +- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor Service] button. + - ![FileMonitorService](https://github.com/user-attachments/assets/b12aeca9-37a8-447f-90da-26e9440735ad) + - **Important Note**: At first, this will show up as a plugin in the Task Queue momentarily. It will then disappear from the Task Queue and run in the background as a service. +- To stop FileMonitor click on [Stop Library Monitor] button. +- The **[Monitor as a Plugin]** option is mainly available for backwards compatibility and for test purposes. + -### Using FileMonitor as a script +## Using FileMonitor as a script **FileMonitor** can be called as a standalone script. -- To start monitoring call the script and pass any argument. 
- - python filemonitor.py **start** -- To stop **FileMonitor**, pass argument **stop**. - - python filemonitor.py **stop** - - After running above command line, **FileMonitor** will stop after the next file change occurs. +- To start monitoring call the script and pass --url and the Stash URL. + - python filemonitor.py --url http://localhost:9999 +- To stop **FileMonitor**, pass argument **--stop**. + - python filemonitor.py **--stop** - The stop command works to stop the standalone job and the Stash plugin task job. +- To restart **FileMonitor**, pass argument **--restart**. + - python filemonitor.py **--restart** + - The restart command restarts FileMonitor as a Task in Stash. -### Requirements -`pip install stashapp-tools` -`pip install pyYAML` -`pip install watchdog` +# Task Scheduler +To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor** and enable the **Scheduler** option. +![ReoccurringTaskScheduler](https://github.com/user-attachments/assets/5a7bf6a4-3bd6-4692-a6c3-e9f8f4664f14) -### Installation +- **Warning:** The below task are already preconfigured in the scheduler, and when the scheduler is enabled all these task are enabled. + - Auto Tag -> [Auto Tag] (Daily) + - Maintenance -> [Clean] (every 2 days) + - Maintenance -> [Clean Generated Files] (every 2 days) + - Maintenance -> [Optimise Database] (Daily) + - Generated Content-> [Generate] (Every Sunday at 7AM) + - Library -> [Scan] (Weekly) (Every Sunday at 3AM) + - Backup -> [Backup] 2nd sunday of the month at 1AM +- The example task are disabled by default because they have a zero frequency value. + +To configure the schedule or to add new task, edit the **task_scheduler** section in the **filemonitor_config.py** file. +```` python +"task_scheduler": [ + # To create a daily task, include each day of the week for the weekday field. + {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) + {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM) + + # The following tasks are scheduled for 3 days out of the week. + {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM) + {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM) + + # The following tasks are scheduled weekly + {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM) + {"task" : "Scan", "weekday" : "sunday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every Sunday at 3AM) + + # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field. + # The monthly field value must be 1, 2, 3, or 4. + # 1 = 1st specified weekday of the month. Example 1st monday. + # 2 = 2nd specified weekday of the month. Example 2nd monday of the month. + # 3 = 3rd specified weekday of the month. + # 4 = 4th specified weekday of the month. + # The following task is scheduled monthly + {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) + + # The following task is the syntax used for a plugins. 
A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+    # This task requires plugin [Path Parser], and it's disabled by default.
+    {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time.
+
+    # Example#A1: Task to call the call_GQL API with custom input
+    {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
+
+    # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by the filemonitor.py current directory.
+    #             The args field is NOT required.
+    {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
+
+    # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
+    {"task" : "Scan", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+    {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+    {"task" : "Clean", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Maintenance -> [Clean]
+
+    # Example#A4: Task which calls Migrations -> [Rename generated files]
+    {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (twice-weekly) example
+
+    # The above weekday method is the more reliable way to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
+
+    # The below examples use the frequency field method, which can work with minutes and hours. A zero frequency value disables the task.
+    # Note: Both seconds and days are also supported for the frequency field.
+    #       However, seconds is mainly used for test purposes.
+    #       And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
+    # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
+
+    # Example#B1: Task for calling another Stash plugin, which needs the plugin name and plugin ID.
+    {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+
+    # Example#B2: Task to execute a command
+    {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+
+    # Example#B3: Task to execute a command with optional args field, and using a keyword which gets replaced with the filemonitor.py current directory.
+    {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+],
+````
+- To add plugins to the task list, both the Plugin-ID and the plugin name are required. The plugin ID is usually the file name of the script without the extension.
+- Tasks can be scheduled to run monthly, weekly, hourly, and by minutes.
+- The scheduler list uses two types of syntax. One is **weekday** based, and the other is **frequency** based.
+  - **weekday Based**
+    - Use the weekday based syntax for daily, weekly, and monthly schedules.
+    - All the weekday based methods must have a **weekday** field and a **time** field, which specify the day(s) of the week and the time to start the task.
+    - **Daily**:
+      - A daily task populates the weekday field with all the days of the week.
+      - **Daily Example**:
+        - Starts a task daily at 6AM.
+        - `{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"},`
+    - **Weekly**:
+      - **Weekly Example**:
+        - Starts a task weekly, every monday at 9AM.
+        - `{"task" : "Generate", "weekday" : "monday", "time" : "09:00"},`
+    - **Monthly**:
+      - The monthly syntax is similar to the weekly format, but it also includes a **"monthly"** field which must be set to 1, 2, 3, or 4.
+      - **Monthly Examples**:
+        - Starts a task once a month on the 3rd sunday of the month at 1AM.
+        - `{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 3},`
+        - Starts a task at 2PM once a month on the 1st saturday of the month.
+        - `{"task" : "Clean", "weekday" : "saturday", "time" : "14:00", "monthly" : 1},`
+
+  - **Frequency Based**
+    - The frequency field can be in **minutes** or **hours**.
+    - The frequency value must be a number greater than zero. A frequency value of zero will disable the task on the schedule.
+    - **Frequency Based Examples**:
+      - Starts a task every 24 hours.
+      - `{"task" : "Auto Tag", "hours" : 24},`
+      - Starts a (**plugin**) task every 30 minutes.
+      - `{"task" : "Create Tags", "pluginId" : "pathParser", "minutes" : 30},`
+    - The frequency field also supports **days** and **seconds**.
+      - **seconds** is mainly used for test purposes.
+      - The use of **days** is discouraged, because it only works if FileMonitor is running for that many days non-stop.
+        - For example, if days is used with 30 days, FileMonitor would have to be running non-stop for 30 days before the task is activated. If it's restarted at any time during the 30 days, the countdown restarts.
+      - It's recommended to use the weekday based syntax over days, because many restarts can occur during the week or month, and the task will still get started as long as FileMonitor is running during the scheduled activation time.
+
+- The scheduler feature requires `pip install schedule`
+  - If the user leaves the scheduler disabled, **schedule** does NOT have to be installed.
+- For best results, use the scheduler with FileMonitor running as a service.
+
+## Requirements
+- pip install -r requirements.txt
+- Or manually install each requirement:
+  - `pip install stashapp-tools --upgrade`
+  - `pip install pyYAML`
+  - `pip install watchdog`
+  - `pip install schedule`
+
+## Installation
 - Follow **Requirements** instructions.
 - In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**.
 - Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\FileMonitor**).
-- Restart Stash.
+- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins.
 That's it!!!
 
-### Options
-- All options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
+## Options
+- Main options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
+  - When the UI option [Max DB Backups] is set to a value greater than 1, and the scheduler is enabled, the number of database backup files is trimmed down to the set [**Max DB Backups**] value after the scheduler executes the Backup task (see the sketch below).
+  - The other options are self-explanatory from the UI.
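+  - As a rough sketch, the trim step keeps only the newest backups. This is a minimal approximation of the plugin's trimDbFiles logic, not the plugin code itself; the directory path and backup count below are illustrative:
+```` python
+import os
+
+backup_dir = "C:/backups"  # illustrative; the plugin reads Stash's backupDirectoryPath setting
+max_backups = 4            # illustrative; stands in for the [Max DB Backups] UI value
+
+# Stash DB backup files share the "stash-go.sqlite." prefix. Assuming the usual
+# timestamp suffix, a lexical sort puts the oldest backups first, so everything
+# before the newest max_backups entries can be deleted.
+backups = sorted(f for f in os.listdir(backup_dir) if f.startswith("stash-go.sqlite."))
+for old in backups[:-max_backups]:
+    os.remove(os.path.join(backup_dir, old))
+````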
+- Additional options available in filemonitor_config.py. The options are well documented in the commented code. diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py new file mode 100644 index 00000000..218e055c --- /dev/null +++ b/plugins/FileMonitor/StashPluginHelper.py @@ -0,0 +1,363 @@ +from stashapi.stashapp import StashInterface +from logging.handlers import RotatingFileHandler +import inspect, sys, os, pathlib, logging, json +import concurrent.futures +from stashapi.stash_types import PhashDistance +import __main__ + +# StashPluginHelper (By David Maisonave aka Axter) + # See end of this file for example usage + # Log Features: + # Can optionally log out to multiple outputs for each Log or Trace call. + # Logging includes source code line number + # Sets a maximum plugin log file size + # Stash Interface Features: + # Gets STASH_URL value from command line argument and/or from STDIN_READ + # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ + # Sets PLUGIN_ID based on the main script file name (in lower case) + # Gets PLUGIN_TASK_NAME value + # Sets pluginSettings (The plugin UI settings) + # Misc Features: + # Gets DRY_RUN value from command line argument and/or from UI and/or from config file + # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file + # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments + # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ +class StashPluginHelper(StashInterface): + # Primary Members for external reference + PLUGIN_TASK_NAME = None + PLUGIN_ID = None + PLUGIN_CONFIGURATION = None + pluginSettings = None + pluginConfig = None + STASH_INTERFACE_INIT = False + STASH_URL = None + STASH_CONFIGURATION = None + JSON_INPUT = None + DEBUG_TRACING = False + DRY_RUN = False + CALLED_AS_STASH_PLUGIN = False + RUNNING_IN_COMMAND_LINE_MODE = False + FRAGMENT_SERVER = None + STASHPATHSCONFIG = None + STASH_PATHS = [] + + # printTo argument + LOG_TO_FILE = 1 + LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost. + LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error. + LOG_TO_STASH = 8 + LOG_TO_WARN = 16 + LOG_TO_ERROR = 32 + LOG_TO_CRITICAL = 64 + LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH + + # Misc class variables + MAIN_SCRIPT_NAME = None + LOG_LEVEL = logging.INFO + LOG_FILE_DIR = None + LOG_FILE_NAME = None + STDIN_READ = None + pluginLog = None + logLinePreviousHits = [] + thredPool = None + + # Prefix message value + LEV_TRACE = "TRACE: " + LEV_DBG = "DBG: " + LEV_INF = "INF: " + LEV_WRN = "WRN: " + LEV_ERR = "ERR: " + LEV_CRITICAL = "CRITICAL: " + + # Default format + LOG_FORMAT = "[%(asctime)s] %(message)s" + + # Externally modifiable variables + log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages + log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging + # Warn message goes to both plugin log file and stash when sent to Stash log file. 
+ log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages + + def __init__(self, + debugTracing = None, # Set debugTracing to True so as to output debug and trace logging + logFormat = LOG_FORMAT, # Plugin log line format + dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file + maxbytes = 2*1024*1024, # Max size of plugin log file + backupcount = 2, # Backup counts when log file size reaches max size + logToWrnSet = 0, # Customize the target output set which will get warning logging + logToErrSet = 0, # Customize the target output set which will get error logging + logToNormSet = 0, # Customize the target output set which will get normal logging + logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path + mainScriptName = "", # The main plugin script file name (full path) + pluginID = "", + settings = None, # Default settings for UI fields + config = None, # From pluginName_config.py or pluginName_setting.py + fragmentServer = None, + stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999 + DebugTraceFieldName = "zzdebugTracing", + DryRunFieldName = "zzdryRun", + setStashLoggerAsPluginLogger = False): + self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2) + if logToWrnSet: self.log_to_wrn_set = logToWrnSet + if logToErrSet: self.log_to_err_set = logToErrSet + if logToNormSet: self.log_to_norm = logToNormSet + if stash_url and len(stash_url): self.STASH_URL = stash_url + self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ + self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() + # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) + self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" + self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent + RFH = RotatingFileHandler( + filename=self.LOG_FILE_NAME, + mode='a', + maxBytes=maxbytes, + backupCount=backupcount, + encoding=None, + delay=0 + ) + if fragmentServer: + self.FRAGMENT_SERVER = fragmentServer + else: + self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent} + + if debugTracing: self.DEBUG_TRACING = debugTracing + if config: + self.pluginConfig = config + if DebugTraceFieldName in self.pluginConfig: + self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName] + if DryRunFieldName in self.pluginConfig: + self.DRY_RUN = self.pluginConfig[DryRunFieldName] + + if len(sys.argv) > 1: + RUNNING_IN_COMMAND_LINE_MODE = True + if not debugTracing or not stash_url: + for argValue in sys.argv[1:]: + if argValue.lower() == "--trace": + self.DEBUG_TRACING = True + elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun": + self.DRY_RUN = True + elif ":" in argValue and not self.STASH_URL: + self.STASH_URL = argValue + if self.STASH_URL: + endpointUrlArr = self.STASH_URL.split(":") + if 
len(endpointUrlArr) == 3: + self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0] + self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:] + self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2] + super().__init__(self.FRAGMENT_SERVER) + self.STASH_INTERFACE_INIT = True + else: + try: + self.STDIN_READ = sys.stdin.read() + self.CALLED_AS_STASH_PLUGIN = True + except: + pass + if self.STDIN_READ: + self.JSON_INPUT = json.loads(self.STDIN_READ) + if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]: + self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"] + self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"] + self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}" + super().__init__(self.FRAGMENT_SERVER) + self.STASH_INTERFACE_INIT = True + + if self.STASH_INTERFACE_INIT: + self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"] + self.STASH_CONFIGURATION = self.get_configuration()["general"] + self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes'] + for item in self.STASHPATHSCONFIG: + self.STASH_PATHS.append(item["path"]) + if settings: + self.pluginSettings = settings + if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: + self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) + if DebugTraceFieldName in self.pluginSettings: + self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName] + if DryRunFieldName in self.pluginSettings: + self.DRY_RUN = self.pluginSettings[DryRunFieldName] + if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG + + logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) + self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem) + if setStashLoggerAsPluginLogger: + self.log = self.pluginLog + + def __del__(self): + self.thredPool.shutdown(wait=False) + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False): + if printTo == 0: + printTo = self.log_to_norm + elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: + logLevel = logging.ERROR + printTo = self.log_to_err_set + elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO: + logLevel = logging.CRITICAL + printTo = self.log_to_err_set + elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO: + logLevel = logging.WARN + printTo = self.log_to_wrn_set + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + LN_Str = f"[LN:{lineNo}]" + # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}") + if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG): + if levelStr == "": levelStr = self.LEV_DBG + if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.INFO or logLevel == logging.DEBUG: + if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG + if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.WARN: + if levelStr == "": levelStr = self.LEV_WRN + if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.ERROR: + if levelStr == "": levelStr = self.LEV_ERR + if 
printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.CRITICAL: + if levelStr == "": levelStr = self.LEV_CRITICAL + if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) + + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1): + if printTo == 0: printTo = self.LOG_TO_FILE + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + logLev = logging.INFO if logAlways else logging.DEBUG + if self.DEBUG_TRACING or logAlways: + if logMsg == "": + logMsg = f"Line number {lineNo}..." + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways) + + # Log once per session. Only logs the first time called from a particular line number in the code. + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False): + lineNo = inspect.currentframe().f_back.f_lineno + if self.DEBUG_TRACING or logAlways: + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + return + self.logLinePreviousHits.append(FuncAndLineNo) + self.Trace(logMsg, printTo, logAlways, lineNo) + + # Log INFO on first call, then do Trace on remaining calls. + def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True): + if printTo == 0: printTo = self.LOG_TO_FILE + lineNo = inspect.currentframe().f_back.f_lineno + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + if traceOnRemainingCalls: + self.Trace(logMsg, printTo, logAlways, lineNo) + else: + self.logLinePreviousHits.append(FuncAndLineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo) + + def Warn(self, logMsg, printTo = 0): + if printTo == 0: printTo = self.log_to_wrn_set + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(logMsg, printTo, logging.WARN, lineNo) + + def Error(self, logMsg, printTo = 0): + if printTo == 0: printTo = self.log_to_err_set + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(logMsg, printTo, logging.ERROR, lineNo) + + def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): + if printTo == 0: printTo = self.log_to_norm + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", + printTo, logLevel, lineNo) + + def ExecuteProcess(self, args, ExecDetach=False): + import platform, subprocess + is_windows = any(platform.win32_ver()) + pid = None + self.Trace(f"is_windows={is_windows} args={args}") + if is_windows: + if ExecDetach: + self.Trace("Executing process using Windows DETACHED_PROCESS") + DETACHED_PROCESS = 0x00000008 + pid = 
subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + else: + pid = subprocess.Popen(args, shell=True).pid + else: + self.Trace("Executing process using normal Popen") + pid = subprocess.Popen(args).pid + self.Trace(f"pid={pid}") + return pid + + def ExecutePythonScript(self, args, ExecDetach=True): + PythonExe = f"{sys.executable}" + argsWithPython = [f"{PythonExe}"] + args + return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) + + # Extends class StashInterface with functions which are not yet in the class + def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): + query = """ + mutation MetadataAutoTag($input:AutoTagMetadataInput!) { + metadataAutoTag(input: $input) + } + """ + metadata_autotag_input = { + "paths":paths, + "performers": performers, + "studios":studios, + "tags":tags, + } + result = self.call_GQL(query, {"input": metadata_autotag_input}) + return result + + def backup_database(self): + return self.call_GQL("mutation { backupDatabase(input: {download: false})}") + + def optimise_database(self): + return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }") + + def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True): + query = """ + mutation MetadataCleanGenerated($input: CleanGeneratedInput!) { + metadataCleanGenerated(input: $input) + } + """ + clean_metadata_input = { + "blobFiles": blobFiles, + "dryRun": dryRun, + "imageThumbnails": imageThumbnails, + "markers": markers, + "screenshots": screenshots, + "sprites": sprites, + "transcodes": transcodes, + } + result = self.call_GQL(query, {"input": clean_metadata_input}) + return result + + def rename_generated_files(self): + return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") + # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None): + # query = """ + # query FindDuplicateScenes($distance: Int) { + # findDuplicateScenes(distance: $distance) { + # ...SceneSlim + # } + # } + # """ + # if fragment: + # query = re.sub(r'\.\.\.SceneSlim', fragment, query) + # else: + # query = """ + # query FindDuplicateScenes($distance: Int) { + # findDuplicateScenes(distance: $distance) + # } + # """ + # variables = { + # "distance": distance + # } + # result = self.call_GQL(query, variables) + # return result['findDuplicateScenes'] \ No newline at end of file diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index ccfe0388..6d6752d1 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -1,180 +1,352 @@ -# Description: This is a Stash plugin which updates Stash if any changes occurs in the Stash library paths. +# Description: This is a Stash plugin which updates Stash if any changes occurs in the Stash library paths, and runs a scheduler. # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor -# Note: To call this script outside of Stash, pass any argument. 
-# Example: python filemonitor.py foofoo -import os -import sys -import time -import shutil -import fileinput -import hashlib -import json -from pathlib import Path -import requests -import logging -from logging.handlers import RotatingFileHandler -import stashapi.log as log # Importing stashapi.log as log for critical events ONLY -from stashapi.stashapp import StashInterface -from watchdog.observers import Observer # This is also needed for event attributes +# Note: To call this script outside of Stash, pass argument --url and the Stash URL. +# Example: python filemonitor.py --url http://localhost:9999 +import os, sys, time, pathlib, argparse +from StashPluginHelper import StashPluginHelper import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/ +from watchdog.observers import Observer # This is also needed for event attributes from threading import Lock, Condition from multiprocessing import shared_memory from filemonitor_config import config # Import settings from filemonitor_config.py -# ********************************************************************** -# Constant global variables -------------------------------------------- -LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" -FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" -PLUGIN_ARGS = False -PLUGIN_ARGS_MODE = False -PLUGIN_ID = Path(__file__).stem.lower() -# GraphQL query to fetch all scenes -QUERY_ALL_SCENES = """ - query AllScenes { - allScenes { - id - updated_at - } - } -""" -RFH = RotatingFileHandler( - filename=LOG_FILE_PATH, - mode='a', - maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K - backupCount=2, - encoding=None, - delay=0 -) -TIMEOUT = 5 CONTINUE_RUNNING_SIG = 99 +STOP_RUNNING_SIG = 32 + +parser = argparse.ArgumentParser() +parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') +parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') +parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop (kill) a running FileMonitor task.') +parser.add_argument('--restart', '-r', dest='restart', action='store_true', help='Restart FileMonitor.') +parser.add_argument('--silent', '--quit', '-q', dest='quit', action='store_true', help='Run in silent mode. No output to console or stderr. Use this when running from pythonw.exe') +parse_args = parser.parse_args() + +logToErrSet = 0 +logToNormSet = 0 +if parse_args.quit: + logToErrSet = 1 + logToNormSet = 1 + +settings = { + "recursiveDisabled": False, + "turnOnScheduler": False, + "zmaximumBackups": 0, + "zzdebugTracing": False +} +stash = StashPluginHelper( + stash_url=parse_args.stash_url, + debugTracing=parse_args.trace, + settings=settings, + config=config, + logToErrSet=logToErrSet, + logToNormSet=logToNormSet + ) +stash.Status() +stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") + +# stash.Log(f"{stash.find_duplicate_scenes()}") -# ********************************************************************** -# Global variables -------------------------------------------- exitMsg = "Change success!!" 
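+# The watchdog event handlers defined in start_library_monitor() set shouldUpdate
+# and notify this condition variable; the main loop wakes up and triggers a scan.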
mutex = Lock() signal = Condition(mutex) shouldUpdate = False -TargetPaths = [] -runningInPluginMode = False -# Configure local log file for plugin within plugin folder having a limited max log file size -logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) -logger = logging.getLogger(Path(__file__).stem) - -# ********************************************************************** -# ---------------------------------------------------------------------- -# Code section to fetch variables from Plugin UI and from filemonitor_settings.py -# Check if being called as Stash plugin -gettingCalledAsStashPlugin = True -stopLibraryMonitoring = False -StdInRead = None -try: - if len(sys.argv) == 1: - print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr) - StdInRead = sys.stdin.read() - # for line in fileinput.input(): - # StdInRead = line - # break - else: - if len(sys.argv) > 1 and sys.argv[1].lower() == "stop": - stopLibraryMonitoring = True - raise Exception("Not called in plugin mode.") -except: - gettingCalledAsStashPlugin = False - print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! (stopLibraryMonitoring={stopLibraryMonitoring}) (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr) - pass - -if gettingCalledAsStashPlugin and StdInRead: - print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr) - runningInPluginMode = True - json_input = json.loads(StdInRead) - FRAGMENT_SERVER = json_input["server_connection"] -else: - runningInPluginMode = False - FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent} - print("Running in non-plugin mode!", file=sys.stderr) +SHAREDMEMORY_NAME = "DavidMaisonaveAxter_FileMonitor" # Unique name for shared memory +RECURSIVE = stash.pluginSettings["recursiveDisabled"] == False +SCAN_MODIFIED = stash.pluginConfig["scanModified"] +RUN_CLEAN_AFTER_DELETE = stash.pluginConfig["runCleanAfterDelete"] +RUN_GENERATE_CONTENT = stash.pluginConfig['runGenerateContent'] +SCAN_ON_ANY_EVENT = stash.pluginConfig['onAnyEvent'] +SIGNAL_TIMEOUT = stash.pluginConfig['timeOut'] if stash.pluginConfig['timeOut'] > 0 else 1 -stash = StashInterface(FRAGMENT_SERVER) -PLUGINCONFIGURATION = stash.get_configuration()["plugins"] -STASHCONFIGURATION = stash.get_configuration()["general"] -STASHPATHSCONFIG = STASHCONFIGURATION['stashes'] -stashPaths = [] -settings = { - "recursiveDisabled": False, - "runCleanAfterDelete": False, - "scanModified": False, - "zzdebugTracing": False, - "zzdryRun": False, -} +CREATE_SPECIAL_FILE_TO_EXIT = stash.pluginConfig['createSpecFileToExit'] +DELETE_SPECIAL_FILE_ON_STOP = stash.pluginConfig['deleteSpecFileInStop'] +SPECIAL_FILE_DIR = f"{stash.LOG_FILE_DIR}{os.sep}working" +if CREATE_SPECIAL_FILE_TO_EXIT and not os.path.exists(SPECIAL_FILE_DIR): + os.makedirs(SPECIAL_FILE_DIR) +# Unique name to trigger shutting down FileMonitor +SPECIAL_FILE_NAME = f"{SPECIAL_FILE_DIR}{os.sep}trigger_to_kill_filemonitor_by_david_maisonave.txt" +if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME): + os.remove(SPECIAL_FILE_NAME) + 
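+# File-change filters from filemonitor_config.py: fileExtTypes limits monitoring to a
+# comma-separated list of file extensions (an empty string monitors all file types);
+# includePathChanges falls back to the Stash library paths when left empty.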
+fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else [] +includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS +excludePathChanges = stash.pluginConfig['excludePathChanges'] -if PLUGIN_ID in PLUGINCONFIGURATION: - settings.update(PLUGINCONFIGURATION[PLUGIN_ID]) -# ---------------------------------------------------------------------- -debugTracing = settings["zzdebugTracing"] -RECURSIVE = settings["recursiveDisabled"] == False -SCAN_MODIFIED = settings["scanModified"] -RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"] -RUN_GENERATE_CONTENT = config['runGenerateContent'] +stash.Trace(f"(includePathChanges={includePathChanges})") -for item in STASHPATHSCONFIG: - stashPaths.append(item["path"]) +if stash.DRY_RUN: + stash.Log("Dry run mode is enabled.") +stash.Trace(f"(SCAN_MODIFIED={SCAN_MODIFIED}) (SCAN_ON_ANY_EVENT={SCAN_ON_ANY_EVENT}) (RECURSIVE={RECURSIVE})") -# Extract dry_run setting from settings -DRY_RUN = settings["zzdryRun"] -dry_run_prefix = '' -try: - PLUGIN_ARGS = json_input['args'] - PLUGIN_ARGS_MODE = json_input['args']["mode"] -except: - pass -logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") -if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................") -if debugTracing: logger.info("settings: %s " % (settings,)) -if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................") -if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................") +StartFileMonitorAsAPluginTaskName = "Monitor as a Plugin" +StartFileMonitorAsAServiceTaskName = "Start Library Monitor Service" +StartFileMonitorAsAPluginTaskID = "start_library_monitor" +StartFileMonitorAsAServiceTaskID = "start_library_monitor_service" -if DRY_RUN: - logger.info("Dry run mode is enabled.") - dry_run_prefix = "Would've " -if debugTracing: logger.info("Debug Tracing................") -# ---------------------------------------------------------------------- -# ********************************************************************** -if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................") +FileMonitorPluginIsOnTaskQue = stash.CALLED_AS_STASH_PLUGIN +StopLibraryMonitorWaitingInTaskQueue = False +JobIdInTheQue = 0 +def isJobWaitingToRun(): + global StopLibraryMonitorWaitingInTaskQueue + global JobIdInTheQue + global FileMonitorPluginIsOnTaskQue + FileMonitorPluginIsOnTaskQue = False + jobIsWaiting = False + taskQue = stash.job_queue() + for jobDetails in taskQue: + stash.Trace(f"(Job ID({jobDetails['id']})={jobDetails})") + if jobDetails['status'] == "READY": + if jobDetails['description'] == "Running plugin task: Stop Library Monitor": + StopLibraryMonitorWaitingInTaskQueue = True + JobIdInTheQue = jobDetails['id'] + jobIsWaiting = True + elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAPluginTaskName) > -1: + FileMonitorPluginIsOnTaskQue = True + JobIdInTheQue = 0 + return jobIsWaiting + +if stash.CALLED_AS_STASH_PLUGIN and stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID: + stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})") + +class StashScheduler: # 
Stash Scheduler + def __init__(self): + import schedule # pip install schedule # https://github.com/dbader/schedule + global SIGNAL_TIMEOUT + for task in stash.pluginConfig['task_scheduler']: + if 'task' not in task: + stash.Error(f"Task is missing required task field. task={task}") + elif 'hours' in task: + if task['hours'] > 0: + stash.Log(f"Adding to scheduler task '{task['task']}' at {task['hours']} hours interval") + schedule.every(task['hours']).hours.do(self.runTask, task) + if task['hours'] > 167: # Warn when using a week or more of hours + stash.Warn(f"Using {task['hours']} hours in task '{task['task']}'. Should use the weekday syntax instead.") + elif 'minutes' in task: + if task['minutes'] > 0: + stash.Log(f"Adding to scheduler task '{task['task']}' at {task['minutes']} minutes interval") + schedule.every(task['minutes']).minutes.do(self.runTask, task) + if task['minutes'] > 10079: # Warn when using a week or more of minutes + stash.Warn(f"Using {task['minutes']} minutes in task '{task['task']}'. Should use the weekday syntax instead.") + elif 'days' in task: # Left here for backward compatibility, but should use weekday logic instead. + if task['days'] > 0: + stash.Log(f"Adding to scheduler task '{task['task']}' at {task['days']} days interval") + schedule.every(task['days']).days.do(self.runTask, task) + if task['days'] > 6: # Warn when using a week or more of days + stash.Warn(f"Using {task['days']} days in task '{task['task']}'. Should use the weekday syntax instead.") + elif 'seconds' in task: # This is mainly here for test purposes only + if task['seconds'] > 0: + if SIGNAL_TIMEOUT > task['seconds']: + stash.Log(f"Changing SIGNAL_TIMEOUT from value {SIGNAL_TIMEOUT} to {task['seconds']} to allow '{task['task']}' to get triggered timely") + SIGNAL_TIMEOUT = task['seconds'] + stash.Log(f"Adding to scheduler task '{task['task']}' at {task['seconds']} seconds interval") + schedule.every(task['seconds']).seconds.do(self.runTask, task) + elif 'weekday' in task and 'time' in task: + if task['time'].upper() == "DISABLED": + stash.Trace(f"Skipping task '{task['task']}', because it's disabled. To enable this task, change the time field to a valid time. Example: '07:00'") + elif len(task['time']) != 5 or task['time'][2] != ":": + stash.Error(f"Skipping task '{task['task']}', because time ({task['time']}) is invalid. Change the time field to a valid time. 
Example: '07:00'") + else: + weekDays = task['weekday'].lower() + if 'monthly' in task: + stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}") + else: + stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}") + + hasValidDay = False + if "monday" in weekDays: + schedule.every().monday.at(task['time']).do(self.runTask, task) + hasValidDay = True + if "tuesday" in weekDays: + schedule.every().tuesday.at(task['time']).do(self.runTask, task) + hasValidDay = True + if "wednesday" in weekDays: + schedule.every().wednesday.at(task['time']).do(self.runTask, task) + hasValidDay = True + if "thursday" in weekDays: + schedule.every().thursday.at(task['time']).do(self.runTask, task) + hasValidDay = True + if "friday" in weekDays: + schedule.every().friday.at(task['time']).do(self.runTask, task) + hasValidDay = True + if "saturday" in weekDays: + schedule.every().saturday.at(task['time']).do(self.runTask, task) + hasValidDay = True + if "sunday" in weekDays: + schedule.every().sunday.at(task['time']).do(self.runTask, task) + hasValidDay = True + + if not hasValidDay: + stash.Error(f"Task '{task['task']}' is missing valid day(s) in weekday field. weekday = '{task['weekday']}'") + else: + stash.Error(f"Task '{task['task']}' is missing fields.") + self.checkSchedulePending() + + # ToDo: Add asynchronous threading logic to running task. + def runTask(self, task): + import datetime + stash.Trace(f"Running task {task}") + if 'monthly' in task: + dayOfTheMonth = datetime.datetime.today().day + FirstAllowedDate = ((task['monthly'] - 1) * 7) + 1 + LastAllowedDate = task['monthly'] * 7 + if dayOfTheMonth < FirstAllowedDate or dayOfTheMonth > LastAllowedDate: + stash.Log(f"Skipping task {task['task']} because today is not the right {task['weekday']} of the month. Target range is between {FirstAllowedDate} and {LastAllowedDate}.") + return + + targetPaths = includePathChanges + if 'paths' in task: + targetPaths = task['paths'] + + result = None + if task['task'] == "Clean": + result = stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN) + elif task['task'] == "Clean Generated Files": + result = stash.metadata_clean_generated() + elif task['task'] == "Generate": + result = stash.metadata_generate() + elif task['task'] == "Backup": + stash.LogOnce("Note: Backup task does not get listed in the Task Queue, but user can verify that it started by looking in the Stash log file as an INFO level log line.") + result = stash.backup_database() + maximumBackup = stash.pluginSettings['zmaximumBackups'] + if "maxBackups" in task: + maximumBackup = task['maxBackups'] + if maximumBackup < 2: + stash.TraceOnce(f"Skipping DB backup file trim because zmaximumBackups={maximumBackup}. Value has to be greater than 1.") + elif 'backupDirectoryPath' in stash.STASH_CONFIGURATION: + if len(stash.STASH_CONFIGURATION['backupDirectoryPath']) < 5: + stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath length is to short. Len={len(stash.STASH_CONFIGURATION['backupDirectoryPath'])}. 
Only support length greater than 4 characters.") + elif os.path.exists(stash.STASH_CONFIGURATION['backupDirectoryPath']): + stash.LogOnce(f"Checking quantity of DB backups if path {stash.STASH_CONFIGURATION['backupDirectoryPath']} exceeds {maximumBackup} backup files.") + self.trimDbFiles(stash.STASH_CONFIGURATION['backupDirectoryPath'], maximumBackup) + else: + stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath does NOT exist. backupDirectoryPath={stash.STASH_CONFIGURATION['backupDirectoryPath']}") + elif task['task'] == "Scan": + result = stash.metadata_scan(paths=targetPaths) + elif task['task'] == "Auto Tag": + result = stash.metadata_autotag(paths=targetPaths) + elif task['task'] == "Optimise Database": + result = stash.optimise_database() + elif task['task'] == "RenameGeneratedFiles": + result = stash.rename_generated_files() + elif task['task'] == "GQL": + result = stash.call_GQL(task['input']) + elif task['task'] == "python": + if 'script' in task and task['script'] != "": + script = task['script'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}") + stash.Log(f"Executing python script {script}.") + args = [script] + if 'args' in task and len(task['args']) > 0: + args = args + [task['args']] + detached = True + if 'detach' in task: + detached = task['detach'] + result = f"Python process PID = {stash.ExecutePythonScript(args, ExecDetach=detached)}" + else: + stash.Error(f"Can not run task '{task['task']}', because it's missing 'script' field.") + elif task['task'] == "execute": + if 'command' in task and task['command'] != "": + cmd = task['command'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}") + args = [cmd] + if 'args' in task and len(task['args']) > 0: + args = args + [task['args']] + stash.Log(f"Executing command arguments {args}.") + result = f"Execute process PID = {stash.ExecuteProcess(args)}" + else: + stash.Error(f"Can not run task '{task['task']}', because it's missing 'command' field.") + else: + # ToDo: Add code to check if plugin is installed. + try: + if 'pluginId' in task and task['pluginId'] != "": + stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}") + stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task']) + else: + stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.") + stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.") + stash.LogOnce(f"If task '{task['task']}' is supposed to be a plugin, make sure to include the pluginId field in the task. task={task}") + except Exception as e: + stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}") + pass + + if result: + stash.Trace(f"Task '{task['task']}' result={result}") + + def trimDbFiles(self, dbPath, maxFiles): + if not os.path.exists(dbPath): + stash.LogOnce(f"Exiting trimDbFiles, because path {dbPath} does not exists.") + return + if len(dbPath) < 5: # For safety and security, short path not supported. + stash.Warn(f"Exiting trimDbFiles, because path {dbPath} is to short. Len={len(dbPath)}. Path string must be at least 5 characters in length.") + return + stashPrefixSqlDbFileName = "stash-go.sqlite." 
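+        # The backup files are assumed to carry the "stash-go.sqlite." prefix plus a
+        # sortable timestamp suffix, so a plain lexical sort puts the oldest backups
+        # first; the loop below deletes all but the newest maxFiles of them.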
+ dbFiles = sorted(os.listdir(dbPath)) + n = len(dbFiles) + for i in range(0, n-maxFiles): + dbFilePath = f"{dbPath}{os.sep}{dbFiles[i]}" + if dbFiles[i].startswith(stashPrefixSqlDbFileName): + stash.Warn(f"Deleting DB file {dbFilePath}") + os.remove(dbFilePath) + else: + stash.LogOnce(f"Skipping deleting file '{dbFiles[i]}', because the file doesn't start with string '{stashPrefixSqlDbFileName}'.") + + def checkSchedulePending(self): + import schedule # pip install schedule # https://github.com/dbader/schedule + stash.TraceOnce("Checking if task pending.") + schedule.run_pending() + stash.TraceOnce("Pending check complete.") + +TargetPaths = [] def start_library_monitor(): global shouldUpdate - global TargetPaths + global TargetPaths try: # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script - shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4) + shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=True, size=4) except: - pass - logger.info("Could not open shared memory map. Change File Monitor must be running. Can not run multiple instance of Change File Monitor.") + stash.Error(f"Could not open shared memory map ({SHAREDMEMORY_NAME}). Change File Monitor must be running. Can not run multiple instance of Change File Monitor. Stop FileMonitor before trying to start it again.") return type(shm_a.buf) shm_buffer = shm_a.buf len(shm_buffer) shm_buffer[0] = CONTINUE_RUNNING_SIG - if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}") + stash.Trace(f"Shared memory map opended, and flag set to {shm_buffer[0]}") RunCleanMetadata = False - + stashScheduler = StashScheduler() if stash.pluginSettings['turnOnScheduler'] else None event_handler = watchdog.events.FileSystemEventHandler() + def doIgnoreFileExt(chng_path, addToTargetPaths = False): + global TargetPaths + chng_path_lwr = chng_path.lower() + if len(fileExtTypes) > 0: + suffix = pathlib.Path(chng_path_lwr).suffix.lstrip(".") + if suffix not in fileExtTypes: + stash.TraceOnce(f"Ignoring file change because not a monitored type ({suffix}).") + return True + if len(excludePathChanges) > 0: + for path in excludePathChanges: + if chng_path_lwr.startswith(path.lower()): + stash.TraceOnce(f"Ignoring file change because is excluded path ({chng_path_lwr}) per entery '{path}'.") + return True + if addToTargetPaths: + TargetPaths.append(chng_path) + return False + def on_created(event): global shouldUpdate - global TargetPaths - TargetPaths.append(event.src_path) - logger.info(f"CREATE *** '{event.src_path}'") + if doIgnoreFileExt(event.src_path, True): + return + stash.Log(f"CREATE *** '{event.src_path}'") with mutex: shouldUpdate = True signal.notify() def on_deleted(event): global shouldUpdate - global TargetPaths nonlocal RunCleanMetadata - TargetPaths.append(event.src_path) - logger.info(f"DELETE *** '{event.src_path}'") + if doIgnoreFileExt(event.src_path, True): + return + stash.Log(f"DELETE *** '{event.src_path}'") with mutex: shouldUpdate = True RunCleanMetadata = True @@ -183,107 +355,194 @@ def on_deleted(event): def on_modified(event): global shouldUpdate global TargetPaths + if doIgnoreFileExt(event.src_path): + return if SCAN_MODIFIED: TargetPaths.append(event.src_path) - logger.info(f"MODIFIED *** '{event.src_path}'") + stash.Log(f"MODIFIED *** '{event.src_path}'") with mutex: shouldUpdate = True signal.notify() else: - if debugTracing: logger.info(f"Ignoring modifications due 
to plugin UI setting. path='{event.src_path}'") + stash.TraceOnce(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'") def on_moved(event): global shouldUpdate global TargetPaths - TargetPaths.append(event.src_path) + if doIgnoreFileExt(event.src_path, True): + return TargetPaths.append(event.dest_path) - logger.info(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'") + stash.Log(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'") with mutex: shouldUpdate = True signal.notify() - - if debugTracing: logger.info("Debug Trace........") + + def on_any_event(event): + global shouldUpdate + global TargetPaths + if doIgnoreFileExt(event.src_path): + return + if SCAN_ON_ANY_EVENT or event.src_path == SPECIAL_FILE_DIR: + stash.Log(f"Any-Event *** '{event.src_path}'") + TargetPaths.append(event.src_path) + with mutex: + shouldUpdate = True + signal.notify() + else: + stash.TraceOnce("Ignoring on_any_event trigger.") + event_handler.on_created = on_created event_handler.on_deleted = on_deleted event_handler.on_modified = on_modified event_handler.on_moved = on_moved + event_handler.on_any_event = on_any_event observer = Observer() - # Iterate through stashPaths - for path in stashPaths: + # Iterate through includePathChanges + for path in includePathChanges: observer.schedule(event_handler, path, recursive=RECURSIVE) - if debugTracing: logger.info(f"Observing {path}") + stash.Log(f"Observing {path}") + observer.schedule(event_handler, SPECIAL_FILE_DIR, recursive=RECURSIVE) + stash.Trace(f"Observing FileMonitor path {SPECIAL_FILE_DIR}") observer.start() - if debugTracing: logger.info("Starting loop................") + JobIsRunning = False + PutPluginBackOnTaskQueAndExit = False + stash.Trace("Starting loop") try: while True: TmpTargetPaths = [] with mutex: while not shouldUpdate: - if debugTracing: logger.info("Wait start................") - signal.wait() - if debugTracing: logger.info("Wait end................") + stash.TraceOnce("While not shouldUpdate") + if stash.CALLED_AS_STASH_PLUGIN and isJobWaitingToRun(): + if FileMonitorPluginIsOnTaskQue: + stash.Log(f"Another task (JobID={JobIdInTheQue}) is waiting on the queue. Will restart FileMonitor to allow other task to run.") + JobIsRunning = True + break + else: + stash.Warn("Not restarting because FileMonitor is no longer on Task Queue") + if shm_buffer[0] != CONTINUE_RUNNING_SIG: + stash.Log(f"Breaking out of loop. (shm_buffer[0]={shm_buffer[0]})") + break + if stash.pluginSettings['turnOnScheduler']: + stashScheduler.checkSchedulePending() + stash.LogOnce("Waiting for a file change-trigger.") + signal.wait(timeout=SIGNAL_TIMEOUT) + if stash.pluginSettings['turnOnScheduler'] and not shouldUpdate: + stash.TraceOnce("Checking the scheduler.") + elif shouldUpdate: + stash.LogOnce("File change trigger occurred.") + else: + stash.TraceOnce("Wait timeout occurred.") shouldUpdate = False TmpTargetPaths = [] for TargetPath in TargetPaths: TmpTargetPaths.append(os.path.dirname(TargetPath)) + stash.Trace(f"Added Path {os.path.dirname(TargetPath)}") + if TargetPath == SPECIAL_FILE_NAME: + if os.path.isfile(SPECIAL_FILE_NAME): + shm_buffer[0] = STOP_RUNNING_SIG + stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH) + else: + stash.Trace(f"[SpFl]Did not find file {SPECIAL_FILE_NAME}.") + + # Make sure special file does not exist, incase change was missed. 
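+            # (The special file is the stop trigger touched by stop_library_monitor();
+            # finding it here means a stop was requested even if the event was missed.)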
+ if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME) and shm_buffer[0] == CONTINUE_RUNNING_SIG: + shm_buffer[0] = STOP_RUNNING_SIG + stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH) TargetPaths = [] TmpTargetPaths = list(set(TmpTargetPaths)) if TmpTargetPaths != []: - logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}") - if not DRY_RUN: - stash.metadata_scan(paths=TmpTargetPaths) - if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata: - stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN) - if RUN_GENERATE_CONTENT: - stash.metadata_generate() - if gettingCalledAsStashPlugin and shm_buffer[0] == CONTINUE_RUNNING_SIG: - stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor") - if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.") - return + stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}") + if len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR: + if not stash.DRY_RUN: + # ToDo: Consider using create_scene, update_scene, and destroy_scene over general method metadata_scan + stash.metadata_scan(paths=TmpTargetPaths) + if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata: + stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN) + if RUN_GENERATE_CONTENT: + stash.metadata_generate() + if stash.CALLED_AS_STASH_PLUGIN and shm_buffer[0] == CONTINUE_RUNNING_SIG and FileMonitorPluginIsOnTaskQue: + PutPluginBackOnTaskQueAndExit = True else: - if debugTracing: logger.info("Nothing to scan.") - if shm_buffer[0] != CONTINUE_RUNNING_SIG: - logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})") + stash.Trace("Nothing to scan.") + + if shm_buffer[0] != CONTINUE_RUNNING_SIG or StopLibraryMonitorWaitingInTaskQueue: + stash.Log(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]}) (StopLibraryMonitorWaitingInTaskQueue={StopLibraryMonitorWaitingInTaskQueue})") shm_a.close() shm_a.unlink() # Call unlink only once to release the shared memory raise KeyboardInterrupt + elif JobIsRunning or PutPluginBackOnTaskQueAndExit: + stash.run_plugin_task(plugin_id=stash.PLUGIN_ID, task_name=StartFileMonitorAsAPluginTaskName) + stash.Trace(f"Exiting plugin so that other task can run. (JobIsRunning={JobIsRunning}) (PutPluginBackOnTaskQueAndExit={PutPluginBackOnTaskQueAndExit})") + return except KeyboardInterrupt: observer.stop() - if debugTracing: logger.info("Stopping observer................") + stash.Trace("Stopping observer") + if os.path.isfile(SPECIAL_FILE_NAME): + os.remove(SPECIAL_FILE_NAME) observer.join() - if debugTracing: logger.info("Exiting function................") + stash.Trace("Exiting function") -# This function is only useful when called outside of Stash. -# Example: python filemonitor.py stop -# Stops monitoring after triggered by the next file change. 
-# ToDo: Add logic so it doesn't have to wait until the next file change +# Example: python filemonitor.py --stop def stop_library_monitor(): - if debugTracing: logger.info("Opening shared memory map.") + if CREATE_SPECIAL_FILE_TO_EXIT: + if os.path.isfile(SPECIAL_FILE_NAME): + os.remove(SPECIAL_FILE_NAME) + pathlib.Path(SPECIAL_FILE_NAME).touch() + if DELETE_SPECIAL_FILE_ON_STOP: + os.remove(SPECIAL_FILE_NAME) + stash.Trace("Opening shared memory map.") try: - shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4) + shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=False, size=4) except: - pass - logger.info("Could not open shared memory map. Change File Monitor must not be running.") + # If FileMonitor is running as plugin, then it's expected behavior that SharedMemory will not be available. + stash.Trace(f"Could not open shared memory map ({SHAREDMEMORY_NAME}). Change File Monitor must not be running.") return type(shm_a.buf) shm_buffer = shm_a.buf len(shm_buffer) - shm_buffer[0] = 123 - if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}") + shm_buffer[0] = STOP_RUNNING_SIG + stash.Trace(f"Shared memory map opended, and flag set to {shm_buffer[0]}") shm_a.close() shm_a.unlink() # Call unlink only once to release the shared memory - time.sleep(1) - return + +def start_library_monitor_service(): + # First check if FileMonitor is already running + try: + shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=False, size=4) + shm_a.close() + shm_a.unlink() + stash.Error("FileMonitor is already running. Need to stop FileMonitor before trying to start it again.") + return + except: + pass + stash.Trace("FileMonitor is not running, so it's safe to start it as a service.") + args = [f"{pathlib.Path(__file__).resolve().parent}{os.sep}filemonitor.py", '--url', f"{stash.STASH_URL}"] + stash.ExecutePythonScript(args) -if stopLibraryMonitoring: +if parse_args.stop or parse_args.restart or stash.PLUGIN_TASK_NAME == "stop_library_monitor": stop_library_monitor() - if debugTracing: logger.info(f"stop_library_monitor EXIT................") -elif PLUGIN_ARGS_MODE == "start_library_monitor" or not gettingCalledAsStashPlugin: + if parse_args.restart: + time.sleep(5) + stash.run_plugin_task(plugin_id=stash.PLUGIN_ID, task_name=StartFileMonitorAsAPluginTaskName) + stash.Trace(f"Restart FileMonitor EXIT") + else: + stash.Trace(f"Stop FileMonitor EXIT") +elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID: + start_library_monitor_service() + stash.Trace(f"{StartFileMonitorAsAServiceTaskID} EXIT") +elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID: start_library_monitor() - if debugTracing: logger.info(f"start_library_monitor EXIT................") + stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT") +elif not stash.CALLED_AS_STASH_PLUGIN: + try: + start_library_monitor() + stash.Trace(f"Command line FileMonitor EXIT") + except Exception as e: + stash.Error(f"Exception while running FileMonitor from the command line. Error: {e}") else: - logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})") + stash.Log(f"Nothing to do!!! 
(stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})") -if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************") +stash.Trace("\n*********************************\nEXITING ***********************\n*********************************") diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml index 14a41783..4d2adff4 100644 --- a/plugins/FileMonitor/filemonitor.yml +++ b/plugins/FileMonitor/filemonitor.yml @@ -1,34 +1,38 @@ name: FileMonitor description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. -version: 0.2.0 +version: 0.8.2 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor settings: recursiveDisabled: displayName: No Recursive - description: Enable stop monitoring paths recursively. + description: Enable to STOP monitoring paths recursively. type: BOOLEAN - runCleanAfterDelete: - displayName: Run Clean - description: Enable to run metadata clean task after file deletion. - type: BOOLEAN - scanModified: - displayName: Scan Modifications - description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ. + turnOnScheduler: + displayName: Scheduler + description: Enable to turn on the scheduler. See filemonitor_config.py for more details. type: BOOLEAN + zmaximumBackups: + displayName: Max DB Backups + description: When value greater than 1, will trim the number of database backup files to set value. Requires [Scheduler] enabled and backupDirectoryPath populated with path length longer than 4. + type: NUMBER zzdebugTracing: displayName: Debug Tracing - description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log - type: BOOLEAN - zzdryRun: - displayName: Dry Run - description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call meta_scan, and only logs the action it would have taken. + description: Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log type: BOOLEAN exec: - python - "{pluginDir}/filemonitor.py" interface: raw tasks: - - name: Start Library Monitor - description: Monitors paths in Stash library for media file changes, and updates Stash. + - name: Start Library Monitor Service + description: Run [Library Monitor] as a SERVICE to update Stash with any media file changes. + defaultArgs: + mode: start_library_monitor_service + - name: Stop Library Monitor + description: Stops library monitoring within 2 minute. + defaultArgs: + mode: stop_library_monitor + - name: Monitor as a Plugin + description: Run [Library Monitor] as a plugin (*not recommended method*) defaultArgs: mode: start_library_monitor diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py index de0210b6..a5f6f00a 100644 --- a/plugins/FileMonitor/filemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -2,11 +2,135 @@ # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor config = { + # The task scheduler list. 
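+    # (Read by filemonitor.py. Scheduled tasks only run when the [Scheduler] option
+    # is enabled in Settings->Plugins->Plugins->FileMonitor.)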
+    # Tasks can be scheduled to run monthly, weekly, daily, hourly, and by minutes. For best results, use the scheduler with FileMonitor running as a service.
+    # For daily, weekly, and monthly tasks, use the weekday syntax.
+    # The [Auto Tag] task is an example of a daily scheduled task.
+    # The [Generate] task is an example of a weekly scheduled task.
+    # The [Backup] task is an example of a monthly scheduled task.
+    # Note: The hour section in time MUST be a two-digit number in military (24-hour) time format. Example: 1PM = "13:00" and 1AM = "01:00"
+    "task_scheduler": [
+        # To create a daily task, include each day of the week for the weekday field.
+        {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
+        {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+
+        # The following tasks are scheduled for 3 days out of the week.
+        {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
+        {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+
+        # The following tasks are scheduled weekly
+        {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM)
+        {"task" : "Scan", "weekday" : "sunday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every Sunday at 3AM)
+
+        # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
+        # The monthly field value must be 1, 2, 3, or 4.
+        #     1 = 1st specified weekday of the month. Example 1st monday.
+        #     2 = 2nd specified weekday of the month. Example 2nd monday of the month.
+        #     3 = 3rd specified weekday of the month.
+        #     4 = 4th specified weekday of the month.
+        # The following task is scheduled monthly
+        {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+
+        # The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+        # This task requires plugin [Path Parser], and it's disabled by default.
+        {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task, change time "DISABLED" to a valid time.
+
+        # Example#A1: Task to call the call_GQL API with custom input
+        {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
+
+        # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by filemonitor.py current directory.
+        #             The args field is NOT required.
+        {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
+
+        # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
+        {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+        {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+        {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate]
+
+        # Example#A4: Task which calls Migrations -> [Rename generated files]
+        {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
+
+        # Example#A5: The Backup task using the optional field maxBackup, which overrides the UI [Max DB Backups] value
+        {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files.
+        {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups]
+
+        # The above weekday method is the more reliable method to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
+
+        # The below examples use the frequency field method, which can work with minutes and hours. A zero frequency value disables the task.
+        # Note: Both seconds and days are also supported for the frequency field.
+        #       However, seconds is mainly used for test purposes.
+        #       And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
+        # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
+
+        # Example#B1: Task for calling another Stash plugin, which needs the plugin name and plugin ID.
+        {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+
+        # Example#B2: Task to execute a command
+        {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+
+        # Example#B3: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
+        {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+
+        # Comment out **test** tasks.
+        # To run tests, enable all tasks, and start FileMonitor as a service.
+        # When executed, these tasks should be seen in the Task Queue unless otherwise stated in comments.
+        # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
+        # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
+        # MUST ToDo: Always comment out the below test tasks before checking in this code!!!
+        # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
+        # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
+        # {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
+        # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
+        # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
+        # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
+        # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
+        # {"task" : "Generate", "weekday" : "friday", "time" : "00:00"},
+        # {"task" : "Clean", "weekday" : "friday", "time" : "00:00"},
+        # {"task" : "Auto Tag", "weekday" : "friday", "time" : "00:00"},
+        # {"task" : "Optimise Database", "weekday" : "friday", "time" : "00:00"},
+        # {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Running plugin task: Create Tags
+        # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "00:00"},
+        # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Optimising database...
+        # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "00:00"},
+        # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Migrating scene hashes...
+        # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+        # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+        # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+        # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+        # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+    ],
+
+    # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
+    "timeOut": 60,
+    # Enable to run metadata clean task after file deletion.
+    "runCleanAfterDelete": False,
     # Enable to run metadata_generate (Generate Content) after metadata scan.
     "runGenerateContent": False,
+    # When populated (comma-separated list [lower-case]), only scan for changes for the specified file extensions
+    "fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t"
+    # When populated, only include file changes in specified paths.
+    "includePathChanges" :[], # Example: ["C:\\MyVideos", "C:\\MyImages"]
+    # When populated, exclude file changes in paths that start with specified entries.
+ "excludePathChanges" :[], # Example: ["C:\\MyVideos\\SomeSubFolder\\", "C:\\MyImages\\folder\\Sub\\"] - # The following fields are ONLY used when running FileMonitor in script mode + # The following fields are ONLY used when running FileMonitor in script mode. "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server + + # The following are advanced user options. + # Enable to run scan when triggered by on_any_event. + "onAnyEvent": False, # If enabled may cause excessive triggers. + # Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ. + "scanModified": False, # Warning: Enabling this in Windows OS may cause excessive triggers when user is only viewing directory content. + # Enable to exit FileMonitor by creating special file in plugin folder\working + "createSpecFileToExit": True, + # Enable to delete special file immediately after it's created in stop process. + "deleteSpecFileInStop": False, + + # Below are place holders for **possible** future features. + # !!! Not yet implemented !!! + # When enabled, if CREATE flag is triggered, DupFileManager task is called if the plugin is installed. + "onCreateCallDupFileManager": False, # Not yet implemented!!!! + # !!! Not yet implemented !!! } diff --git a/plugins/FileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt index aa553701..e7825b02 100644 --- a/plugins/FileMonitor/requirements.txt +++ b/plugins/FileMonitor/requirements.txt @@ -1,4 +1,3 @@ -stashapp-tools +stashapp-tools >= 0.2.49 pyYAML -watchdog -requests \ No newline at end of file +watchdog \ No newline at end of file diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md index 7ea05101..1474ed6f 100644 --- a/plugins/RenameFile/README.md +++ b/plugins/RenameFile/README.md @@ -39,9 +39,9 @@ Error: [WinError 32] The process cannot access the file because it is being used To avoid this error, refresh the URL before changing the Title field. 
### Requirements -pip install -r requirements.txt +- pip install -r requirements.txt - Or manually install each requirement: - - `pip install stashapp-tools` + - `pip install stashapp-tools --upgrade` - `pip install pyYAML` - `pip install requests` diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index ccf7b27e..884eaa86 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -17,7 +17,7 @@ # ********************************************************************** # Constant global variables -------------------------------------------- -LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" +LOG_FILE_PATH = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order PLUGIN_ID = Path(__file__).stem.lower() diff --git a/plugins/RenameFile/requirements.txt b/plugins/RenameFile/requirements.txt index 2b546d99..a3649895 100644 --- a/plugins/RenameFile/requirements.txt +++ b/plugins/RenameFile/requirements.txt @@ -1,3 +1,3 @@ -stashapp-tools +stashapp-tools >= 0.2.49 pyYAML requests \ No newline at end of file From e847bdc1077237bc67cff647f3ba2ab4cb4ead52 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Fri, 16 Aug 2024 04:10:39 -0400 Subject: [PATCH 18/39] Removing template changes to let plugin changes go through first. --- .github/ISSUE_TEMPLATE/bug_report.yml | 77 ------------------ .github/ISSUE_TEMPLATE/bug_report_plugin.yml | 84 -------------------- .github/ISSUE_TEMPLATE/discussion.yml | 42 ---------- .github/ISSUE_TEMPLATE/feature_request.yml | 35 -------- .github/ISSUE_TEMPLATE/help.yml | 37 --------- 5 files changed, 275 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml delete mode 100644 .github/ISSUE_TEMPLATE/bug_report_plugin.yml delete mode 100644 .github/ISSUE_TEMPLATE/discussion.yml delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml delete mode 100644 .github/ISSUE_TEMPLATE/help.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 061780ac..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,77 +0,0 @@ -name: 🐞 Bug -description: Create a bug report -title: "🐞[Bug] Your_Short_title" -labels: [Bug] -body: - - type: markdown - attributes: - value: | - Thank you for taking the time to fill out this bug report! - Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. - Steps to reproduce the behavior: - 1. Go to '...' - 2. Click on '....' - 3. Scroll down to '....' - 4. See error - - type: textarea - id: steps - attributes: - label: Please enter steps to reproduce the behavior. - validations: - required: true - - type: input - id: stash_ver - attributes: - label: Stash Version (from Settings -> About) - placeholder: e.g. v0.26.2 - validations: - required: true - - type: input - id: os - attributes: - label: What Operating System (OS)? - placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS) - validations: - required: true - - type: input - id: device - attributes: - label: Phone or tablets - placeholder: e.g. iPhone6, Galaxy Tab A9+ - validations: - required: false - - type: input - id: browser - attributes: - label: What browser and version? - placeholder: e.g. 
Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any - validations: - required: true - - type: textarea - id: expected-behavior - attributes: - label: Expected Behavior - description: What was the expected behavior? - validations: - required: false - - type: textarea - id: logging - attributes: - label: Logging - description: Enter Stash logging. - validations: - required: false - - type: textarea - id: snapshots - attributes: - label: Snapshot(s) - description: Optionally attach snapshot(s) which displays the bug. - validations: - required: false - - type: textarea - id: additional - attributes: - label: Additional context - description: Add any other context about the problem here. - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/bug_report_plugin.yml b/.github/ISSUE_TEMPLATE/bug_report_plugin.yml deleted file mode 100644 index 5c03d45b..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report_plugin.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: 🪲 Plugin Bug -description: Create a plugin bug report -title: "🪲[EnterPluginNameHere] Your_Short_title" -labels: [Plugin_Bug] -body: - - type: markdown - attributes: - value: | - Thank you for taking the time to fill out this bug report! - Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. - Steps to reproduce the behavior: - 1. Go to '...' - 2. Click on '....' - 3. Scroll down to '....' - 4. See error - - type: textarea - id: steps - attributes: - label: Please enter steps to reproduce the behavior. - validations: - required: true - - type: input - id: stash_ver - attributes: - label: Stash Version (from Settings -> About) - placeholder: e.g. v0.26.2 - validations: - required: true - - type: input - id: os - attributes: - label: What Operating System (OS)? - placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS) - validations: - required: true - - type: input - id: device - attributes: - label: Phone or tablets - placeholder: e.g. iPhone6, Galaxy Tab A9+ - validations: - required: false - - type: input - id: browser - attributes: - label: What browser and version? - placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any - validations: - required: true - - type: textarea - id: install - attributes: - label: The pip install for the plugin - description: pip install stashapp-tools --upgrade, pip install pyYAML - validations: - required: true - - type: textarea - id: expected-behavior - attributes: - label: Expected Behavior - description: What was the expected behavior? - validations: - required: false - - type: textarea - id: logging - attributes: - label: Logging - description: Enter Stash logging and plugin logging file if applicable. - validations: - required: false - - type: textarea - id: snapshots - attributes: - label: Snapshot(s) - description: Optionally attach snapshot(s) which displays the bug. - validations: - required: false - - type: textarea - id: additional - attributes: - label: Additional context - description: Add any other context about the problem here. - validations: - required: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/discussion.yml b/.github/ISSUE_TEMPLATE/discussion.yml deleted file mode 100644 index 177b35cd..00000000 --- a/.github/ISSUE_TEMPLATE/discussion.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: 🛗Discussion / Request for Commentary [RFC] -description: This is for issues that will be discussed and won't necessarily result directly in commits or pull requests. 
-title: "🛗[RFC]: Your_Short_title" -labels: [RFC] -body: - - type: markdown - attributes: - value: | - This is for issues that will be discussed and won't necessarily result directly in commits or pull requests. - Please ensure that you respect people's time and attention and understand that people are volunteering their time, so concision is ideal and considerate. - Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. - - type: textarea - id: Title - attributes: - label: Optional title of the topic to discuss. - validations: - required: false - - type: textarea - id: Summary - attributes: - label: Describe the scope of your topic and your goals ideally within a single paragraph or TL. A summary that makes it easier for people to determine if they can contribute at a glance. - validations: - required: true - - type: textarea - id: Details - attributes: - label: Only required if summary and title doesn't cover everything. - validations: - required: false - - type: textarea - id: Examples - attributes: - label: If you can show a picture or video examples post them here. - validations: - required: false - - type: textarea - id: snapshots - attributes: - label: Snapshot(s) - description: Optionally attach additional snapshot(s) which helps describe the discussion. - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index 9593dc41..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: 💡️Feature Request -description: Suggest improvement for this project -title: "💡️[Enhancement]: Your_Short_title" -labels: [Enhancement] -body: - - type: markdown - attributes: - value: | - Please fill out the following fields with as much detail as possible: - Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. - - type: textarea - id: problem - attributes: - label: If feature request is related to a problem, give a clear and concise description of what is the problem. Ex. I'm always frustrated when [...] - validations: - required: false - - type: textarea - id: solution - attributes: - label: Describe the solution you'd like. A clear and concise description of what you want to happen. - validations: - required: true - - type: textarea - id: alternatives - attributes: - label: Describe alternatives you've considered. A clear and concise description of any alternative solutions or features you've considered. - validations: - required: false - - type: textarea - id: Snapshots - attributes: - label: Snapshots / Images - description: Add any other context or screenshots about the feature request here, which can help explain the feature, and a description of what to look for in the image(s). - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/help.yml b/.github/ISSUE_TEMPLATE/help.yml deleted file mode 100644 index dae58e2e..00000000 --- a/.github/ISSUE_TEMPLATE/help.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: ❓Help -description: Post your question -title: "❓[Help]: Your_Short_title" -labels: [Question] -body: - - type: markdown - attributes: - value: | - Please fill out the following fields with as much detail as possible, so that we can better answer your question. - Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage. 
- - type: textarea - id: question - attributes: - label: Please enter a clear and concise detailed question. - validations: - required: true - - type: input - id: os - attributes: - label: What Operating System (OS)? - placeholder: e.g. Windows, MacOS, Linux - validations: - required: false - - type: input - id: browser - attributes: - label: What browser and version? - placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any - validations: - required: false - - type: textarea - id: snapshots - attributes: - label: Snapshot(s) - description: Optionally attach snapshot(s) which helps describe the question. - validations: - required: false From bdc12bd66ae3678e9b38be4a6733cadca0255f64 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Fri, 16 Aug 2024 04:14:05 -0400 Subject: [PATCH 19/39] ver change --- plugins/RenameFile/README.md | 2 +- plugins/RenameFile/renamefile.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md index 1474ed6f..acf06b9a 100644 --- a/plugins/RenameFile/README.md +++ b/plugins/RenameFile/README.md @@ -1,4 +1,4 @@ -# RenameFile: Ver 0.4.0 (By David Maisonave) +# RenameFile: Ver 0.4.1 (By David Maisonave) RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks. - **Rename Scene File Name** (On-The-Fly) - **Append tag names** to file name diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index e5d2a0f0..20778b34 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,6 +1,6 @@ name: RenameFile description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. -version: 0.4.0 +version: 0.4.1 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: performerAppend: From 1d2f57582f4e70e32156969969888b3bf634ecb8 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Fri, 16 Aug 2024 12:14:31 -0400 Subject: [PATCH 20/39] Added validation check for scheduled plugins --- plugins/FileMonitor/README.md | 33 ++++++------ plugins/FileMonitor/StashPluginHelper.py | 3 ++ plugins/FileMonitor/filemonitor.py | 21 ++++++-- plugins/FileMonitor/filemonitor.yml | 2 +- plugins/FileMonitor/filemonitor_config.py | 66 +++++++++++------------ 5 files changed, 72 insertions(+), 53 deletions(-) diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md index a196509c..63f5c9c4 100644 --- a/plugins/FileMonitor/README.md +++ b/plugins/FileMonitor/README.md @@ -1,4 +1,4 @@ -# FileMonitor: Ver 0.8.2 (By David Maisonave) +# FileMonitor: Ver 0.8.3 (By David Maisonave) FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features: - Updates Stash when any file changes occurs in the Stash library. - **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**. @@ -41,12 +41,13 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio ```` python "task_scheduler": [ # To create a daily task, include each day of the week for the weekday field. 
- {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) - {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM) + {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) + {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM) + {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] # The following tasks are scheduled for 3 days out of the week. - {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM) - {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM) + {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM) + {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM) # The following tasks are scheduled weekly {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM) @@ -59,11 +60,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio # 3 = 3rd specified weekday of the month. # 4 = 4th specified weekday of the month. # The following task is scheduled monthly - {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) - - # The following task is the syntax used for a plugins. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field. - # This task requires plugin [Path Parser], and it's disabled by default. - {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time. + {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) # Example#A1: Task to call call_GQL API with custom input {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time @@ -73,13 +70,17 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exists, the paths in the Stash library is used. 
- {"task" : "Scan", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan] + {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan] {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag] - {"task" : "Clean", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate] + {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate] # Example#A4: Task which calls Migrations -> [Rename generated files] {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example + # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value + {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files. + {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups] + # The above weekday method is the more reliable method to schedule task, because it doesn't rely on FileMonitor running continuously (non-stop). # The below examples use frequency field method which can work with minutes and hours. A zero frequency value disables the task. @@ -88,13 +89,15 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio # And days usage is discourage, because it only works if FileMonitor is running for X many days non-stop. # The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax. - # Example#B1: Task for calling another Stash plugin, which needs plugin name and plugin ID. + # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field. {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled. + # Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder. + {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled. - # Example#B2: Task to execute a command + # Example#B3: Task to execute a command {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0}, - # Example#B3: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. + # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. 
{"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0}, ], ```` diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py index 218e055c..c32fe779 100644 --- a/plugins/FileMonitor/StashPluginHelper.py +++ b/plugins/FileMonitor/StashPluginHelper.py @@ -27,6 +27,7 @@ class StashPluginHelper(StashInterface): PLUGIN_TASK_NAME = None PLUGIN_ID = None PLUGIN_CONFIGURATION = None + PLUGINS_PATH = None pluginSettings = None pluginConfig = None STASH_INTERFACE_INIT = False @@ -165,6 +166,8 @@ def __init__(self, self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"] self.STASH_CONFIGURATION = self.get_configuration()["general"] self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes'] + if 'pluginsPath' in self.STASH_CONFIGURATION: + self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath'] for item in self.STASHPATHSCONFIG: self.STASH_PATHS.append(item["path"]) if settings: diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index 6d6752d1..e23fbd47 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -45,8 +45,6 @@ stash.Status() stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") -# stash.Log(f"{stash.find_duplicate_scenes()}") - exitMsg = "Change success!!" mutex = Lock() signal = Condition(mutex) @@ -260,8 +258,23 @@ def runTask(self, task): # ToDo: Add code to check if plugin is installed. try: if 'pluginId' in task and task['pluginId'] != "": - stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}") - stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task']) + invalidDir = False + validDirMsg = "" + if 'validateDir' in task and task['validateDir'] != "": + invalidDir = True + communityPluginPath = f"{stash.PLUGINS_PATH}{os.sep}community{os.sep}{task['validateDir']}" + basePluginPath = f"{stash.PLUGINS_PATH}{os.sep}{task['validateDir']}" + if os.path.exists(communityPluginPath): + invalidDir = False + validDirMsg = f"Valid path in {communityPluginPath}" + elif os.path.exists(basePluginPath): + invalidDir = False + validDirMsg = f"Valid path in {basePluginPath}" + if invalidDir: + stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'") + else: + stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}") + stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task']) else: stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.") stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.") diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml index 4d2adff4..b30f935b 100644 --- a/plugins/FileMonitor/filemonitor.yml +++ b/plugins/FileMonitor/filemonitor.yml @@ -1,6 +1,6 @@ name: FileMonitor description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. 
-version: 0.8.2 +version: 0.8.3 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor settings: recursiveDisabled: diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py index a5f6f00a..add1c3d3 100644 --- a/plugins/FileMonitor/filemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -11,12 +11,13 @@ # Note: The hour section in time MUST be a two digit number, and use military time format. Example: 1PM = "13:00" and 1AM = "01:00" "task_scheduler": [ # To create a daily task, include each day of the week for the weekday field. - {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) - {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM) + {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) + {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM) + {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] # The following tasks are scheduled for 3 days out of the week. - {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM) - {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM) + {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM) + {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM) # The following tasks are scheduled weekly {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM) @@ -29,11 +30,7 @@ # 3 = 3rd specified weekday of the month. # 4 = 4th specified weekday of the month. # The following task is scheduled monthly - {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) - - # The following task is the syntax used for a plugins. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field. - # This task requires plugin [Path Parser], and it's disabled by default. - {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time. 
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) # Example#A1: Task to call call_GQL API with custom input {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time @@ -62,13 +59,15 @@ # And days usage is discourage, because it only works if FileMonitor is running for X many days non-stop. # The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax. - # Example#B1: Task for calling another Stash plugin, which needs plugin name and plugin ID. + # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field. {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled. + # Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder. + {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled. - # Example#B2: Task to execute a command + # Example#B3: Task to execute a command {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0}, - # Example#B3: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. + # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0}, # Comment out **test** tasks. @@ -77,27 +76,28 @@ # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts # MUST ToDo: Always comment out below test task before checking in this code!!! 
- # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name - # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command) - # {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts) - # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId) - # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command) - # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts) - # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId) - # {"task" : "Generate", "weekday" : "friday", "time" : "00:00"}, - # {"task" : "Clean", "weekday" : "friday", "time" : "00:00"}, - # {"task" : "Auto Tag", "weekday" : "friday", "time" : "00:00"}, - # {"task" : "Optimise Database", "weekday" : "friday", "time" : "00:00"}, - # {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Running plugin task: Create Tags - # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "00:00"}, - # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Optimising database... - # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "00:00"}, - # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Migrating scene hashes... - # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run. - # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? - # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? - # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? - # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? 
+ # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name + # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command) + # {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts) + # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId) + # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command) + # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts) + # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId) + # {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory) + # {"task" : "Generate", "weekday" : "friday", "time" : "12:03"}, + # {"task" : "Clean", "weekday" : "friday", "time" : "12:03"}, + # {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"}, + # {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"}, + # {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags + # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"}, + # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database... + # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"}, + # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes... + # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run. + # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? + # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? + # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? + # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? ], # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue. 
From 44e794871fea8510006fa08bf0e03cf843b749e9 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 20:46:26 -0400
Subject: [PATCH 21/39] Update requirements.txt

---
 plugins/FileMonitor/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/plugins/FileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt
index 7a38cdfb..19a1174d 100644
--- a/plugins/FileMonitor/requirements.txt
+++ b/plugins/FileMonitor/requirements.txt
@@ -1,3 +1,3 @@
 stashapp-tools >= 0.2.50
 pyYAML
-watchdog
+watchdog
\ No newline at end of file

From 90b30a4970aa479799e30ba80279fba89b58c595 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 20:57:18 -0400
Subject: [PATCH 22/39] Fixed bug that was adding duplicate resolution

1. Fixed bug that added duplicate resolution, width, height, scene_date, video_codec, and frame_rate when title is not populated.
2. Added excludeTags field, and pre-populated it with DuplicateMarkForDeletion and DuplicateWhitelistFile to avoid conflicts with upcoming plugin DupFileManager.
---
 plugins/RenameFile/README.md              | 10 +++++++-
 plugins/RenameFile/renamefile.py          | 29 +++++++++++++----------
 plugins/RenameFile/renamefile.yml         |  2 +-
 plugins/RenameFile/renamefile_settings.py |  2 ++
 plugins/RenameFile/requirements.txt       |  3 ++-
 5 files changed, 30 insertions(+), 16 deletions(-)

diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index acf06b9a..bab3fca7 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,4 +1,4 @@
-# RenameFile: Ver 0.4.1 (By David Maisonave)
+# RenameFile: Ver 0.4.2 (By David Maisonave)
 RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
 - **Rename Scene File Name** (On-The-Fly)
 - **Append tag names** to file name
@@ -57,3 +57,11 @@ That's it!!!
 - Main options are accessible in the GUI via Settings->Plugins->Plugins->[RenameFile].
 - Advanced options are avialable in the **renamefile_settings.py** file. After making changes, go to http://localhost:9999/settings?tab=plugins, and click [Reload Plugins].
 
+## Bugs and Feature Requests
+Please use the following link to report RenameFile bugs:
+[RenameFile Bug Report](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Plugin_Bug&projects=&template=bug_report_plugin.yml&title=%F0%9F%AA%B2%5BRenameFile%5D+Your_Short_title)
+
+Please use the following link to report RenameFile feature requests: [RenameFile Feature Request](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Enhancement&projects=&template=feature_request_plugin.yml&title=%F0%9F%92%A1%EF%B8%8F%5BEnhancement%5D%3A%5BRenameFile%5D+Your_Short_title)
+
+Please do **NOT** use the feature request to report problems associated with errors. Instead, use the bug report for error issues.
+ diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index 884eaa86..00e112a7 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -117,6 +117,7 @@ exclude_paths = config["pathToExclude"] exclude_paths = exclude_paths.split() if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................") +excluded_tags = config["excludeTags"] # Extract tag whitelist from settings tag_whitelist = config["tagWhitelist"] if debugTracing: logger.info("Debug Tracing................") @@ -203,7 +204,9 @@ def add_tag(tag_name): if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)): return # Skip adding more tags if the maximum limit is reached - + if tag_name in excluded_tags: + if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})") + return # Check if the tag name is in the whitelist if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist): if WRAPPER_STYLES.get('tag'): @@ -261,8 +264,8 @@ def add_tag(tag_name): scene_date += POSTFIX_STYLES.get('date') if debugTracing: logger.info("Debug Tracing................") if WRAPPER_STYLES.get('date'): - filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}") - else: + scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}" + if scene_date not in title: filename_parts.append(scene_date) elif key == 'resolution': width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string @@ -270,40 +273,40 @@ def add_tag(tag_name): if width and height: resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution') if WRAPPER_STYLES.get('resolution'): - filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}") - else: + resolution = f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}" + if resolution not in title: filename_parts.append(resolution) elif key == 'width': width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string if width: width += POSTFIX_STYLES.get('width') if WRAPPER_STYLES.get('width'): - filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}") - else: + width = f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}" + if width not in title: filename_parts.append(width) elif key == 'height': height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string if height: height += POSTFIX_STYLES.get('height') if WRAPPER_STYLES.get('height'): - filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}") - else: + height = f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}" + if height not in title: filename_parts.append(height) elif key == 'video_codec': video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase if video_codec: video_codec += POSTFIX_STYLES.get('video_codec') if WRAPPER_STYLES.get('video_codec'): - filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}") - else: + video_codec = f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}" + if video_codec not in title: filename_parts.append(video_codec) 
         elif key == 'frame_rate':
             frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS'
             if frame_rate:
                 frame_rate += POSTFIX_STYLES.get('frame_rate')
                 if WRAPPER_STYLES.get('frame_rate'):
-                    filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}")
-                else:
+                    frame_rate = f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}"
+                if frame_rate not in title:
                     filename_parts.append(frame_rate)
         elif key == 'galleries':
             galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])]
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 20778b34..1c9d5ad9 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
 name: RenameFile
 description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.1
+version: 0.4.2
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
 settings:
   performerAppend:
diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py
index 24052f8a..6a4445db 100644
--- a/plugins/RenameFile/renamefile_settings.py
+++ b/plugins/RenameFile/renamefile_settings.py
@@ -37,6 +37,8 @@
         "frame_rate": 'FR',
         "date": '',
     },
+    # Add tags to exclude from RenameFile.
+    "excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"],
     # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"
     "pathToExclude": "",
     # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"
diff --git a/plugins/RenameFile/requirements.txt b/plugins/RenameFile/requirements.txt
index 14af1d68..d4e029a9 100644
--- a/plugins/RenameFile/requirements.txt
+++ b/plugins/RenameFile/requirements.txt
@@ -1,2 +1,3 @@
-stashapp-tools >= 0.2.49
+stashapp-tools >= 0.2.50
+pyYAML
 requests
\ No newline at end of file

From 131f3ebbb3fb786fe296e44ef6f5fcaaea24e111 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 21:04:34 -0400
Subject: [PATCH 23/39] Added API Key option

Added API Key logic to allow FileMonitor to work in service mode when the user has configured a Stash username and password.
To slim down the main config, moved self_unit_test and task_examples to their own configuration files.
Added more self-unit tests, and a selfUnitTest activation field that is normally false. When the field is true, it turns on the tasks used for unit testing.
---
 .../FileMonitor/filemonitor_self_unit_test.py | 42 ++++++++++++++++
 .../FileMonitor/filemonitor_task_examples.py  | 49 +++++++++++++++++++
 2 files changed, 91 insertions(+)
 create mode 100644 plugins/FileMonitor/filemonitor_self_unit_test.py
 create mode 100644 plugins/FileMonitor/filemonitor_task_examples.py

diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py
new file mode 100644
index 00000000..c30311fc
--- /dev/null
+++ b/plugins/FileMonitor/filemonitor_self_unit_test.py
@@ -0,0 +1,42 @@
+# **test** tasks which are disabled by default. To enable test tasks, set selfUnitTest to True.
+# To run test, enable all task, and start FileMonitor as a service. +# When executed, these task should be seen in the Task Queue unless otherwise stated in comments. +# These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor +# These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts +self_unit_test = { + "task_scheduler": [ + {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name + {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command) + {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts) + {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId) + {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command) + {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts) + {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId) + {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory) + {"task" : "Log", "msg" : "Testing Scheduled Log", "minutes" : 1}, # Test plugin log file + {"task" : "Trace", "minutes" : 1}, # Test plugin trace logging + {"task" : "LogOnce", "seconds" :15}, # Test LogOnce + {"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce + # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe" + {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter + {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash. + {"task" : "Generate", "weekday" : "friday", "time" : "12:03"}, + {"task" : "Clean", "weekday" : "friday", "time" : "12:03"}, + {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"}, + {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"}, + {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags + {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"}, + {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database... + {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"}, + {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes... + {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run. + {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? + {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? 
+ {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? + {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? + ], + + # MUST ToDo: Always set selfUnitTest to False before checking in this code!!! + # Enable to turn on self unit test. + "selfUnitTest": False, +} diff --git a/plugins/FileMonitor/filemonitor_task_examples.py b/plugins/FileMonitor/filemonitor_task_examples.py new file mode 100644 index 00000000..3cbfad23 --- /dev/null +++ b/plugins/FileMonitor/filemonitor_task_examples.py @@ -0,0 +1,49 @@ +# Below are example tasks. +# They are all disabled by default, by having zero value for time frequency, or by having "DISABLED" set for the time field. +# To enable these tasks, set the frequency or the time value to a valid frequency or time stamp. +task_examples = { + "task_scheduler": [ + # Example#A1: Task to call call_GQL API with custom input + {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time + + # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by filemonitor.py current directory. + # The args field is NOT required. + {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time + + # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exists, the paths in the Stash library is used. + {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan] + {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag] + {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate] + + # Example#A4: Task which calls Migrations -> [Rename generated files] + {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example + + # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value + {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files. + {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups] + + # The above weekday method is the more reliable method to schedule task, because it doesn't rely on FileMonitor running continuously (non-stop). + + # The below examples use frequency field method which can work with minutes and hours. A zero frequency value disables the task. + # Note: Both seconds and days are also supported for the frequency field. + # However, seconds is mainly used for test purposes. + # And days usage is discourage, because it only works if FileMonitor is running for X many days non-stop. 
+        # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
+
+        # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+        {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+        # Example#B2: Optionally, the validateDir field can be included, which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder.
+        {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
+
+        # Example#B3: Task to execute a command
+        {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+
+        # Example#B4: Task to execute a command with the optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
+        {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+
+        # Example#C1 Some OS may need the "command" field, which specifies the binary path.
+        {"task" : "CheckStashIsRunning", "command" : "stash-linux-arm64v8", "minutes" : 0},
+        # Example#C2 The RunAfter field can be used to specify tasks to run after starting Stash
+        {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" : 0},
+    ],
+}

From f02e66899bb59914d33a85a102675b57c1fe6e20 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 23:04:46 -0400
Subject: [PATCH 24/39] Change default rename using Move

Changed the default rename to use Move, in order to avoid access issues
when the scene is being played.
--- plugins/RenameFile/renamefile.py  | 4 ++--
 plugins/RenameFile/renamefile.yml | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index 00e112a7..a8ab1fd4 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -66,7 +66,7 @@
     "studioAppend": False,
     "tagAppend": False,
     "z_keyFIeldsIncludeInFileName": False,
-    "zafileRenameViaMove": False,
+    "zafileRenameViaRaname": False,
     "zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
     "zmaximumTagKeys": 12,
     "zseparators": DEFAULT_SEPERATOR,
@@ -133,7 +133,7 @@
 if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
 # Extract rename_files and move_files settings from renamefile_settings.py
 rename_files = config["rename_files"]
-move_files = settings["zafileRenameViaMove"]
+move_files = not settings["zafileRenameViaRaname"]
 if debugTracing: logger.info("Debug Tracing................")
 fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order
 if not fieldKeyList or fieldKeyList == "":
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 1c9d5ad9..ca2c8f53 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
 name: RenameFile
 description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. 
-version: 0.4.2
+version: 0.4.3
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
 settings:
   performerAppend:
@@ -19,9 +19,9 @@ settings:
     displayName: Include Existing Key Field
     description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name.
     type: BOOLEAN
-  zafileRenameViaMove:
-    displayName: Rename Using Move
-    description: Enable to have file moved when renaming file.
+  zafileRenameViaRaname:
+    displayName: Rename Instead of Move
+    description: Enable to rename the file instead of moving it. (Not recommended for Windows OS)
     type: BOOLEAN
   zfieldKeyList:
     displayName: Key Fields

From 444a569a32ef529e61ed8cbfcba0fcd86e0fd868 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Thu, 22 Aug 2024 02:40:38 -0400
Subject: [PATCH 25/39] Added logic to stop running multiple scan jobs.

Hundreds of file changes at the same time caused FileMonitor to run many
dozens of scan jobs. Added logic to have FileMonitor delay new scan jobs
while the last scan job is still running.
--- plugins/FileMonitor/filemonitor.py        | 62 ++++++++++++++++++++---
 plugins/FileMonitor/filemonitor_config.py |  5 ++
 2 files changed, 59 insertions(+), 8 deletions(-)

diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index ecc2d5d5..948088c2 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -65,6 +65,8 @@
 RUN_GENERATE_CONTENT = stash.pluginConfig['runGenerateContent']
 SCAN_ON_ANY_EVENT = stash.pluginConfig['onAnyEvent']
 SIGNAL_TIMEOUT = stash.pluginConfig['timeOut'] if stash.pluginConfig['timeOut'] > 0 else 1
+MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS = stash.pluginConfig['timeOutDelayProcess']
+MAX_SECONDS_WAIT_SCANJOB_COMPLETE = stash.pluginConfig['maxWaitTimeJobFinish']
 CREATE_SPECIAL_FILE_TO_EXIT = stash.pluginConfig['createSpecFileToExit']
 DELETE_SPECIAL_FILE_ON_STOP = stash.pluginConfig['deleteSpecFileInStop']
@@ -399,10 +401,19 @@ def checkSchedulePending(self):
         schedule.run_pending()
         stash.TraceOnce("Pending check complete.")

-TargetPaths = []
+TargetPaths = []
+lastScanJob = {
+    "id": -1,
+    "TargetPaths": [],
+    "DelayedProcessTargetPaths": [],
+    "timeAddedToTaskQueue": None,
+    "lastStatus" : ""
+}
+
 def start_library_monitor():
     global shouldUpdate
     global TargetPaths
+    global lastScanJob
     try:
         # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
         shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=True, size=4)
@@ -529,9 +540,17 @@ def on_any_event(event):
                         break
             if stash.pluginSettings['turnOnScheduler']:
                 stashScheduler.checkSchedulePending()
-            stash.LogOnce("Waiting for a file change-trigger.")
-            signal.wait(timeout=SIGNAL_TIMEOUT)
-            if stash.pluginSettings['turnOnScheduler'] and not shouldUpdate:
+            timeOutInSeconds = SIGNAL_TIMEOUT
+            if lastScanJob['DelayedProcessTargetPaths'] != [] and timeOutInSeconds > MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS:
+                timeOutInSeconds = MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS
+                stash.LogOnce(f"Awaiting file change-trigger, with a short timeout ({timeOutInSeconds} seconds), because of active delay path processing.")
+            else:
+                stash.LogOnce(f"Waiting for a file change-trigger. 
Timeout = {timeOutInSeconds} seconds.") + signal.wait(timeout=timeOutInSeconds) + if lastScanJob['DelayedProcessTargetPaths'] != []: + stash.TraceOnce(f"Processing delay scan for path(s) {lastScanJob['DelayedProcessTargetPaths']}") + break + elif stash.pluginSettings['turnOnScheduler'] and not shouldUpdate: stash.TraceOnce("Checking the scheduler.") elif shouldUpdate: stash.LogOnce("File change trigger occurred.") @@ -555,12 +574,39 @@ def on_any_event(event): stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH) TargetPaths = [] TmpTargetPaths = list(set(TmpTargetPaths)) - if TmpTargetPaths != []: + if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []: stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}") - if len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR: + if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR: if not stash.DRY_RUN: - # ToDo: Consider using create_scene, update_scene, and destroy_scene over general method metadata_scan - stash.metadata_scan(paths=TmpTargetPaths) + if lastScanJob['id'] > -1: + lastScanJob['lastStatus'] = stash.find_job(lastScanJob['id']) + stash.Trace(f"Last Scan Job ({lastScanJob['id']}); result = {lastScanJob['lastStatus']}") + elapsedTime = time.time() - lastScanJob['timeAddedToTaskQueue'] + if ('status' in lastScanJob['lastStatus'] and lastScanJob['lastStatus']['status'] == "FINISHED") or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: + if elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: + stash.Warn(f"Timeout occurred waiting for scan job {lastScanJob['id']} to complete. Elapse-Time = {elapsedTime}; Max-Time={MAX_SECONDS_WAIT_SCANJOB_COMPLETE}; Scan-Path(s) = {lastScanJob['TargetPaths']}") + lastScanJob['id'] = -1 + if len(lastScanJob['DelayedProcessTargetPaths']) > 0: + stash.Trace(f"Adding {lastScanJob['DelayedProcessTargetPaths']} to {TmpTargetPaths}") + for path in lastScanJob['DelayedProcessTargetPaths']: + if path not in TmpTargetPaths: + TmpTargetPaths.append(path) + # TmpTargetPaths += [lastScanJob['DelayedProcessTargetPaths']] + stash.Trace(f"TmpTargetPaths = {TmpTargetPaths}") + lastScanJob['DelayedProcessTargetPaths'] = [] + else: + if TmpTargetPaths != []: + stash.Trace(f"Adding {TmpTargetPaths} to {lastScanJob['DelayedProcessTargetPaths']}") + for path in TmpTargetPaths: + if path not in lastScanJob['DelayedProcessTargetPaths']: + lastScanJob['DelayedProcessTargetPaths'].append(path) + stash.Trace(f"lastScanJob['DelayedProcessTargetPaths'] = {lastScanJob['DelayedProcessTargetPaths']}") + if lastScanJob['id'] == -1: + stash.Trace(f"Calling metadata_scan for paths '{TmpTargetPaths}'") + lastScanJob['id'] = int(stash.metadata_scan(paths=TmpTargetPaths)) + lastScanJob['TargetPaths'] = TmpTargetPaths + lastScanJob['timeAddedToTaskQueue'] = time.time() + stash.Trace(f"metadata_scan JobId = {lastScanJob['id']}, Start-Time = {lastScanJob['timeAddedToTaskQueue']}, paths = {lastScanJob['TargetPaths']}") if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata: stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN) if RUN_GENERATE_CONTENT: diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py index dbda6312..e1aaceac 100644 --- a/plugins/FileMonitor/filemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -46,6 +46,11 @@ # Timeout in seconds. 
This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue. "timeOut": 60, + # Timeout in seconds for delay processing of path scan jobs. This value should always be smaller than timeOut + "timeOutDelayProcess": 3, + # Maximum time to wait for a scan job to complete. Need this incase Stash gets restarted in the middle of a scan job. + "maxWaitTimeJobFinish": 30 * 60, # Wait 30 minutes max + # ApiKey only needed when Stash credentials are set and while calling FileMonitor via command line. "apiKey" : "", # Example: "eyJabccideJIUfg1NigRInD345I6dfpXVCfd.eyJ1abcDEfGheHRlHJiJklMonPQ32FsVewtsfSIsImlhdCI6MTcyMzg2NzkwOH0.5bkHU6sfs3532dsryu1ki3iFBwnd_4AHs325yHljsPw" # Enable to run metadata clean task after file deletion. From 43acfe18c4363f29473d61051e9aff5da29324d5 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Thu, 22 Aug 2024 03:20:35 -0400 Subject: [PATCH 26/39] Tweaked delay scan path logic --- plugins/FileMonitor/README.md | 2 +- plugins/FileMonitor/filemonitor.py | 18 ++++++++++++++---- plugins/FileMonitor/filemonitor.yml | 2 +- plugins/FileMonitor/filemonitor_config.py | 2 +- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md index c639631c..5339c9ee 100644 --- a/plugins/FileMonitor/README.md +++ b/plugins/FileMonitor/README.md @@ -1,4 +1,4 @@ -# FileMonitor: Ver 0.8.6 (By David Maisonave) +# FileMonitor: Ver 0.8.7 (By David Maisonave) FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features: - Updates Stash when any file changes occurs in the Stash library. - **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**. diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index 948088c2..f0d69106 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -407,8 +407,10 @@ def checkSchedulePending(self): "TargetPaths": [], "DelayedProcessTargetPaths": [], "timeAddedToTaskQueue": None, + "timeOutDelayProcess": 1, "lastStatus" : "" } +JOB_ENDED_STATUSES = ["FINISHED", "CANCELLED"] def start_library_monitor(): global shouldUpdate @@ -541,8 +543,12 @@ def on_any_event(event): if stash.pluginSettings['turnOnScheduler']: stashScheduler.checkSchedulePending() timeOutInSeconds = SIGNAL_TIMEOUT - if lastScanJob['DelayedProcessTargetPaths'] != [] and timeOutInSeconds > MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS: - timeOutInSeconds = MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS + if lastScanJob['DelayedProcessTargetPaths'] != [] and timeOutInSeconds > lastScanJob['timeOutDelayProcess']: + if lastScanJob['timeOutDelayProcess'] < MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS: + lastScanJob['timeOutDelayProcess'] = lastScanJob['timeOutDelayProcess'] * 2 + if lastScanJob['timeOutDelayProcess'] > MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS: + lastScanJob['timeOutDelayProcess'] = MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS + timeOutInSeconds = lastScanJob['timeOutDelayProcess'] stash.LogOnce(f"Awaiting file change-trigger, with a short timeout ({timeOutInSeconds} seconds), because of active delay path processing.") else: stash.LogOnce(f"Waiting for a file change-trigger. 
Timeout = {timeOutInSeconds} seconds.") @@ -580,12 +586,16 @@ def on_any_event(event): if not stash.DRY_RUN: if lastScanJob['id'] > -1: lastScanJob['lastStatus'] = stash.find_job(lastScanJob['id']) - stash.Trace(f"Last Scan Job ({lastScanJob['id']}); result = {lastScanJob['lastStatus']}") elapsedTime = time.time() - lastScanJob['timeAddedToTaskQueue'] - if ('status' in lastScanJob['lastStatus'] and lastScanJob['lastStatus']['status'] == "FINISHED") or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: + if 'status' not in lastScanJob['lastStatus']: + stash.Warn(f"Could not get a status from scan job {lastScanJob['id']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}") + else: + stash.Trace(f"Last Scan Job ({lastScanJob['id']}); Status = {lastScanJob['lastStatus']['status']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}") + if 'status' not in lastScanJob['lastStatus'] or lastScanJob['lastStatus']['status'] in JOB_ENDED_STATUSES or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: if elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: stash.Warn(f"Timeout occurred waiting for scan job {lastScanJob['id']} to complete. Elapse-Time = {elapsedTime}; Max-Time={MAX_SECONDS_WAIT_SCANJOB_COMPLETE}; Scan-Path(s) = {lastScanJob['TargetPaths']}") lastScanJob['id'] = -1 + lastScanJob['timeOutDelayProcess'] = 1 if len(lastScanJob['DelayedProcessTargetPaths']) > 0: stash.Trace(f"Adding {lastScanJob['DelayedProcessTargetPaths']} to {TmpTargetPaths}") for path in lastScanJob['DelayedProcessTargetPaths']: diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml index 98326791..f96bf70e 100644 --- a/plugins/FileMonitor/filemonitor.yml +++ b/plugins/FileMonitor/filemonitor.yml @@ -1,6 +1,6 @@ name: FileMonitor description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths. -version: 0.8.6 +version: 0.8.7 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor settings: recursiveDisabled: diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py index e1aaceac..38ed73a0 100644 --- a/plugins/FileMonitor/filemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -47,7 +47,7 @@ # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue. "timeOut": 60, # Timeout in seconds for delay processing of path scan jobs. This value should always be smaller than timeOut - "timeOutDelayProcess": 3, + "timeOutDelayProcess": 32, # Maximum time to wait for a scan job to complete. Need this incase Stash gets restarted in the middle of a scan job. 
"maxWaitTimeJobFinish": 30 * 60, # Wait 30 minutes max From af2be30a043497835e9953e1e0e64dbdf5b9cc59 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Thu, 22 Aug 2024 03:54:34 -0400 Subject: [PATCH 27/39] Tweaked delay scan path logic --- plugins/FileMonitor/filemonitor.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index f0d69106..75398d3f 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -313,7 +313,7 @@ def runPluginTask(self, task): pass return None - def checkStashIsRunning(self, task): + def checkStashIsRunning(self, task = {}, sleepAfterStart = 10): try: result = stash.stash_version() except: @@ -349,7 +349,7 @@ def checkStashIsRunning(self, task): stash.Error("Could not start Stash, because could not find executable Stash file '{execPath}'") return None result = f"Execute process PID = {stash.ExecuteProcess(args)}" - time.sleep(10) + time.sleep(sleepAfterStart) if "RunAfter" in task and len(task['RunAfter']) > 0: for runAfterTask in task['RunAfter']: self.runTask(runAfterTask) @@ -585,13 +585,15 @@ def on_any_event(event): if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR: if not stash.DRY_RUN: if lastScanJob['id'] > -1: + if stashScheduler: + stashScheduler.checkStashIsRunning() lastScanJob['lastStatus'] = stash.find_job(lastScanJob['id']) elapsedTime = time.time() - lastScanJob['timeAddedToTaskQueue'] - if 'status' not in lastScanJob['lastStatus']: + if lastScanJob['lastStatus'] == None or lastScanJob['lastStatus'] == "" or 'status' not in lastScanJob['lastStatus']: stash.Warn(f"Could not get a status from scan job {lastScanJob['id']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}") else: stash.Trace(f"Last Scan Job ({lastScanJob['id']}); Status = {lastScanJob['lastStatus']['status']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}") - if 'status' not in lastScanJob['lastStatus'] or lastScanJob['lastStatus']['status'] in JOB_ENDED_STATUSES or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: + if lastScanJob['lastStatus'] == None or lastScanJob['lastStatus'] == "" or 'status' not in lastScanJob['lastStatus'] or lastScanJob['lastStatus']['status'] in JOB_ENDED_STATUSES or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: if elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE: stash.Warn(f"Timeout occurred waiting for scan job {lastScanJob['id']} to complete. Elapse-Time = {elapsedTime}; Max-Time={MAX_SECONDS_WAIT_SCANJOB_COMPLETE}; Scan-Path(s) = {lastScanJob['TargetPaths']}") lastScanJob['id'] = -1 From 688f8c07d1f4abadf6c4d04b2b8b1e8f66471028 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Wed, 28 Aug 2024 04:36:29 -0400 Subject: [PATCH 28/39] FileMonitor updates Separated unit testing into two parts. Increase max log file size. Set status logging to debug level. Added UI option to enable DupFileManager delete duplicate task in the scheduler. Added "every" option for every day of the week in the scheduler. Added scheduler option to start plugin without the Task Scheduler. Changed the format for plugins in the scheduler. 
--- plugins/FileMonitor/README.md                 |  90 +++----
 plugins/FileMonitor/StashPluginHelper.py      | 240 ++++++++++++++----
 plugins/FileMonitor/filemonitor.py            | 123 +++++----
 plugins/FileMonitor/filemonitor.yml           |  12 +-
 plugins/FileMonitor/filemonitor_config.py     |  14 +-
 .../FileMonitor/filemonitor_self_unit_test.py |  49 ++--
 .../FileMonitor/filemonitor_task_examples.py  |  18 +-
 7 files changed, 366 insertions(+), 180 deletions(-)

diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index 5339c9ee..cca15a93 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.8.7 (By David Maisonave)
+# FileMonitor: Ver 0.9.0 (By David Maisonave)
 FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
 - Updates Stash when any file changes occurs in the Stash library.
 - **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**.
@@ -40,18 +40,25 @@ To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor**
 To configure the schedule or to add new task, edit the **task_scheduler** section in the **filemonitor_config.py** file.
 ```` python
     "task_scheduler": [
-        # To create a daily task, include each day of the week for the weekday field.
-        {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
-        {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
-        {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
-
-        # The following tasks are scheduled for 3 days out of the week.
-        {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
-        {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+        # To create a daily task, include each day of the week for the weekday field, or use "every".
+        # An optional field for task "Auto Tag" is 'paths'. For detailed usage, see example #A3 in filemonitor_task_examples.py
+        {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 5AM)
+        # Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir. For detailed usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
+        {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
+            "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] -> [Path Parser] -> [Create Tags] (Daily at 5:30AM) : This task requires plugin [Path Parser]
+        # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. 
The task runs in the background because of "taskQue" : False + {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False, + "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM) + {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM) # The following tasks are scheduled weekly - {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM) - {"task" : "Scan", "weekday" : "sunday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every Sunday at 3AM) + # Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. For detail usage, see examples #A3: in filemonitor_task_examples.py + {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM) + {"task" : "Auto Tag", "weekday" : "saturday", "time" : "03:30"}, # Auto Tag -> [Auto Tag] (Weekly) (Every saturday at 3:30AM) + {"task" : "Generate", "weekday" : "saturday", "time" : "04:00"}, # Generated Content-> [Generate] (Every saturday at 4AM) + {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM) + {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM) + {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM) # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field. # The monthly field value must be 1, 2, 3, or 4. @@ -59,60 +66,37 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio # 2 = 2nd specified weekday of the month. Example 2nd monday of the month. # 3 = 3rd specified weekday of the month. # 4 = 4th specified weekday of the month. - # The following task is scheduled monthly - {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) - - # Example#A1: Task to call call_GQL API with custom input - {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time - - # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by filemonitor.py current directory. - # The args field is NOT required. - {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time - - # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exists, the paths in the Stash library is used. 
- {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan] - {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag] - {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate] - - # Example#A4: Task which calls Migrations -> [Rename generated files] - {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example - - # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value - {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files. - {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups] - - # The above weekday method is the more reliable method to schedule task, because it doesn't rely on FileMonitor running continuously (non-stop). - - # The below examples use frequency field method which can work with minutes and hours. A zero frequency value disables the task. - # Note: Both seconds and days are also supported for the frequency field. - # However, seconds is mainly used for test purposes. - # And days usage is discourage, because it only works if FileMonitor is running for X many days non-stop. - # The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax. - - # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field. - {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled. - # Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder. - {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled. - - # Example#B3: Task to execute a command - {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0}, + # The Backup task is scheduled monthly + # Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py + {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) + # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled. + {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", + "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00) - # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. 
- {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0}, + # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash. + # This task only works if FileMonitor is started as a service or in command line mode. + # Optional fields are 'command' and 'RunAfter'. For detail usage, see examples #C1 and #C2 in filemonitor_task_examples.py + {"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes ], ```` -- To add plugins to the task list, both the Plugin-ID and the plugin name is required. The plugin ID is usually the file name of the script without the extension. - - For plugin task, optionally **validateDir** field can be included that defines the plugin sub directory, which is checked to see if it exist before running the task. +- To add plugins to the task list, use the Plugin-ID in the "task" field. The plugin ID is usually the file name of the script without the extension. + - Plugin task have the following optional fields: taskName, taskMode, validateDir, and taskQue + - The **validateDir** field can be used to define the plugin sub directory, which is checked to see if it exist before running the task. + - **taskName** field is used to name the task to call for the associated plugin. It can not be used with "taskQue":False + - **taskQue** field is used to call the plugin without using the Task Queue. I.E. "taskQue":False. When this field is set to False, the taskName field can NOT be used. Instead use taskMode to identify the task to call. + - **taskMode** field is used in order to run the plugin without using the Task Queue. The plugin runs immediatly. Be careful not to confuse taskMode with taskName. Look in the plugin *.yml file under the **tasks** section where it defines both the task-name and the task-mode. - Task can be scheduled to run monthly, weekly, hourly, and by minutes. - The scheduler list uses two types of syntax. One is **weekday** based, and the other is **frequency** based. - **weekday Based** - Use the weekday based syntax for daily, weekly, and monthly schedules. - All the weekday based methods must have a **weekday** field and a **time** field, which specifies the day(s) of the week and the time to start the task. - **Daily**: - - A daily task populates the weekday field with all the days of the week. + - A daily task populates the weekday field with all the days of the week or with keyword **every**. - **Daily Example**: - Starts a task daily at 6AM. - `{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"},` + - Starts a task daily at 2PM. + - `{"task" : "Optimise Database", "weekday" : "every", "time" : "14:00"},` - **Weekly**: - **Weekly Example**: - Starts a task weekly every monday and 9AM. 
diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py index eec93b61..6f0d3d15 100644 --- a/plugins/FileMonitor/StashPluginHelper.py +++ b/plugins/FileMonitor/StashPluginHelper.py @@ -5,6 +5,8 @@ from stashapi.stash_types import PhashDistance import __main__ +_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_" + # StashPluginHelper (By David Maisonave aka Axter) # See end of this file for example usage # Log Features: @@ -41,6 +43,7 @@ class StashPluginHelper(StashInterface): STASHPATHSCONFIG = None STASH_PATHS = [] API_KEY = None + excludeMergeTags = None # printTo argument LOG_TO_FILE = 1 @@ -62,6 +65,9 @@ class StashPluginHelper(StashInterface): logLinePreviousHits = [] thredPool = None STASH_INTERFACE_INIT = False + _mergeMetadata = None + encodeToUtf8 = False + convertToAscii = False # If set True, it takes precedence over encodeToUtf8 # Prefix message value LEV_TRACE = "TRACE: " @@ -84,7 +90,7 @@ def __init__(self, debugTracing = None, # Set debugTracing to True so as to output debug and trace logging logFormat = LOG_FORMAT, # Plugin log line format dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file - maxbytes = 2*1024*1024, # Max size of plugin log file + maxbytes = 8*1024*1024, # Max size of plugin log file backupcount = 2, # Backup counts when log file size reaches max size logToWrnSet = 0, # Customize the target output set which will get warning logging logToErrSet = 0, # Customize the target output set which will get error logging @@ -126,12 +132,9 @@ def __init__(self, if debugTracing: self.DEBUG_TRACING = debugTracing if config: self.pluginConfig = config - if 'apiKey' in self.pluginConfig and self.pluginConfig['apiKey'] != "": - self.FRAGMENT_SERVER['ApiKey'] = self.pluginConfig['apiKey'] - if DebugTraceFieldName in self.pluginConfig: - self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName] - if DryRunFieldName in self.pluginConfig: - self.DRY_RUN = self.pluginConfig[DryRunFieldName] + if self.Setting('apiKey', "") != "": + self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey') + if apiKey and apiKey != "": self.FRAGMENT_SERVER['ApiKey'] = apiKey @@ -169,6 +172,9 @@ def __init__(self, super().__init__(self.FRAGMENT_SERVER) self.STASH_INTERFACE_INIT = True + if self.STASH_URL.startswith("http://0.0.0.0:"): + self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:") + if self.STASH_INTERFACE_INIT: self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"] self.STASH_CONFIGURATION = self.get_configuration()["general"] @@ -181,12 +187,11 @@ def __init__(self, self.pluginSettings = settings if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) - if DebugTraceFieldName in self.pluginSettings: - self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName] - if DryRunFieldName in self.pluginSettings: - self.DRY_RUN = self.pluginSettings[DryRunFieldName] if 'apiKey' in self.STASH_CONFIGURATION: self.API_KEY = self.STASH_CONFIGURATION['apiKey'] + + self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN) + self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) @@ -197,7 +202,22 @@ def __init__(self, def __del__(self): self.thredPool.shutdown(wait=False) - def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = 
False): + def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + if self.pluginSettings != None and name in self.pluginSettings: + if notEmpty == False or self.pluginSettings[name] != "": + return self.pluginSettings[name] + if self.pluginConfig != None and name in self.pluginConfig: + if notEmpty == False or self.pluginConfig[name] != "": + return self.pluginConfig[name] + if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + raise Exception(f"Missing {name} from both UI settings and config file settings.") + return default + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None): + if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg if printTo == 0: printTo = self.log_to_norm elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: @@ -238,7 +258,7 @@ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelSt if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) - def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1): + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno @@ -246,40 +266,40 @@ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1): if self.DEBUG_TRACING or logAlways: if logMsg == "": logMsg = f"Line number {lineNo}..." - self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways) + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) # Log once per session. Only logs the first time called from a particular line number in the code. - def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False): + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): lineNo = inspect.currentframe().f_back.f_lineno if self.DEBUG_TRACING or logAlways: FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: return self.logLinePreviousHits.append(FuncAndLineNo) - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) # Log INFO on first call, then do Trace on remaining calls. 
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True): + def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE lineNo = inspect.currentframe().f_back.f_lineno FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: if traceOnRemainingCalls: - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) else: self.logLinePreviousHits.append(FuncAndLineNo) - self.Log(logMsg, printTo, logging.INFO, lineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) - def Warn(self, logMsg, printTo = 0): + def Warn(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_wrn_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.WARN, lineNo) + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) - def Error(self, logMsg, printTo = 0): + def Error(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_err_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.ERROR, lineNo) - + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) + def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): if printTo == 0: printTo = self.log_to_norm if lineNo == -1: @@ -310,10 +330,86 @@ def ExecutePythonScript(self, args, ExecDetach=True): argsWithPython = [f"{PythonExe}"] + args return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) - def Submit(*args, **kwargs): - thredPool.submit(*args, **kwargs) + def Submit(self, *args, **kwargs): + return self.thredPool.submit(*args, **kwargs) + + def asc2(self, data, convertToAscii=None): + if convertToAscii or (convertToAscii == None and self.convertToAscii): + return ascii(data) + return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than ascii function + # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function + # return str(data)[2:-1] # strip out b'str' + + def init_mergeMetadata(self, excludeMergeTags=None): + self.excludeMergeTags = excludeMergeTags + self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) + + # Must call init_mergeMetadata, before calling merge_metadata + def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) + + def Progress(self, currentIndex, maxCount): + progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) + self.log.progress(progress) + + def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False): + """Runs a plugin operation. + The operation is run immediately and does not use the job queue. + Args: + plugin_id (ID): plugin_id + task_name (str, optional): Plugin task to perform + args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args'] + Returns: + A map of the result. + """ + query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) 
{ + runPluginOperation(plugin_id: $plugin_id, args: $args) + }""" + if task_mode != None: + args.update({"mode" : task_mode}) + variables = { + "plugin_id": plugin_id, + "args": args, + } + if asyn: + self.Submit(self.call_GQL, query, variables) + return f"Made asynchronous call for plugin {plugin_id}" + else: + return self.call_GQL(query, variables) + + def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): + query = """ + query FindDuplicateScenes($distance: Int, $duration_diff: Float) { + findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { + ...SceneSlim + } + } + """ + if fragment: + query = re.sub(r'\.\.\.SceneSlim', fragment, query) + else: + query += "fragment SceneSlim on Scene { id }" + + variables = { "distance": distance, "duration_diff": duration_diff } + result = self.call_GQL(query, variables) + return result['findDuplicateScenes'] + + # ################################################################################################# + # The below functions extends class StashInterface with functions which are not yet in the class + def get_all_scenes(self): + query_all_scenes = """ + query AllScenes { + allScenes { + id + updated_at + } + } + """ + return self.call_GQL(query_all_scenes) - # Extends class StashInterface with functions which are not yet in the class def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): query = """ mutation MetadataAutoTag($input:AutoTagMetadataInput!) { @@ -355,20 +451,76 @@ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails def rename_generated_files(self): return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") - - def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): - query = """ - query FindDuplicateScenes($distance: Int, $duration_diff: Float) { - findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { - ...SceneSlim - } - } - """ - if fragment: - query = re.sub(r'\.\.\.SceneSlim', fragment, query) - else: - query += "fragment SceneSlim on Scene { id }" - - variables = { "distance": distance, "duration_diff": duration_diff } - result = self.call_GQL(query, variables) - return result['findDuplicateScenes'] + +class mergeMetadata: # A class to merge scene metadata from source scene to destination scene + srcData = None + destData = None + stash = None + excludeMergeTags = None + dataDict = None + result = "Nothing To Merge" + def __init__(self, stash, excludeMergeTags=None): + self.stash = stash + self.excludeMergeTags = excludeMergeTags + + def merge(self, SrcData, DestData): + self.srcData = SrcData + self.destData = DestData + ORG_DATA_DICT = {'id' : self.destData['id']} + self.dataDict = ORG_DATA_DICT.copy() + self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) + self.mergeItems('performers', 'performer_ids', []) + self.mergeItems('galleries', 'gallery_ids', []) + self.mergeItems('movies', 'movies', []) + self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) + self.mergeItem('studio', 'studio_id', 'id') + self.mergeItem('title') + self.mergeItem('director') + self.mergeItem('date') + self.mergeItem('details') + self.mergeItem('rating100') + self.mergeItem('code') + if self.dataDict != ORG_DATA_DICT: + self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; 
path={self.destData['files'][0]['path']}", toAscii=True) + self.result = self.stash.update_scene(self.dataDict) + return self.result + + def Nothing(self, Data): + if not Data or Data == "" or (type(Data) is str and Data.strip() == ""): + return True + return False + + def mergeItem(self,fieldName, updateFieldName=None, subField=None): + if updateFieldName == None: + updateFieldName = fieldName + if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]): + if subField == None: + self.dataDict.update({ updateFieldName : self.srcData[fieldName]}) + else: + self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]}) + def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None): + dataAdded = "" + for item in self.srcData[fieldName]: + if item not in self.destData[fieldName]: + if NotStartWith == None or not item.startswith(NotStartWith): + if excludeName == None or item['name'] not in excludeName: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + dataAdded += f"{item['movie']['id']} " + elif updateFieldName == None: + listToAdd += [item] + dataAdded += f"{item} " + else: + listToAdd += [item['id']] + dataAdded += f"{item['id']} " + if dataAdded != "": + if updateFieldName == None: + updateFieldName = fieldName + else: + for item in self.destData[fieldName]: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + else: + listToAdd += [item['id']] + self.dataDict.update({ updateFieldName : listToAdd}) + # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index 75398d3f..29aea88b 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -3,7 +3,7 @@ # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor # Note: To call this script outside of Stash, pass argument --url and the Stash URL. 
# Example: python filemonitor.py --url http://localhost:9999 -import os, sys, time, pathlib, argparse, platform, traceback +import os, sys, time, pathlib, argparse, platform, traceback, logging from StashPluginHelper import StashPluginHelper import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/ from watchdog.observers import Observer # This is also needed for event attributes @@ -14,8 +14,10 @@ from filemonitor_self_unit_test import self_unit_test config['task_scheduler'] = config['task_scheduler'] + task_examples['task_scheduler'] -if self_unit_test['selfUnitTest']: - config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler'] +if self_unit_test['selfUnitTest_repeat']: + config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_repeat'] +if self_unit_test['selfUnitTest_set_time']: + config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_set_time'] CONTINUE_RUNNING_SIG = 99 STOP_RUNNING_SIG = 32 @@ -38,6 +40,7 @@ settings = { "recursiveDisabled": False, "turnOnScheduler": False, + "turnOnSchedulerDeleteDup": False, "zmaximumBackups": 1, "zzdebugTracing": False } @@ -48,9 +51,10 @@ config=config, logToErrSet=logToErrSet, logToNormSet=logToNormSet, + maxbytes=5*1024*1024, apiKey=parse_args.apikey ) -stash.Status() +stash.Status(logLevel=logging.DEBUG) stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") exitMsg = "Change success!!" @@ -81,6 +85,7 @@ fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else [] includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS excludePathChanges = stash.pluginConfig['excludePathChanges'] +turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup'] if stash.DRY_RUN: stash.Log("Dry run mode is enabled.") @@ -157,29 +162,31 @@ def __init__(self): weekDays = task['weekday'].lower() if 'monthly' in task: stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}") + elif task['weekday'] == "every": + stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every day at {task['time']}") else: stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}") hasValidDay = False - if "monday" in weekDays: + if "monday" in weekDays or "every" in weekDays: schedule.every().monday.at(task['time']).do(self.runTask, task) hasValidDay = True - if "tuesday" in weekDays: + if "tuesday" in weekDays or "every" in weekDays: schedule.every().tuesday.at(task['time']).do(self.runTask, task) hasValidDay = True - if "wednesday" in weekDays: + if "wednesday" in weekDays or "every" in weekDays: schedule.every().wednesday.at(task['time']).do(self.runTask, task) hasValidDay = True - if "thursday" in weekDays: + if "thursday" in weekDays or "every" in weekDays: schedule.every().thursday.at(task['time']).do(self.runTask, task) hasValidDay = True - if "friday" in weekDays: + if "friday" in weekDays or "every" in weekDays: schedule.every().friday.at(task['time']).do(self.runTask, task) hasValidDay = True - if "saturday" in weekDays: + if "saturday" in weekDays or "every" in weekDays or "weekend" 
in weekDays: schedule.every().saturday.at(task['time']).do(self.runTask, task) hasValidDay = True - if "sunday" in weekDays: + if "sunday" in weekDays or "every" in weekDays or "weekend" in weekDays: schedule.every().sunday.at(task['time']).do(self.runTask, task) hasValidDay = True @@ -207,23 +214,23 @@ def runTask(self, task): result = None if task['task'] == "Clean": - result = stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN) + result = self.jobIdOutput(stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN)) elif task['task'] == "Clean Generated Files": - result = stash.metadata_clean_generated() + result = self.jobIdOutput(stash.metadata_clean_generated()) elif task['task'] == "Generate": - result = stash.metadata_generate() + result = self.jobIdOutput(stash.metadata_generate()) elif task['task'] == "Backup": - result = self.runBackupTask(task) + result = self.jobIdOutput(self.runBackupTask(task)) elif task['task'] == "Scan": - result = stash.metadata_scan(paths=targetPaths) + result = self.jobIdOutput(stash.metadata_scan(paths=targetPaths)) elif task['task'] == "Auto Tag": - result = stash.metadata_autotag(paths=targetPaths) + result = self.jobIdOutput(stash.metadata_autotag(paths=targetPaths)) elif task['task'] == "Optimise Database": - result = stash.optimise_database() + result = self.jobIdOutput(stash.optimise_database()) elif task['task'] == "RenameGeneratedFiles": - result = stash.rename_generated_files() + result = self.jobIdOutput(stash.rename_generated_files()) elif task['task'] == "GQL": - result = stash.call_GQL(task['input']) + result = self.jobIdOutput(stash.call_GQL(task['input'])) elif task['task'] == "Log": Msg = "Scheduled Logging (INFO)." if 'msg' in task and task['msg'] != "": @@ -251,11 +258,23 @@ def runTask(self, task): elif task['task'] == "execute": result = self.runExecuteProcessTask(task) else: - result = self.runPluginTask(task) + result = self.jobIdOutput(self.runPluginTask(task)) if result: stash.Trace(f"Task '{task['task']}' result={result}") + def jobIdOutput(self, result): + if result == None or result == "": + return result + jobId = None + if type(result) is int: + jobId = result + elif str(result).isnumeric(): + jobId = int(result) + else: + return result + return f"Task started with Job-ID#({jobId})" + def runExecuteProcessTask(self, task): if 'command' in task and task['command'] != "": cmd = task['command'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}") @@ -284,30 +303,44 @@ def runPythonScript(self, task): return None def runPluginTask(self, task): - # ToDo: Add code to check if plugin is installed. try: - if 'pluginId' in task and task['pluginId'] != "": - invalidDir = False - validDirMsg = "" - if 'validateDir' in task and task['validateDir'] != "": - invalidDir = True - communityPluginPath = f"{stash.PLUGINS_PATH}{os.sep}community{os.sep}{task['validateDir']}" - basePluginPath = f"{stash.PLUGINS_PATH}{os.sep}{task['validateDir']}" - if os.path.exists(communityPluginPath): - invalidDir = False - validDirMsg = f"Valid path in {communityPluginPath}" - elif os.path.exists(basePluginPath): - invalidDir = False - validDirMsg = f"Valid path in {basePluginPath}" - if invalidDir: - stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'") - else: - stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. 
{validDirMsg}") - return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task']) + invalidDir = False + validDirMsg = "" + if 'validateDir' in task and task['validateDir'] != "": + invalidDir = True + communityPluginPath = f"{stash.PLUGINS_PATH}{os.sep}community{os.sep}{task['validateDir']}" + basePluginPath = f"{stash.PLUGINS_PATH}{os.sep}{task['validateDir']}" + if os.path.exists(communityPluginPath): + invalidDir = False + validDirMsg = f"Valid path in {communityPluginPath}" + elif os.path.exists(basePluginPath): + invalidDir = False + validDirMsg = f"Valid path in {basePluginPath}" + if invalidDir: + stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'") + return None + if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and task['taskName'] == "Delete Duplicates") or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")): + stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]") + return None + # The pluginId field is only here for backward compatibility, and should not be used in future scheduler configurations + if 'pluginId' in task and task['pluginId'] != "": # Obsolete method + stash.Trace(f"Adding to Task Queue plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}") + return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task']) else: - stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.") - stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.") - stash.LogOnce(f"If task '{task['task']}' is supposed to be a plugin, make sure to include the pluginId field in the task. task={task}") + taskName = None + taskMode = None + if 'taskName' in task: + taskName = task['taskName'] + if 'taskMode' in task: + taskMode = task['taskMode'] + if ('taskQue' in task and task['taskQue'] == False) or taskName == None: + stash.Log(f"Running plugin task pluginID={task['task']}, task mode = {taskMode}. {validDirMsg}") + # Asynchronous threading logic to call run_plugin, because it's a blocking call. + stash.run_plugin(plugin_id=task['task'], task_mode=taskMode, asyn=True) + return None + else: + stash.Trace(f"Adding to Task Queue plugin task pluginID={task['task']}, task name = {taskName}. {validDirMsg}") + return stash.run_plugin_task(plugin_id=task['task'], task_name=taskName) except Exception as e: stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}") pass @@ -683,7 +716,7 @@ def start_library_monitor_service(): if stash.API_KEY: args = args + ["-a", stash.API_KEY] stash.ExecutePythonScript(args) - + if parse_args.stop or parse_args.restart or stash.PLUGIN_TASK_NAME == "stop_library_monitor": stop_library_monitor() if parse_args.restart: @@ -701,7 +734,7 @@ def start_library_monitor_service(): elif not stash.CALLED_AS_STASH_PLUGIN: try: start_library_monitor() - stash.Trace(f"Command line FileMonitor EXIT") + stash.Trace("Command line FileMonitor EXIT") except Exception as e: tb = traceback.format_exc() stash.Error(f"Exception while running FileMonitor from the command line. 
Error: {e}\nTraceBack={tb}")
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index f96bf70e..5637ee3b 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,6 +1,6 @@
 name: FileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.8.7
+version: 0.9.0
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 settings:
   recursiveDisabled:
@@ -11,9 +11,13 @@ settings:
     displayName: Scheduler
     description: Enable to turn on the scheduler. See filemonitor_config.py for more details.
     type: BOOLEAN
+  turnOnSchedulerDeleteDup:
+    displayName: Delete Duplicate Scheduler
+    description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
+    type: BOOLEAN
   zmaximumBackups:
     displayName: Max DB Backups
-    description: When value greater than 1, will trim the number of database backup files to set value. Requires [Scheduler] enabled and backupDirectoryPath populated with path length longer than 4.
+    description: Trim database backup files to the set value. Requires [Scheduler] enabled and backupDirectoryPath path length longer than 4.
     type: NUMBER
   zzdebugTracing:
     displayName: Debug Tracing
@@ -29,10 +33,10 @@ tasks:
     defaultArgs:
       mode: start_library_monitor_service
   - name: Stop Library Monitor
-    description: Stops library monitoring within 2 minute.
+    description: Stops library monitoring within 2 minutes.
    defaultArgs:
      mode: stop_library_monitor
  - name: Monitor as a Plugin
-    description: Run [Library Monitor] as a plugin (*not recommended method*)
+    description: Run [Library Monitor] as a plugin (*Not recommended*)
    defaultArgs:
      mode: start_library_monitor
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index 38ed73a0..60824fd6 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -11,12 +11,15 @@
     # The hour section in time MUST be a two digit number, and use military time format. Example: 1PM = "13:00" and 1AM = "01:00"
     # Note: Look at filemonitor_task_examples.py for many example tasks with more detailed usage.
     "task_scheduler": [
-        # To create a daily task, include each day of the week for the weekday field.
+        # To create a daily task, include each day of the week in the weekday field, or use "every".
         # Optional field for task "Auto Tag" is 'paths'. For detailed usage, see example #A3 in filemonitor_task_examples.py
         {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 5AM)
-        # Task "Create Tags" is a plugin task. All plugin task have a REQUIRED pluginId field and an optional validateDir field. For detail usage, see examples #B1 and #B2 in filemonitor_task_examples.py
-        {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser",
-            "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
+        # Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir. For detailed usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
+        {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
+            "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] -> [Path Parser] -> [Create Tags] (Daily at 5:30AM) : This task requires plugin [Path Parser]
+        # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False
+        {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
+            "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] (Daily at 2:30AM)
         {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)

         # The following tasks are scheduled weekly
@@ -37,6 +40,9 @@
         # The Backup task is scheduled monthly
         # Optional field for task "Backup" is maxBackup. For detailed usage, see example #A5 in filemonitor_task_examples.py
         {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+        # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
+        {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
+            "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)

         # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up Stash.
         # This task only works if FileMonitor is started as a service or in command line mode.
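The taskName, taskMode, and taskQue fields above map onto two execution paths in filemonitor.py. A minimal sketch of that dispatch, assuming the run_plugin and run_plugin_task calls used by this patch (dispatch_plugin_task is a hypothetical helper name, for illustration only):

    def dispatch_plugin_task(stash, task):
        # task['task'] holds the plugin-ID in the new scheduler syntax.
        taskName = task.get('taskName')
        taskMode = task.get('taskMode')
        if task.get('taskQue') is False or taskName is None:
            # Entries with taskMode, "taskQue" : False, or no taskName run immediately,
            # bypassing the Task Queue (asynchronously, since run_plugin blocks).
            stash.run_plugin(plugin_id=task['task'], task_mode=taskMode, asyn=True)
            return None
        # Entries with a taskName are added to the Stash Task Queue by name.
        return stash.run_plugin_task(plugin_id=task['task'], task_name=taskName)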
diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py index c30311fc..83942f46 100644 --- a/plugins/FileMonitor/filemonitor_self_unit_test.py +++ b/plugins/FileMonitor/filemonitor_self_unit_test.py @@ -4,39 +4,42 @@ # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts self_unit_test = { - "task_scheduler": [ + "task_scheduler_repeat": [ {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command) {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts) - {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId) {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command) {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts) - {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId) - {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory) + {"task" : "Foo","taskName":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory) {"task" : "Log", "msg" : "Testing Scheduled Log", "minutes" : 1}, # Test plugin log file {"task" : "Trace", "minutes" : 1}, # Test plugin trace logging {"task" : "LogOnce", "seconds" :15}, # Test LogOnce {"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce - # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe" {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter - {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash. - {"task" : "Generate", "weekday" : "friday", "time" : "12:03"}, - {"task" : "Clean", "weekday" : "friday", "time" : "12:03"}, - {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"}, - {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"}, - {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags - {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"}, - {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database... - {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"}, - {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes... - {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run. - {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? 
- {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? - {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? - {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? + {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash. + # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe" ], - + "task_scheduler_set_time": [ + # Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled. + {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] + {"task" : "Generate", "weekday" : "every", "time" : "04:01"}, + {"task" : "Clean", "weekday" : "every", "time" : "04:01"}, + {"task" : "Auto Tag", "weekday" : "every", "time" : "04:01"}, + {"task" : "Optimise Database", "weekday" : "every", "time" : "04:01"}, + {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Running plugin task: Create Tags + {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "04:01"}, # Does NOT run in the task queue + {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] + {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "04:01"}, + {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Optimising database... + {"task" : "Clean Generated Files", "weekday" : "every", "time" : "04:01"}, + {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Migrating scene hashes... + {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run. + {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? + {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? + {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? 
+ {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? + ], # MUST ToDo: Always set selfUnitTest to False before checking in this code!!! - # Enable to turn on self unit test. - "selfUnitTest": False, + "selfUnitTest_repeat" : False , # Enable to turn on self unit test. + "selfUnitTest_set_time" : False , # Enable to turn on self unit test. } diff --git a/plugins/FileMonitor/filemonitor_task_examples.py b/plugins/FileMonitor/filemonitor_task_examples.py index 3cbfad23..2bd58126 100644 --- a/plugins/FileMonitor/filemonitor_task_examples.py +++ b/plugins/FileMonitor/filemonitor_task_examples.py @@ -30,20 +30,24 @@ # And days usage is discourage, because it only works if FileMonitor is running for X many days non-stop. # The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax. - # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field. - {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled. + # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin-ID for the [task] field. Optional fields are taskName, taskMode, validateDir, and taskQue. + {"task" : "PluginId_Here", "taskName" : "Task Name or Plugin Button Name Here", "hours" : 0}, # The zero frequency value makes this task disabled. # Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder. - {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled. + {"task" : "PluginId_Here", "taskName" : "Task Name or Plugin Button Name Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, + # Example#B3: To run a plugin WITHOUT using the Task Queue, use taskMode instead of taskName and/or add field "taskQue":False. The plugin will run immediately + {"task" : "PluginId_Here", "taskMode" : "Plugin_Task_MODE", "taskQue" : False, "hours" : 0}, # Do NOT use taskName when including "taskQue":False + # Example#B4: When taskName field is missing, it will always run the task without using the Task Queue. The plugin will run immediately + {"task" : "PluginId_Here", "hours" : 0}, - # Example#B3: Task to execute a command + # Example#C1: Task to execute a command {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0}, - # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. + # Example#C2: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0}, - # Example#C1 Some OS may need the "command" field, which specifies the binary path. + # Example#D1 Some OS may need the "command" field, which specifies the binary path. 
{"task" : "CheckStashIsRunning", "command" : "stash-linux-arm64v8", "minutes" :0}, - # Example#C2 RunAfter field can be used to specify task to run after starting Stash + # Example#D2 RunAfter field can be used to specify task to run after starting Stash {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0}, ], } From a667d8786c4979138461c39c212af1777e011cbb Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Wed, 28 Aug 2024 04:43:08 -0400 Subject: [PATCH 29/39] RenameFile Updates Fixed issue with Studio triggering folder being created on rename. Integrated changes to use StashPluginHelper class, and stream line the code. Put rename back as default method over move. Removed functions which are already included in stashapi and StashPluginHelper. --- plugins/RenameFile/README.md | 2 +- plugins/RenameFile/StashPluginHelper.py | 526 ++++++++++++++++++++++ plugins/RenameFile/renamefile.py | 407 ++++------------- plugins/RenameFile/renamefile.yml | 8 +- plugins/RenameFile/renamefile_settings.py | 4 +- 5 files changed, 627 insertions(+), 320 deletions(-) create mode 100644 plugins/RenameFile/StashPluginHelper.py diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md index bab3fca7..c2f7041e 100644 --- a/plugins/RenameFile/README.md +++ b/plugins/RenameFile/README.md @@ -1,4 +1,4 @@ -# RenameFile: Ver 0.4.2 (By David Maisonave) +# RenameFile: Ver 0.4.6 (By David Maisonave) RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks. - **Rename Scene File Name** (On-The-Fly) - **Append tag names** to file name diff --git a/plugins/RenameFile/StashPluginHelper.py b/plugins/RenameFile/StashPluginHelper.py new file mode 100644 index 00000000..6f0d3d15 --- /dev/null +++ b/plugins/RenameFile/StashPluginHelper.py @@ -0,0 +1,526 @@ +from stashapi.stashapp import StashInterface +from logging.handlers import RotatingFileHandler +import re, inspect, sys, os, pathlib, logging, json +import concurrent.futures +from stashapi.stash_types import PhashDistance +import __main__ + +_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_" + +# StashPluginHelper (By David Maisonave aka Axter) + # See end of this file for example usage + # Log Features: + # Can optionally log out to multiple outputs for each Log or Trace call. 
+        # Logging includes source code line number
+        # Sets a maximum plugin log file size
+    # Stash Interface Features:
+        # Gets STASH_URL value from command line argument and/or from STDIN_READ
+        # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
+        # Sets PLUGIN_ID based on the main script file name (in lower case)
+        # Gets PLUGIN_TASK_NAME value
+        # Sets pluginSettings (The plugin UI settings)
+    # Misc Features:
+        # Gets DRY_RUN value from command line argument and/or from UI and/or from config file
+        # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
+        # Sets RUNNING_IN_COMMAND_LINE_MODE to True if it detects multiple arguments
+        # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+class StashPluginHelper(StashInterface):
+    # Primary Members for external reference
+    PLUGIN_TASK_NAME = None
+    PLUGIN_ID = None
+    PLUGIN_CONFIGURATION = None
+    PLUGINS_PATH = None
+    pluginSettings = None
+    pluginConfig = None
+    STASH_URL = None
+    STASH_CONFIGURATION = None
+    JSON_INPUT = None
+    DEBUG_TRACING = False
+    DRY_RUN = False
+    CALLED_AS_STASH_PLUGIN = False
+    RUNNING_IN_COMMAND_LINE_MODE = False
+    FRAGMENT_SERVER = None
+    STASHPATHSCONFIG = None
+    STASH_PATHS = []
+    API_KEY = None
+    excludeMergeTags = None
+
+    # printTo argument
+    LOG_TO_FILE = 1
+    LOG_TO_CONSOLE = 2  # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+    LOG_TO_STDERR = 4   # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+    LOG_TO_STASH = 8
+    LOG_TO_WARN = 16
+    LOG_TO_ERROR = 32
+    LOG_TO_CRITICAL = 64
+    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+
+    # Misc class variables
+    MAIN_SCRIPT_NAME = None
+    LOG_LEVEL = logging.INFO
+    LOG_FILE_DIR = None
+    LOG_FILE_NAME = None
+    STDIN_READ = None
+    pluginLog = None
+    logLinePreviousHits = []
+    thredPool = None
+    STASH_INTERFACE_INIT = False
+    _mergeMetadata = None
+    encodeToUtf8 = False
+    convertToAscii = False  # If set True, it takes precedence over encodeToUtf8
+
+    # Prefix message value
+    LEV_TRACE = "TRACE: "
+    LEV_DBG = "DBG: "
+    LEV_INF = "INF: "
+    LEV_WRN = "WRN: "
+    LEV_ERR = "ERR: "
+    LEV_CRITICAL = "CRITICAL: "
+
+    # Default format
+    LOG_FORMAT = "[%(asctime)s] %(message)s"
+
+    # Externally modifiable variables
+    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR  # This can be changed by the calling source in order to customize what targets get error messages
+    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE    # Can be changed so as to set the target output for normal logging
+    # Warn messages go to both the plugin log file and Stash when sent to the Stash log file.
+    log_to_wrn_set = LOG_TO_STASH  # This can be changed by the calling source in order to customize what targets get warning messages
+
+    def __init__(self,
+                 debugTracing = None,           # Set debugTracing to True so as to output debug and trace logging
+                 logFormat = LOG_FORMAT,        # Plugin log line format
+                 dateFmt = "%y%m%d %H:%M:%S",   # Date format when logging to plugin log file
+                 maxbytes = 8*1024*1024,        # Max size of plugin log file
+                 backupcount = 2,               # Number of backups kept when the log file reaches max size
+                 logToWrnSet = 0,               # Customize the target output set which will get warning logging
+                 logToErrSet = 0,               # Customize the target output set which will get error logging
+                 logToNormSet = 0,              # Customize the target output set which will get normal logging
+                 logFilePath = "",              # Plugin log file.
If empty, the log file name will be set based on the current python file name and path
+                 mainScriptName = "",           # The main plugin script file name (full path)
+                 pluginID = "",
+                 settings = None,               # Default settings for UI fields
+                 config = None,                 # From pluginName_config.py or pluginName_setting.py
+                 fragmentServer = None,
+                 stash_url = None,              # Stash URL (endpoint URL) Example: http://localhost:9999
+                 apiKey = None,                 # API Key only needed when username and password set while running script via command line
+                 DebugTraceFieldName = "zzdebugTracing",
+                 DryRunFieldName = "zzdryRun",
+                 setStashLoggerAsPluginLogger = False):
+        self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+        if logToWrnSet: self.log_to_wrn_set = logToWrnSet
+        if logToErrSet: self.log_to_err_set = logToErrSet
+        if logToNormSet: self.log_to_norm = logToNormSet
+        if stash_url and len(stash_url): self.STASH_URL = stash_url
+        self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
+        self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
+        # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
+        self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
+        self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
+        RFH = RotatingFileHandler(
+            filename=self.LOG_FILE_NAME,
+            mode='a',
+            maxBytes=maxbytes,
+            backupCount=backupcount,
+            encoding=None,
+            delay=0
+        )
+        if fragmentServer:
+            self.FRAGMENT_SERVER = fragmentServer
+        else:
+            self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
+
+        if debugTracing: self.DEBUG_TRACING = debugTracing
+        if config:
+            self.pluginConfig = config
+            if self.Setting('apiKey', "") != "":
+                self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey')
+
+        if apiKey and apiKey != "":
+            self.FRAGMENT_SERVER['ApiKey'] = apiKey
+
+        if len(sys.argv) > 1:
+            self.RUNNING_IN_COMMAND_LINE_MODE = True
+            if not debugTracing or not stash_url:
+                for argValue in sys.argv[1:]:
+                    if argValue.lower() == "--trace":
+                        self.DEBUG_TRACING = True
+                    elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
+                        self.DRY_RUN = True
+                    elif ":" in argValue and not self.STASH_URL:
+                        self.STASH_URL = argValue
+            if self.STASH_URL:
+                endpointUrlArr = self.STASH_URL.split(":")
+                if len(endpointUrlArr) == 3:
+                    self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
+                    self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
+                    self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
+            super().__init__(self.FRAGMENT_SERVER)
+            self.STASH_INTERFACE_INIT = True
+        else:
+            try:
+                self.STDIN_READ = sys.stdin.read()
+                self.CALLED_AS_STASH_PLUGIN = True
+            except:
+                pass
+        if self.STDIN_READ:
+            self.JSON_INPUT = json.loads(self.STDIN_READ)
+            if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
+                self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
+            self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
+            self.STASH_URL =
f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}" + super().__init__(self.FRAGMENT_SERVER) + self.STASH_INTERFACE_INIT = True + + if self.STASH_URL.startswith("http://0.0.0.0:"): + self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:") + + if self.STASH_INTERFACE_INIT: + self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"] + self.STASH_CONFIGURATION = self.get_configuration()["general"] + self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes'] + if 'pluginsPath' in self.STASH_CONFIGURATION: + self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath'] + for item in self.STASHPATHSCONFIG: + self.STASH_PATHS.append(item["path"]) + if settings: + self.pluginSettings = settings + if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: + self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) + if 'apiKey' in self.STASH_CONFIGURATION: + self.API_KEY = self.STASH_CONFIGURATION['apiKey'] + + self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN) + self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) + if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG + + logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) + self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem) + if setStashLoggerAsPluginLogger: + self.log = self.pluginLog + + def __del__(self): + self.thredPool.shutdown(wait=False) + + def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + if self.pluginSettings != None and name in self.pluginSettings: + if notEmpty == False or self.pluginSettings[name] != "": + return self.pluginSettings[name] + if self.pluginConfig != None and name in self.pluginConfig: + if notEmpty == False or self.pluginConfig[name] != "": + return self.pluginConfig[name] + if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + raise Exception(f"Missing {name} from both UI settings and config file settings.") + return default + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None): + if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg + if printTo == 0: + printTo = self.log_to_norm + elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: + logLevel = logging.ERROR + printTo = self.log_to_err_set + elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO: + logLevel = logging.CRITICAL + printTo = self.log_to_err_set + elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO: + logLevel = logging.WARN + printTo = self.log_to_wrn_set + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + LN_Str = f"[LN:{lineNo}]" + # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}") + if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG): + if levelStr == "": levelStr = self.LEV_DBG + if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.INFO or logLevel == logging.DEBUG: + if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG + if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}") + elif 
logLevel == logging.WARN: + if levelStr == "": levelStr = self.LEV_WRN + if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.ERROR: + if levelStr == "": levelStr = self.LEV_ERR + if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.CRITICAL: + if levelStr == "": levelStr = self.LEV_CRITICAL + if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) + + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): + if printTo == 0: printTo = self.LOG_TO_FILE + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + logLev = logging.INFO if logAlways else logging.DEBUG + if self.DEBUG_TRACING or logAlways: + if logMsg == "": + logMsg = f"Line number {lineNo}..." + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) + + # Log once per session. Only logs the first time called from a particular line number in the code. + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): + lineNo = inspect.currentframe().f_back.f_lineno + if self.DEBUG_TRACING or logAlways: + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + return + self.logLinePreviousHits.append(FuncAndLineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) + + # Log INFO on first call, then do Trace on remaining calls. 
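+    #   Useful in loops and other frequently-called code paths, where logging INFO on every call would flood the plugin log file.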
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): + if printTo == 0: printTo = self.LOG_TO_FILE + lineNo = inspect.currentframe().f_back.f_lineno + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + if traceOnRemainingCalls: + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) + else: + self.logLinePreviousHits.append(FuncAndLineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) + + def Warn(self, logMsg, printTo = 0, toAscii = None): + if printTo == 0: printTo = self.log_to_wrn_set + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) + + def Error(self, logMsg, printTo = 0, toAscii = None): + if printTo == 0: printTo = self.log_to_err_set + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) + + def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): + if printTo == 0: printTo = self.log_to_norm + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", + printTo, logLevel, lineNo) + + def ExecuteProcess(self, args, ExecDetach=False): + import platform, subprocess + is_windows = any(platform.win32_ver()) + pid = None + self.Trace(f"is_windows={is_windows} args={args}") + if is_windows: + if ExecDetach: + self.Trace("Executing process using Windows DETACHED_PROCESS") + DETACHED_PROCESS = 0x00000008 + pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + else: + pid = subprocess.Popen(args, shell=True).pid + else: + self.Trace("Executing process using normal Popen") + pid = subprocess.Popen(args).pid + self.Trace(f"pid={pid}") + return pid + + def ExecutePythonScript(self, args, ExecDetach=True): + PythonExe = f"{sys.executable}" + argsWithPython = [f"{PythonExe}"] + args + return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) + + def Submit(self, *args, **kwargs): + return self.thredPool.submit(*args, **kwargs) + + def asc2(self, data, convertToAscii=None): + if convertToAscii or (convertToAscii == None and self.convertToAscii): + return ascii(data) + return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than ascii function + # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function + # return str(data)[2:-1] # strip out b'str' + + def init_mergeMetadata(self, excludeMergeTags=None): + self.excludeMergeTags = excludeMergeTags + self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) + + # Must call init_mergeMetadata, before calling merge_metadata + def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) + + def Progress(self, currentIndex, maxCount): + progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) + self.log.progress(progress) + + def run_plugin(self, plugin_id, 
task_mode=None, args:dict={}, asyn=False):
+        """Runs a plugin operation.
+           The operation is run immediately and does not use the job queue.
+        Args:
+            plugin_id (ID): plugin_id
+            task_mode (str, optional): Plugin task mode to perform
+            args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
+        Returns:
+            A map of the result.
+        """
+        query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
+            runPluginOperation(plugin_id: $plugin_id, args: $args)
+            }"""
+        if task_mode != None:
+            args.update({"mode" : task_mode})
+        variables = {
+            "plugin_id": plugin_id,
+            "args": args,
+        }
+        if asyn:
+            self.Submit(self.call_GQL, query, variables)
+            return f"Made asynchronous call for plugin {plugin_id}"
+        else:
+            return self.call_GQL(query, variables)
+
+    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
+        query = """
+            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
+                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+                    ...SceneSlim
+                }
+            }
+        """
+        if fragment:
+            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+        else:
+            query += "fragment SceneSlim on Scene { id }"
+
+        variables = { "distance": distance, "duration_diff": duration_diff }
+        result = self.call_GQL(query, variables)
+        return result['findDuplicateScenes']
+
+    # #################################################################################################
+    # The below functions extend class StashInterface with functions which are not yet in the class
+    def get_all_scenes(self):
+        query_all_scenes = """
+            query AllScenes {
+                allScenes {
+                    id
+                    updated_at
+                }
+            }
+        """
+        return self.call_GQL(query_all_scenes)
+
+    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
+        query = """
+            mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+                metadataAutoTag(input: $input)
+            }
+        """
+        metadata_autotag_input = {
+            "paths":paths,
+            "performers": performers,
+            "studios":studios,
+            "tags":tags,
+        }
+        result = self.call_GQL(query, {"input": metadata_autotag_input})
+        return result
+
+    def backup_database(self):
+        return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+    def optimise_database(self):
+        return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
+
+    def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
+        query = """
+        mutation MetadataCleanGenerated($input: CleanGeneratedInput!)
{ + metadataCleanGenerated(input: $input) + } + """ + clean_metadata_input = { + "blobFiles": blobFiles, + "dryRun": dryRun, + "imageThumbnails": imageThumbnails, + "markers": markers, + "screenshots": screenshots, + "sprites": sprites, + "transcodes": transcodes, + } + result = self.call_GQL(query, {"input": clean_metadata_input}) + return result + + def rename_generated_files(self): + return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") + +class mergeMetadata: # A class to merge scene metadata from source scene to destination scene + srcData = None + destData = None + stash = None + excludeMergeTags = None + dataDict = None + result = "Nothing To Merge" + def __init__(self, stash, excludeMergeTags=None): + self.stash = stash + self.excludeMergeTags = excludeMergeTags + + def merge(self, SrcData, DestData): + self.srcData = SrcData + self.destData = DestData + ORG_DATA_DICT = {'id' : self.destData['id']} + self.dataDict = ORG_DATA_DICT.copy() + self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) + self.mergeItems('performers', 'performer_ids', []) + self.mergeItems('galleries', 'gallery_ids', []) + self.mergeItems('movies', 'movies', []) + self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) + self.mergeItem('studio', 'studio_id', 'id') + self.mergeItem('title') + self.mergeItem('director') + self.mergeItem('date') + self.mergeItem('details') + self.mergeItem('rating100') + self.mergeItem('code') + if self.dataDict != ORG_DATA_DICT: + self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True) + self.result = self.stash.update_scene(self.dataDict) + return self.result + + def Nothing(self, Data): + if not Data or Data == "" or (type(Data) is str and Data.strip() == ""): + return True + return False + + def mergeItem(self,fieldName, updateFieldName=None, subField=None): + if updateFieldName == None: + updateFieldName = fieldName + if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]): + if subField == None: + self.dataDict.update({ updateFieldName : self.srcData[fieldName]}) + else: + self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]}) + def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None): + dataAdded = "" + for item in self.srcData[fieldName]: + if item not in self.destData[fieldName]: + if NotStartWith == None or not item.startswith(NotStartWith): + if excludeName == None or item['name'] not in excludeName: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + dataAdded += f"{item['movie']['id']} " + elif updateFieldName == None: + listToAdd += [item] + dataAdded += f"{item} " + else: + listToAdd += [item['id']] + dataAdded += f"{item['id']} " + if dataAdded != "": + if updateFieldName == None: + updateFieldName = fieldName + else: + for item in self.destData[fieldName]: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + else: + listToAdd += [item['id']] + self.dataDict.update({ updateFieldName : listToAdd}) + # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index a8ab1fd4..4a00d84c 100644 --- a/plugins/RenameFile/renamefile.py +++ 
b/plugins/RenameFile/renamefile.py @@ -2,30 +2,17 @@ # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile # Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer -import os -import sys -import shutil -import hashlib -import json +import os, sys, shutil, json, requests, hashlib, pathlib, logging from pathlib import Path -import requests -import logging -from logging.handlers import RotatingFileHandler import stashapi.log as log # Importing stashapi.log as log for critical events ONLY from stashapi.stashapp import StashInterface +from StashPluginHelper import StashPluginHelper from renamefile_settings import config # Import settings from renamefile_settings.py # ********************************************************************** # Constant global variables -------------------------------------------- -LOG_FILE_PATH = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" -FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order -PLUGIN_ID = Path(__file__).stem.lower() DEFAULT_SEPERATOR = "-" -PLUGIN_ARGS = False -PLUGIN_ARGS_MODE = False -WRAPPER_STYLES = config["wrapper_styles"] -POSTFIX_STYLES = config["postfix_styles"] # GraphQL query to fetch all scenes QUERY_ALL_SCENES = """ query AllScenes { @@ -35,133 +22,81 @@ } } """ -RFH = RotatingFileHandler( - filename=LOG_FILE_PATH, - mode='a', - maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K - backupCount=2, - encoding=None, - delay=0 -) - # ********************************************************************** # Global variables -------------------------------------------- inputToUpdateScenePost = False exitMsg = "Change success!!" 
-# Configure local log file for plugin within plugin folder having a limited max log file size -logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) -logger = logging.getLogger(PLUGIN_ID) - # ********************************************************************** # ---------------------------------------------------------------------- -# Code section to fetch variables from Plugin UI and from renamefile_settings.py -json_input = json.loads(sys.stdin.read()) -FRAGMENT_SERVER = json_input['server_connection'] -stash = StashInterface(FRAGMENT_SERVER) -pluginConfiguration = stash.get_configuration()["plugins"] - settings = { "performerAppend": False, "studioAppend": False, "tagAppend": False, "z_keyFIeldsIncludeInFileName": False, - "zafileRenameViaRaname": False, + "zafileRenameViaMove": False, "zfieldKeyList": DEFAULT_FIELD_KEY_LIST, "zmaximumTagKeys": 12, "zseparators": DEFAULT_SEPERATOR, "zzdebugTracing": False, "zzdryRun": False, } -if PLUGIN_ID in pluginConfiguration: - settings.update(pluginConfiguration[PLUGIN_ID]) +stash = StashPluginHelper( + settings=settings, + config=config, + maxbytes=10*1024*1024, + ) +stash.Status(logLevel=logging.DEBUG) +if stash.PLUGIN_ID in stash.PLUGIN_CONFIGURATION: + stash.pluginSettings.update(stash.PLUGIN_CONFIGURATION[stash.PLUGIN_ID]) # ---------------------------------------------------------------------- -debugTracing = settings["zzdebugTracing"] +WRAPPER_STYLES = config["wrapper_styles"] +POSTFIX_STYLES = config["postfix_styles"] # Extract dry_run setting from settings -dry_run = settings["zzdryRun"] +dry_run = stash.pluginSettings["zzdryRun"] dry_run_prefix = '' try: - PLUGIN_ARGS = json_input['args'] - PLUGIN_ARGS_MODE = json_input['args']["mode"] -except: - pass -try: - if json_input['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice + if stash.JSON_INPUT['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice except: pass -logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************") -if debugTracing: logger.info("settings: %s " % (settings,)) - -if PLUGIN_ID in pluginConfiguration: - if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................") - # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]: - # if debugTracing: logger.info("Debug Tracing................") - # try: - # stash.configure_plugin(PLUGIN_ID, settings) - # stash.configure_plugin("renamefile", {"zmaximumTagKeys": 12}) - # except Exception as e: - # logger.error(f"configure_plugin failed!!! 
Error: {e}") - # logger.exception('Got exception on main handler') - # pass - # # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True - # if debugTracing: logger.info("Debug Tracing................") +stash.Trace("settings: %s " % (stash.pluginSettings,)) if dry_run: - logger.info("Dry run mode is enabled.") + stash.Log("Dry run mode is enabled.") dry_run_prefix = "Would've " -if debugTracing: logger.info("Debug Tracing................") -max_tag_keys = settings["zmaximumTagKeys"] if settings["zmaximumTagKeys"] != 0 else 12 # Need this incase use explicitly sets value to zero in UI -if debugTracing: logger.info("Debug Tracing................") +max_tag_keys = stash.pluginSettings["zmaximumTagKeys"] if stash.pluginSettings["zmaximumTagKeys"] != 0 else 12 # Need this incase use explicitly sets value to zero in UI # ToDo: Add split logic here to slpit possible string array into an array exclude_paths = config["pathToExclude"] exclude_paths = exclude_paths.split() -if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................") +stash.Trace(f"(exclude_paths={exclude_paths})") excluded_tags = config["excludeTags"] # Extract tag whitelist from settings tag_whitelist = config["tagWhitelist"] -if debugTracing: logger.info("Debug Tracing................") if not tag_whitelist: tag_whitelist = "" -if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................") +stash.Trace(f"(tag_whitelist={tag_whitelist})") -endpointHost = json_input['server_connection']['Host'] +endpointHost = stash.JSON_INPUT['server_connection']['Host'] if endpointHost == "0.0.0.0": endpointHost = "localhost" -endpoint = f"{json_input['server_connection']['Scheme']}://{endpointHost}:{json_input['server_connection']['Port']}/graphql" +endpoint = f"{stash.JSON_INPUT['server_connection']['Scheme']}://{endpointHost}:{stash.JSON_INPUT['server_connection']['Port']}/graphql" -if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................") -# Extract rename_files and move_files settings from renamefile_settings.py -rename_files = config["rename_files"] -move_files = False if settings["zafileRenameViaRaname"] else True -if debugTracing: logger.info("Debug Tracing................") -fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order +stash.Trace(f"(endpoint={endpoint})") +move_files = stash.pluginSettings["zafileRenameViaMove"] +fieldKeyList = stash.pluginSettings["zfieldKeyList"] # Default Field Key List with the desired order if not fieldKeyList or fieldKeyList == "": fieldKeyList = DEFAULT_FIELD_KEY_LIST fieldKeyList = fieldKeyList.replace(" ", "") fieldKeyList = fieldKeyList.replace(";", ",") fieldKeyList = fieldKeyList.split(",") -if debugTracing: logger.info(f"Debug Tracing (fieldKeyList={fieldKeyList})................") -separator = settings["zseparators"] +stash.Trace(f"(fieldKeyList={fieldKeyList})") +separator = stash.pluginSettings["zseparators"] # ---------------------------------------------------------------------- # ********************************************************************** double_separator = separator + separator -if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................") -if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID=\"{PLUGIN_ID}\")................") -if debugTracing: logger.info("Debug Tracing................") - -# Function to make GraphQL requests -def 
graphql_request(query, variables=None): - if debugTracing: logger.info("Debug Tracing................%s", query) - data = {'query': query} - if variables: - data['variables'] = variables - if debugTracing: logger.info("Debug Tracing................") - if debugTracing: logger.info("Debug Tracing................") - response = requests.post(endpoint, json=data) - if debugTracing: logger.info("Debug Tracing................") - return response.json() +stash.Trace(f"(WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})") # Function to replace illegal characters in filenames def replace_illegal_characters(filename): @@ -179,12 +114,11 @@ def should_exclude_path(scene_details): # Function to form the new filename based on scene details and user settings def form_filename(original_file_stem, scene_details): - if debugTracing: logger.info("Debug Tracing................") filename_parts = [] tag_keys_added = 0 default_title = '' if_notitle_use_org_filename = config["if_notitle_use_org_filename"] - include_keyField_if_in_name = settings["z_keyFIeldsIncludeInFileName"] + include_keyField_if_in_name = stash.pluginSettings["z_keyFIeldsIncludeInFileName"] if if_notitle_use_org_filename: default_title = original_file_stem # ................... @@ -195,44 +129,39 @@ def form_filename(original_file_stem, scene_details): title = default_title # ................... - if debugTracing: logger.info(f"Debug Tracing (title=\"{title}\")................") + stash.Trace(f"(title=\"{title}\")") # Function to add tag to filename def add_tag(tag_name): nonlocal tag_keys_added nonlocal filename_parts - if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") + stash.Trace(f"(tag_name={tag_name})") if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)): return # Skip adding more tags if the maximum limit is reached if tag_name in excluded_tags: - if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})") + stash.Trace(f"EXCLUDING (tag_name={tag_name})") return # Check if the tag name is in the whitelist if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist): if WRAPPER_STYLES.get('tag'): filename_parts.append(f"{WRAPPER_STYLES['tag'][0]}{tag_name}{WRAPPER_STYLES['tag'][1]}") - if debugTracing: logger.info("Debug Tracing................") else: filename_parts.append(tag_name) - if debugTracing: logger.info("Debug Tracing................") tag_keys_added += 1 - if debugTracing: logger.info("Debug Tracing................") else: - logger.info(f"Skipping tag not in whitelist: {tag_name}") - if debugTracing: logger.info(f"Debug Tracing (tag_keys_added={tag_keys_added})................") + stash.Log(f"Skipping tag not in whitelist: {tag_name}") + stash.Trace(f"(tag_keys_added={tag_keys_added})") for key in fieldKeyList: if key == 'studio': - if settings["studioAppend"]: - if debugTracing: logger.info("Debug Tracing................") + if stash.pluginSettings["studioAppend"]: studio_name = scene_details.get('studio', {}) - if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................") + stash.Trace(f"(studio_name={studio_name})") if studio_name: studio_name = scene_details.get('studio', {}).get('name', '') - if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................") + stash.Trace(f"(studio_name={studio_name})") if studio_name: studio_name += POSTFIX_STYLES.get('studio') - if debugTracing: logger.info("Debug 
Tracing................") if include_keyField_if_in_name or studio_name.lower() not in title.lower(): if WRAPPER_STYLES.get('studio'): filename_parts.append(f"{WRAPPER_STYLES['studio'][0]}{studio_name}{WRAPPER_STYLES['studio'][1]}") @@ -246,23 +175,21 @@ def add_tag(tag_name): else: filename_parts.append(title) elif key == 'performers': - if settings["performerAppend"]: + if stash.pluginSettings["performerAppend"]: performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])]) if performers: performers += POSTFIX_STYLES.get('performers') - if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name})................") + stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name})") if include_keyField_if_in_name or performers.lower() not in title.lower(): - if debugTracing: logger.info(f"Debug Tracing (performers={performers})................") + stash.Trace(f"(performers={performers})") if WRAPPER_STYLES.get('performers'): filename_parts.append(f"{WRAPPER_STYLES['performers'][0]}{performers}{WRAPPER_STYLES['performers'][1]}") else: filename_parts.append(performers) elif key == 'date': scene_date = scene_details.get('date', '') - if debugTracing: logger.info("Debug Tracing................") if scene_date: scene_date += POSTFIX_STYLES.get('date') - if debugTracing: logger.info("Debug Tracing................") if WRAPPER_STYLES.get('date'): scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}" if scene_date not in title: @@ -310,197 +237,53 @@ def add_tag(tag_name): filename_parts.append(frame_rate) elif key == 'galleries': galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])] - if debugTracing: logger.info("Debug Tracing................") for gallery_name in galleries: - if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})................") + stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})") if include_keyField_if_in_name or gallery_name.lower() not in title.lower(): gallery_name += POSTFIX_STYLES.get('galleries') if WRAPPER_STYLES.get('galleries'): filename_parts.append(f"{WRAPPER_STYLES['galleries'][0]}{gallery_name}{WRAPPER_STYLES['galleries'][1]}") - if debugTracing: logger.info("Debug Tracing................") else: filename_parts.append(gallery_name) - if debugTracing: logger.info("Debug Tracing................") - if debugTracing: logger.info(f"Debug Tracing (gallery_name={gallery_name})................") - if debugTracing: logger.info("Debug Tracing................") + stash.Trace(f"(gallery_name={gallery_name})") elif key == 'tags': - if settings["tagAppend"]: + if stash.pluginSettings["tagAppend"]: tags = [tag.get('name', '') for tag in scene_details.get('tags', [])] - if debugTracing: logger.info("Debug Tracing................") for tag_name in tags: - if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})................") + stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})") if include_keyField_if_in_name or tag_name.lower() not in title.lower(): add_tag(tag_name + POSTFIX_STYLES.get('tag')) - if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") - if debugTracing: logger.info("Debug Tracing................") + 
stash.Trace(f"(tag_name={tag_name})") - if debugTracing: logger.info(f"Debug Tracing (filename_parts={filename_parts})................") + stash.Trace(f"(filename_parts={filename_parts})") new_filename = separator.join(filename_parts).replace(double_separator, separator) - if debugTracing: logger.info(f"Debug Tracing (new_filename={new_filename})................") + stash.Trace(f"(new_filename={new_filename})") # Check if the scene's path matches any of the excluded paths if exclude_paths and should_exclude_path(scene_details): - logger.info(f"Scene belongs to an excluded path. Skipping filename modification.") + stash.Log(f"Scene belongs to an excluded path. Skipping filename modification.") return Path(scene_details['files'][0]['path']).name # Return the original filename return replace_illegal_characters(new_filename) -def find_scene_by_id(scene_id): - query_find_scene = """ - query FindScene($scene_id: ID!) { - findScene(id: $scene_id) { - id - title - date - files { - path - width - height - video_codec - frame_rate - } - galleries { - title - } - studio { - name - } - performers { - name - } - tags { - name - } - } - } -""" - scene_result = graphql_request(query_find_scene, variables={"scene_id": scene_id}) - return scene_result.get('data', {}).get('findScene') - -def move_or_rename_files(scene_details, new_filename, original_parent_directory): +def rename_scene(scene_id): global exitMsg - studio_directory = None - for file_info in scene_details['files']: - path = file_info['path'] - original_path = Path(path) - - # Check if the file's path matches any of the excluded paths - if exclude_paths and any(original_path.match(exclude_path) for exclude_path in exclude_paths): - logger.info(f"File {path} belongs to an excluded path. Skipping modification.") - continue - - new_path = original_parent_directory if not move_files else original_parent_directory / scene_details['studio']['name'] - if rename_files: - new_path = new_path / (new_filename + original_path.suffix) - try: - if move_files: - if studio_directory is None: - studio_directory = original_parent_directory / scene_details['studio']['name'] - studio_directory.mkdir(parents=True, exist_ok=True) - if rename_files: # Check if rename_files is True - if not dry_run: - shutil.move(original_path, new_path) - logger.info(f"{dry_run_prefix}Moved and renamed file: {path} -> {new_path}") - else: - if not dry_run: - shutil.move(original_path, new_path) - logger.info(f"{dry_run_prefix}Moved file: {path} -> {new_path}") - else: - if rename_files: # Check if rename_files is True - if not dry_run: - original_path.rename(new_path) - logger.info(f"{dry_run_prefix}Renamed file: {path} -> {new_path}") - else: - if not dry_run: - shutil.move(original_path, new_path) - logger.info(f"{dry_run_prefix}Moved file: {path} -> {new_path}") - except FileNotFoundError: - log.error(f"File not found: {path}. Skipping...") - logger.error(f"File not found: {path}. Skipping...") - exitMsg = "File not found" - continue - except OSError as e: - log.error(f"Failed to move or rename file: {path}. Error: {e}") - logger.error(f"Failed to move or rename file: {path}. 
Error: {e}") - exitMsg = "Failed to move or rename file" - continue - return new_path # Return the new_path variable after the loop - -def perform_metadata_scan(metadata_scan_path): - metadata_scan_path_windows = metadata_scan_path.resolve().as_posix() - mutation_metadata_scan = """ - mutation { - metadataScan(input: { paths: "%s" }) - } - """ % metadata_scan_path_windows - if debugTracing: - logger.info(f"Attempting metadata scan mutation with path: {metadata_scan_path_windows}") - logger.info(f"Mutation string: {mutation_metadata_scan}") - graphql_request(mutation_metadata_scan) - -def rename_scene(scene_id, stash_directory): - global exitMsg - scene_details = find_scene_by_id(scene_id) - if debugTracing: logger.info(f"Debug Tracing (scene_details={scene_details})................") + scene_details = stash.find_scene(scene_id) + stash.Trace(f"(scene_details1={scene_details})") if not scene_details: - log.error(f"Scene with ID {scene_id} not found.") - logger.error(f"Scene with ID {scene_id} not found.") - return - - if debugTracing: logger.info(f"Debug Tracing................") - + stash.Error(f"Scene with ID {scene_id} not found.") + return None original_file_path = scene_details['files'][0]['path'] original_parent_directory = Path(original_file_path).parent - if debugTracing: logger.info(f"Debug Tracing (original_file_path={original_file_path})................") - + stash.Trace(f"(original_file_path={original_file_path})") # Check if the scene's path matches any of the excluded paths if exclude_paths and any(Path(original_file_path).match(exclude_path) for exclude_path in exclude_paths): - logger.info(f"Scene with ID {scene_id} belongs to an excluded path. Skipping modifications.") - return - - if debugTracing: logger.info(f"Debug Tracing................") - original_path_info = {'original_file_path': original_file_path, - 'original_parent_directory': original_parent_directory} - - new_path_info = None - - original_file_stem = Path(original_file_path).stem - original_file_name = Path(original_file_path).name - new_filename = form_filename(original_file_stem, scene_details) - newFilenameWithExt = new_filename + Path(original_file_path).suffix - if debugTracing: logger.info(f"Debug Tracing (original_file_name={original_file_name})(newFilenameWithExt={newFilenameWithExt})................") - if original_file_name == newFilenameWithExt: - logger.info(f"Nothing to do, because new file name matches original file name: (newFilenameWithExt={newFilenameWithExt})") - return - if debugTracing: logger.info(f"Debug Tracing................") - - if rename_files: - new_path = original_parent_directory / (newFilenameWithExt) - new_path_info = {'new_file_path': new_path} - if debugTracing: logger.info(f"{dry_run_prefix}New filename: {new_path}") - - if move_files and original_parent_directory.name != scene_details['studio']['name']: - new_path = original_parent_directory / scene_details['studio']['name'] / (new_filename + Path(original_file_path).suffix) - new_path_info = {'new_file_path': new_path} - move_or_rename_files(scene_details, new_filename, original_parent_directory) - logger.info(f"{dry_run_prefix}Moved to directory: '{new_path}'") - - # If rename_files is True, attempt renaming even if move_files is False - if rename_files: - new_file_path = original_parent_directory / (new_filename + Path(original_file_name).suffix) - if original_file_name != new_filename: - try: - if not dry_run: - os.rename(original_file_path, new_file_path) - logger.info(f"{dry_run_prefix}Renamed file: 
{original_file_path} -> {new_file_path}") - except Exception as e: - exitMsg = "Failed to rename file" - log.error(f"Failed to rename file: {original_file_path}. Error: {e}") - logger.error(f"Failed to rename file: {original_file_path}. Error: {e}") - - metadata_scan_path = original_parent_directory - perform_metadata_scan(metadata_scan_path) + stash.Log(f"Scene with ID {scene_id} belongs to an excluded path. Skipping modifications.") + return None + original_file_stem = Path(original_file_path).stem + original_file_name = Path(original_file_path).name + new_filename = form_filename(original_file_stem, scene_details) max_filename_length = int(config["max_filename_length"]) if len(new_filename) > max_filename_length: extension_length = len(Path(original_file_path).suffix) @@ -508,61 +291,61 @@ def rename_scene(scene_id, stash_directory): truncated_filename = new_filename[:max_base_filename_length] hash_suffix = hashlib.md5(new_filename.encode()).hexdigest() new_filename = truncated_filename + '_' + hash_suffix + Path(original_file_path).suffix + newFilenameWithExt = new_filename + Path(original_file_path).suffix + new_file_path = f"{original_parent_directory}{os.sep}{new_filename}{Path(original_file_name).suffix}" + stash.Trace(f"(original_file_name={original_file_name})(new_file_path={new_file_path})") + if original_file_name == newFilenameWithExt or original_file_name == new_filename: + stash.Log(f"Nothing to do, because new file name matches original file name: (newFilenameWithExt={newFilenameWithExt})") + return None + targetDidExist = True if os.path.isfile(new_file_path) else False + try: + if move_files: + if not dry_run: + shutil.move(original_file_path, new_file_path) + exitMsg = f"{dry_run_prefix}Moved file to '{new_file_path}' from '{original_file_path}'" + else: + if not dry_run: + os.rename(original_file_path, new_file_path) + exitMsg = f"{dry_run_prefix}Renamed file to '{new_file_path}' from '{original_file_path}'" + except OSError as e: + exitMsg = f"Failed to move/rename file: From {original_file_path} to {new_file_path}. 
Error: {e}" + stash.Error(exitMsg) + if not targetDidExist and os.path.isfile(new_file_path): + if os.path.isfile(original_file_path): + os.remove(original_file_path) + pass + else: + raise - if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................") - return new_filename, original_path_info, new_path_info + stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()]) + stash.Log(exitMsg) + return new_filename -# Main default function for rename scene def rename_files_task(): - if debugTracing: logger.info("Debug Tracing................") - # Execute the GraphQL query to fetch all scenes - scene_result = graphql_request(QUERY_ALL_SCENES) - if debugTracing: logger.info("Debug Tracing................") - all_scenes = scene_result.get('data', {}).get('allScenes', []) - if debugTracing: logger.info("Debug Tracing................") + scene_result = stash.get_all_scenes() + all_scenes = scene_result['allScenes'] if not all_scenes: - if debugTracing: logger.info("Debug Tracing................") - log.error("No scenes found.") - logger.error("No scenes found.") + stash.Error("No scenes found.") exit() - if debugTracing: logger.info("Debug Tracing................") - # Find the scene with the latest updated_at timestamp latest_scene = max(all_scenes, key=lambda scene: scene['updated_at']) - # Extract the ID of the latest scene latest_scene_id = latest_scene.get('id') - - # Read stash directory from renamefile_settings.py - stash_directory = config.get('stash_directory', '') - if debugTracing: logger.info("Debug Tracing................") - # Rename the latest scene and trigger metadata scan - new_filename = rename_scene(latest_scene_id, stash_directory) - if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................") - + new_filename = rename_scene(latest_scene_id) # Log dry run state and indicate if no changes were made if dry_run: - log.info("Dry run: Script executed in dry run mode. No changes were made.") - logger.info("Dry run: Script executed in dry run mode. No changes were made.") + stash.Log("Dry run: Script executed in dry run mode. No changes were made.") elif not new_filename: - logger.info("No changes were made.") - else: - logger.info(f"{exitMsg}") - return - -def fetch_dup_filename_tags(): # Place holder for new implementation + stash.Log("No changes were made.") return -if PLUGIN_ARGS_MODE == "fetch_dup_filename_tags": - fetch_dup_filename_tags() -elif PLUGIN_ARGS_MODE == "rename_files_task": +if stash.PLUGIN_TASK_NAME == "rename_files_task": rename_files_task() elif inputToUpdateScenePost: rename_files_task() -if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************") +stash.Trace("\n*********************************\nEXITING ***********************\n*********************************") # ToDo: Wish List - # Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan. # Add code to get tags from duplicate filenames \ No newline at end of file diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index ca2c8f53..d2bcf1a3 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,6 +1,6 @@ name: RenameFile description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. 
-version: 0.4.3 +version: 0.4.6 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: performerAppend: @@ -19,9 +19,9 @@ settings: displayName: Include Existing Key Field description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name. type: BOOLEAN - zafileRenameViaRaname: - displayName: Rename Instead of Move - description: Enable to rename file instead of Move file. (Not recommended for Windows OS) + zafileRenameViaMove: + displayName: Move Instead of Rename + description: Enable to move file instead of rename file. (Not recommended for Windows OS) type: BOOLEAN zfieldKeyList: displayName: Key Fields diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py index 6a4445db..a84aef41 100644 --- a/plugins/RenameFile/renamefile_settings.py +++ b/plugins/RenameFile/renamefile_settings.py @@ -38,13 +38,11 @@ "date": '', }, # Add tags to exclude from RenameFile. - "excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"], + "excludeTags": ["DuplicateMarkForDeletion", "DuplicateMarkForSwap", "DuplicateWhitelistFile","_DuplicateMarkForDeletion","_DuplicateMarkForSwap", "_DuplicateWhitelistFile"], # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" "pathToExclude": "", # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3" "tagWhitelist": "", - # Define whether files should be renamed when moved - "rename_files": True, # Define whether the original file name should be used if title is empty "if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False. # Current Stash DB schema only allows maximum base file name length to be 255 From cf6147b2e8f9fc2de0ebb06ce4954f2ab65147fb Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Wed, 28 Aug 2024 04:45:25 -0400 Subject: [PATCH 30/39] Adding DupFileManager plugin --- plugins/DupFileManager/DupFileManager.py | 469 ++++++++++++++++ plugins/DupFileManager/DupFileManager.yml | 70 +++ .../DupFileManager/DupFileManager_config.py | 26 + plugins/DupFileManager/README.md | 39 ++ plugins/DupFileManager/StashPluginHelper.py | 526 ++++++++++++++++++ plugins/DupFileManager/requirements.txt | 4 + 6 files changed, 1134 insertions(+) create mode 100644 plugins/DupFileManager/DupFileManager.py create mode 100644 plugins/DupFileManager/DupFileManager.yml create mode 100644 plugins/DupFileManager/DupFileManager_config.py create mode 100644 plugins/DupFileManager/README.md create mode 100644 plugins/DupFileManager/StashPluginHelper.py create mode 100644 plugins/DupFileManager/requirements.txt diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py new file mode 100644 index 00000000..a1b2d541 --- /dev/null +++ b/plugins/DupFileManager/DupFileManager.py @@ -0,0 +1,469 @@ +# Description: This is a Stash plugin which manages duplicate files. 
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager +# Note: To call this script outside of Stash, pass argument --url +# Example: python DupFileManager.py --url http://localhost:9999 -a + +# Research: +# Research following links to complete this plugin: +# Python library for parse-reparsepoint +# https://pypi.org/project/parse-reparsepoint/ +# pip install parse-reparsepoint +import os, sys, time, pathlib, argparse, platform, shutil, logging +from StashPluginHelper import StashPluginHelper +from DupFileManager_config import config # Import config from DupFileManager_config.py + +parser = argparse.ArgumentParser() +parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') +parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') +parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.') +parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.') +parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.') +parse_args = parser.parse_args() + +settings = { + "mergeDupFilename": False, + "permanentlyDelete": False, + "whitelistDelDupInSameFolder": False, + "whitelistDoTagLowResDup": False, + "zCleanAfterDel": False, + "zSwapHighRes": False, + "zSwapLongLength": False, + "zWhitelist": "", + "zxGraylist": "", + "zyBlacklist": "", + "zyMaxDupToProcess": 0, + "zzdebugTracing": False, +} +stash = StashPluginHelper( + stash_url=parse_args.stash_url, + debugTracing=parse_args.trace, + settings=settings, + config=config, + maxbytes=10*1024*1024, + ) +if len(sys.argv) > 1: + stash.Log(f"argv = {sys.argv}") +else: + stash.Trace(f"No command line arguments. 
JSON_INPUT['args'] = {stash.JSON_INPUT['args']}") +stash.Status(logLevel=logging.DEBUG) + +# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") +# stash.encodeToUtf8 = True + + +LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE +listSeparator = stash.Setting('listSeparator', ',', notEmpty=True) +addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails') +mergeDupFilename = stash.Setting('mergeDupFilename') +moveToTrashCan = False if stash.Setting('permanentlyDelete') else True +alternateTrashCanPath = stash.Setting('dup_path') +whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder') +whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup') +maxDupToProcess = int(stash.Setting('zyMaxDupToProcess')) +swapHighRes = stash.Setting('zSwapHighRes') +swapLongLength = stash.Setting('zSwapLongLength') +significantTimeDiff = stash.Setting('significantTimeDiff') +toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap') +cleanAfterDel = stash.Setting('zCleanAfterDel') +duration_diff = float(stash.Setting('duration_diff')) +if duration_diff > 10: + duration_diff = 10 +elif duration_diff < 1: + duration_diff = 1 + +# significantTimeDiff can not be higher than 1 and shouldn't be lower than .5 +if significantTimeDiff > 1: + significantTimeDiff = 1 +if significantTimeDiff < .5: + significantTimeDiff = .5 + + +duplicateMarkForDeletion = stash.Setting('DupFileTag') +if duplicateMarkForDeletion == "": + duplicateMarkForDeletion = 'DuplicateMarkForDeletion' + +duplicateWhitelistTag = stash.Setting('DupWhiteListTag') +if duplicateWhitelistTag == "": + duplicateWhitelistTag = 'DuplicateWhitelistFile' + +excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag] +stash.init_mergeMetadata(excludeMergeTags) + +graylist = stash.Setting('zxGraylist').split(listSeparator) +graylist = [item.lower() for item in graylist] +if graylist == [""] : graylist = [] +stash.Trace(f"graylist = {graylist}") +whitelist = stash.Setting('zWhitelist').split(listSeparator) +whitelist = [item.lower() for item in whitelist] +if whitelist == [""] : whitelist = [] +stash.Trace(f"whitelist = {whitelist}") +blacklist = stash.Setting('zyBlacklist').split(listSeparator) +blacklist = [item.lower() for item in blacklist] +if blacklist == [""] : blacklist = [] +stash.Trace(f"blacklist = {blacklist}") + +def realpath(path): + """ + get_symbolic_target for win + """ + try: + import win32file + f = win32file.CreateFile(path, win32file.GENERIC_READ, + win32file.FILE_SHARE_READ, None, + win32file.OPEN_EXISTING, + win32file.FILE_FLAG_BACKUP_SEMANTICS, None) + target = win32file.GetFinalPathNameByHandle(f, 0) + # an above gives us something like u'\\\\?\\C:\\tmp\\scalarizr\\3.3.0.7978' + return target.strip('\\\\?\\') + except ImportError: + handle = open_dir(path) + target = get_symbolic_target(handle) + check_closed(handle) + return target + +def isReparsePoint(path): + import win32api + import win32con + from parse_reparsepoint import Navigator + FinalPathname = realpath(path) + stash.Log(f"(path='{path}') (FinalPathname='{FinalPathname}')") + if FinalPathname != path: + stash.Log(f"Symbolic link '{path}'") + return True + if not os.path.isdir(path): + path = os.path.dirname(path) + return win32api.GetFileAttributes(path) & 
win32con.FILE_ATTRIBUTE_REPARSE_POINT + +def testReparsePointAndSymLink(merge=False, deleteDup=False): + stash.Trace(f"Debug Tracing (platform.system()={platform.system()})") + myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link + myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point + myTestPath3 = r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4" #symbolic link + myTestPath4 = r"E:\Stash\plugins\RenameFile\README.md" #symbolic link + myTestPath5 = r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md" #symbolic link + myTestPath6 = r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md" # not reparse point + stash.Log(f"Testing '{myTestPath1}'") + if isReparsePoint(myTestPath1): + stash.Log(f"isSymLink '{myTestPath1}'") + else: + stash.Log(f"Not isSymLink '{myTestPath1}'") + + if isReparsePoint(myTestPath2): + stash.Log(f"isSymLink '{myTestPath2}'") + else: + stash.Log(f"Not isSymLink '{myTestPath2}'") + + if isReparsePoint(myTestPath3): + stash.Log(f"isSymLink '{myTestPath3}'") + else: + stash.Log(f"Not isSymLink '{myTestPath3}'") + + if isReparsePoint(myTestPath4): + stash.Log(f"isSymLink '{myTestPath4}'") + else: + stash.Log(f"Not isSymLink '{myTestPath4}'") + + if isReparsePoint(myTestPath5): + stash.Log(f"isSymLink '{myTestPath5}'") + else: + stash.Log(f"Not isSymLink '{myTestPath5}'") + + if isReparsePoint(myTestPath6): + stash.Log(f"isSymLink '{myTestPath6}'") + else: + stash.Log(f"Not isSymLink '{myTestPath6}'") + return + + +def createTagId(tagName, tagName_descp, deleteIfExist = False): + tagId = stash.find_tags(q=tagName) + if len(tagId): + tagId = tagId[0] + if deleteIfExist: + stash.destroy_tag(int(tagId['id'])) + else: + return tagId['id'] + tagId = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True}) + stash.Log(f"Dup-tagId={tagId['id']}") + return tagId['id'] + +def setTagId(tagId, tagName, sceneDetails, DupFileToKeep): + details = "" + ORG_DATA_DICT = {'id' : sceneDetails['id']} + dataDict = ORG_DATA_DICT.copy() + doAddTag = True + if addPrimaryDupPathToDetails: + BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n" + if sceneDetails['details'] == "": + details = BaseDupStr + elif not sceneDetails['details'].startswith(BaseDupStr): + details = f"{BaseDupStr};\n{sceneDetails['details']}" + for tag in sceneDetails['tags']: + if tag['name'] == tagName: + doAddTag = False + break + if doAddTag: + dataDict.update({'tag_ids' : tagId}) + if details != "": + dataDict.update({'details' : details}) + if dataDict != ORG_DATA_DICT: + stash.update_scene(dataDict) + stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True) + else: + stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True) + + +def isInList(listToCk, pathToCk): + pathToCk = pathToCk.lower() + for item in listToCk: + if pathToCk.startswith(item): + return True + return False + +def hasSameDir(path1, path2): + if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent: + return True + return False + +def sendToTrash(path): + if not os.path.isfile(path): + stash.Warn(f"File does not exist: {path}.", toAscii=True) + return False + try: + from send2trash import send2trash # Requirement: pip install Send2Trash + send2trash(path) + return True + except Exception as e: + 
stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True)
+        try:
+            if os.path.isfile(path):
+                os.remove(path)
+                return True
+        except Exception as e:
+            stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
+    return False
+
+def significantLessTime(durationToKeep, durationOther):
+    timeDiff = durationToKeep / durationOther
+    if timeDiff < significantTimeDiff:
+        return True
+    return False
+
+def isSwapCandidate(DupFileToKeep, DupFile):
+    # Don't move if both are in whitelist
+    if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']):
+        return False
+    if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
+        if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
+            return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
+    if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
+        if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
+            return True
+    return False
+
+def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
+    duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so as to tag them for deletion.'
+    stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
+    dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp)
+    stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
+
+    dupWhitelistTagId = None
+    if whitelistDoTagLowResDup:
+        stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
+        duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
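+        # The whitelist tag is only created when whitelistDoTagLowResDup is enabled;
+        # otherwise dupWhitelistTagId stays None, and whitelist duplicates are later
+        # skipped without being tagged (see the QtySkipForDel branch below).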
+ dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp) + stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") + + QtyDupSet = 0 + QtyDup = 0 + QtyExactDup = 0 + QtyAlmostDup = 0 + QtyRealTimeDiff = 0 + QtyTagForDel = 0 + QtySkipForDel = 0 + QtySwap = 0 + QtyMerge = 0 + QtyDeleted = 0 + stash.Log("#########################################################################") + stash.Trace("#########################################################################") + stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN) + DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff) + qtyResults = len(DupFileSets) + stash.Trace("#########################################################################") + for DupFileSet in DupFileSets: + stash.Trace(f"DupFileSet={DupFileSet}") + QtyDupSet+=1 + stash.Progress(QtyDupSet, qtyResults) + SepLine = "---------------------------" + DupFileToKeep = "" + DupToCopyFrom = "" + DupFileDetailList = [] + for DupFile in DupFileSet: + QtyDup+=1 + stash.log.sl.progress(f"Scene ID = {DupFile['id']}") + time.sleep(2) + Scene = stash.find_scene(DupFile['id']) + sceneData = f"Scene = {Scene}" + stash.Trace(sceneData, toAscii=True) + DupFileDetailList = DupFileDetailList + [Scene] + if DupFileToKeep != "": + if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference + QtyExactDup+=1 + else: + QtyAlmostDup+=1 + SepLine = "***************************" + if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])): + QtyRealTimeDiff += 1 + if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']): + DupFileToKeep = Scene + elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']): + DupFileToKeep = Scene + elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']): + DupFileToKeep = Scene + elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']): + DupFileToKeep = Scene + elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']): + DupFileToKeep = Scene + elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']): + DupFileToKeep = Scene + elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']): + DupFileToKeep = Scene + else: + DupFileToKeep = Scene + # stash.Trace(f"DupFileToKeep = {DupFileToKeep}") + stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True) + + for DupFile in DupFileDetailList: + if DupFile['id'] != DupFileToKeep['id']: + if merge: + result = stash.merge_metadata(DupFile, DupFileToKeep) + if result != "Nothing To Merge": + QtyMerge += 1 + + if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])): + if isSwapCandidate(DupFileToKeep, DupFile): + if merge: + 
stash.merge_metadata(DupFileToKeep, DupFile) + if toRecycleBeforeSwap: + sendToTrash(DupFile['files'][0]['path']) + shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path']) + stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + DupFileToKeep = DupFile + QtySwap+=1 + else: + stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True) + if dupWhitelistTagId and tagDuplicates: + setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep) + QtySkipForDel+=1 + else: + if deleteDup: + DupFileName = DupFile['files'][0]['path'] + DupFileNameOnly = pathlib.Path(DupFileName).stem + stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + if alternateTrashCanPath != "": + destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" + if os.path.isfile(destPath): + destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" + shutil.move(DupFileName, destPath) + elif moveToTrashCan: + sendToTrash(DupFileName) + stash.destroy_scene(DupFile['id'], delete_file=True) + QtyDeleted += 1 + elif tagDuplicates: + if QtyTagForDel == 0: + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + else: + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep) + QtyTagForDel+=1 + stash.Trace(SepLine) + if maxDupToProcess > 0 and QtyDup > maxDupToProcess: + break + + stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN) + if cleanAfterDel: + stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) + stash.metadata_clean(paths=stash.STASH_PATHS) + stash.metadata_clean_generated() + stash.optimise_database() + +def deleteTagggedDuplicates(): + tagId = stash.find_tags(q=duplicateMarkForDeletion) + if len(tagId) > 0 and 'id' in tagId[0]: + tagId = tagId[0]['id'] + else: + stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.") + return + QtyDup = 0 + QtyDeleted = 0 + QtyFailedQuery = 0 + stash.Trace("#########################################################################") + sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id') + qtyResults = len(sceneIDs) + stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}") + for sceneID in sceneIDs: + # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.") + QtyDup += 1 + prgs = QtyDup / qtyResults + stash.Progress(QtyDup, qtyResults) + scene = stash.find_scene(sceneID['id']) + if scene == None or len(scene) == 0: + stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.") + QtyFailedQuery += 1 + continue + # stash.Log(f"scene={scene}") + DupFileName = scene['files'][0]['path'] + DupFileNameOnly = pathlib.Path(DupFileName).stem + stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + if alternateTrashCanPath != "": + destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" + if 
os.path.isfile(destPath):
+                destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+            shutil.move(DupFileName, destPath)
+        elif moveToTrashCan:
+            sendToTrash(DupFileName)
+        result = stash.destroy_scene(scene['id'], delete_file=True)
+        stash.Trace(f"destroy_scene result={result} for file {DupFileName}", toAscii=True)
+        QtyDeleted += 1
+    stash.Log(f"QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
+    return
+
+def testSetDupTagOnScene(sceneId):
+    scene = stash.find_scene(sceneId)
+    stash.Log(f"scene={scene}")
+    stash.Log(f"scene tags={scene['tags']}")
+    tag_ids = [dupTagId]
+    for tag in scene['tags']:
+        tag_ids = tag_ids + [tag['id']]
+    stash.Log(f"tag_ids={tag_ids}")
+    stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids})
+
+if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+    mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
+    deleteTagggedDuplicates()
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
+    mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif parse_args.dup_tag:
+    mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+    stash.Trace(f"Tag duplicate EXIT")
+elif parse_args.del_tag:
+    deleteTagggedDuplicates()
+    stash.Trace(f"Delete Tagged duplicates EXIT")
+elif parse_args.remove:
+    mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+    stash.Trace(f"Delete duplicate EXIT")
+else:
+    stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})")
+
+
+
+
+
+stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
new file mode 100644
index 00000000..497aca1b
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -0,0 +1,70 @@
+name: DupFileManager
+description: Manages duplicate files.
+version: 0.1.2
+url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+settings:
+  mergeDupFilename:
+    displayName: Merge Duplicate Tags
+    description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+    type: BOOLEAN
+  permanentlyDelete:
+    displayName: Permanent Delete
+    description: Enable to permanently delete files, instead of moving files to trash can.
+    type: BOOLEAN
+  whitelistDelDupInSameFolder:
+    displayName: Whitelist Delete In Same Folder
+    description: Allow whitelist deletion of duplicates within the same whitelist folder.
+    type: BOOLEAN
+  whitelistDoTagLowResDup:
+    displayName: Whitelist Duplicate Tagging
+    description: Enable to tag whitelist duplicates of lower resolution or duration or same folder.
+    type: BOOLEAN
+  zCleanAfterDel:
+    displayName: Run Clean After Delete
+    description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
+    type: BOOLEAN
+  zSwapHighRes:
+    displayName: Swap High Resolution
+    description: If enabled, swap higher resolution duplicate files to preferred path.
+    type: BOOLEAN
+  zSwapLongLength:
+    displayName: Swap Longer Duration
+    description: If enabled, swap longer duration media files to preferred path. Longer is determined by the significantTimeDiff setting in DupFileManager_config.py.
+    type: BOOLEAN
+  zWhitelist:
+    displayName: White List
+    description: A comma-separated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
+    type: STRING
+  zxGraylist:
+    displayName: Gray List
+    description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+    type: STRING
+  zyBlacklist:
+    displayName: Black List
+    description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
+    type: STRING
+  zyMaxDupToProcess:
+    displayName: Max Dup Process
+    description: Maximum number of duplicates to process. If 0, there is no limit.
+    type: NUMBER
+  zzdebugTracing:
+    displayName: Debug Tracing
+    description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
+    type: BOOLEAN
+exec:
+  - python
+  - "{pluginDir}/DupFileManager.py"
+interface: raw
+tasks:
+  - name: Tag Duplicates
+    description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, or black list path.
+    defaultArgs:
+      mode: tag_duplicates_task
+  - name: Delete Tagged Duplicates
+    description: Only delete scenes having DuplicateMarkForDeletion tag.
+    defaultArgs:
+      mode: delete_tagged_duplicates_task
+  - name: Delete Duplicates
+    description: Delete duplicate scenes. Performs deletion without first tagging.
+    defaultArgs:
+      mode: delete_duplicates_task
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
new file mode 100644
index 00000000..ab5b8178
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -0,0 +1,26 @@
+# Description: This is a Stash plugin which manages duplicate files.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+config = {
+    # If enabled, adds the primary duplicate path to the scene detail.
+    "addPrimaryDupPathToDetails" : True,
+    # Alternative path to move duplicate files.
+    "dup_path": "", #Example: "C:\\TempDeleteFolder"
+    # The percentage threshold below which a duration is considered significantly shorter.
+    "significantTimeDiff" : .90, # 90% threshold
+    # Value passed to the stash API function FindDuplicateScenes.
+    "duration_diff" : 10, # (default=10) A value from 1 to 10.
+    # If enabled, moves destination file to recycle bin before swapping Hi-Res file.
+    "toRecycleBeforeSwap" : True,
+    # Character used to separate items on the whitelist, blacklist, and graylist
+    "listSeparator" : ",",
+    # Tag used to tag duplicates with lower resolution, duration, and file name length.
+    "DupFileTag" : "DuplicateMarkForDeletion",
+    # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
+    "DupWhiteListTag" : "DuplicateWhitelistFile",
+
+    # The following fields are ONLY used when running DupFileManager in script mode
+    "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
+    "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
+    "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
+}
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
new file mode 100644
index 00000000..d5b35dfc
--- /dev/null
+++ b/plugins/DupFileManager/README.md
@@ -0,0 +1,39 @@
+# DupFileManager: Ver 0.1.2 (By David Maisonave)
+DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate files in the Stash system.
+### Features
+- Can merge potential metadata sources found in duplicate file names for tag names, performers, and studios.
+  - Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
+- Delete duplicate files task with the following options:
+  - Tasks (Settings->Task->[Plugin Tasks]->DupFileManager)
+    - **Tag Duplicates** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
+    - **Delete Duplicates** - Deletes duplicate files
+  - Plugin UI options (Settings->Plugins->Plugins->[DupFileManager])
+    - Use a white-list of preferential directories to determine which duplicate will be the primary.
+    - Use a gray-list of preferential directories to determine which duplicate should be the primary.
+    - Use a black-list to determine which duplicates should be deleted first.
+    - **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
+    - **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
+    - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+    - **Swap High Resolution** - When enabled, swaps higher resolution duplicate files to the preferred path.
+    - **Swap Longer Duration** - When enabled, swaps the scene with the longer duration to the preferred path.
+  - Options available via DupFileManager_config.py
+    - **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
+    - **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
+
+### Requirements
+`pip install --upgrade stashapp-tools`
+`pip install pyYAML`
+`pip install Send2Trash`
+
+### Installation
+- Follow **Requirements** instructions.
+- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **DupFileManager**.
+- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\DupFileManager**).
+- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins.
+
+That's it!!!
+
+### Options
+- Options are accessible in the GUI via Settings->Plugins->Plugins->[DupFileManager].
+- More options available in DupFileManager_config.py.
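+
+### Command Line Usage
+DupFileManager can also be run outside of Stash. The following examples use the command line arguments defined in DupFileManager.py:
+- Tag duplicates for deletion: `python DupFileManager.py --url http://localhost:9999 --add_dup_tag`
+- Delete only scenes already tagged with DuplicateMarkForDeletion: `python DupFileManager.py --url http://localhost:9999 --del_tag_dup`
+- Delete duplicate files: `python DupFileManager.py --url http://localhost:9999 --remove_dup`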
+
diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py
new file mode 100644
index 00000000..6f0d3d15
--- /dev/null
+++ b/plugins/DupFileManager/StashPluginHelper.py
@@ -0,0 +1,526 @@
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+import __main__
+
+_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
+
+# StashPluginHelper (By David Maisonave aka Axter)
+    # See end of this file for example usage
+    # Log Features:
+        # Can optionally log out to multiple outputs for each Log or Trace call.
+        # Logging includes source code line number
+        # Sets a maximum plugin log file size
+    # Stash Interface Features:
+        # Gets STASH_URL value from command line argument and/or from STDIN_READ
+        # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
+        # Sets PLUGIN_ID based on the main script file name (in lower case)
+        # Gets PLUGIN_TASK_NAME value
+        # Sets pluginSettings (The plugin UI settings)
+    # Misc Features:
+        # Gets DRY_RUN value from command line argument and/or from UI and/or from config file
+        # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
+        # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
+        # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+class StashPluginHelper(StashInterface):
+    # Primary Members for external reference
+    PLUGIN_TASK_NAME = None
+    PLUGIN_ID = None
+    PLUGIN_CONFIGURATION = None
+    PLUGINS_PATH = None
+    pluginSettings = None
+    pluginConfig = None
+    STASH_URL = None
+    STASH_CONFIGURATION = None
+    JSON_INPUT = None
+    DEBUG_TRACING = False
+    DRY_RUN = False
+    CALLED_AS_STASH_PLUGIN = False
+    RUNNING_IN_COMMAND_LINE_MODE = False
+    FRAGMENT_SERVER = None
+    STASHPATHSCONFIG = None
+    STASH_PATHS = []
+    API_KEY = None
+    excludeMergeTags = None
+
+    # printTo argument
+    LOG_TO_FILE = 1
+    LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+    LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+    LOG_TO_STASH = 8
+    LOG_TO_WARN = 16
+    LOG_TO_ERROR = 32
+    LOG_TO_CRITICAL = 64
+    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+
+    # Misc class variables
+    MAIN_SCRIPT_NAME = None
+    LOG_LEVEL = logging.INFO
+    LOG_FILE_DIR = None
+    LOG_FILE_NAME = None
+    STDIN_READ = None
+    pluginLog = None
+    logLinePreviousHits = []
+    thredPool = None
+    STASH_INTERFACE_INIT = False
+    _mergeMetadata = None
+    encodeToUtf8 = False
+    convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+
+    # Prefix message value
+    LEV_TRACE = "TRACE: "
+    LEV_DBG = "DBG: "
+    LEV_INF = "INF: "
+    LEV_WRN = "WRN: "
+    LEV_ERR = "ERR: "
+    LEV_CRITICAL = "CRITICAL: "
+
+    # Default format
+    LOG_FORMAT = "[%(asctime)s] %(message)s"
+
+    # Externally modifiable variables
+    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set the target output for normal logging
+    # Warn messages go to both the plugin log file and Stash when sent to the Stash log file.
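+    # The LOG_TO_* values are bit flags, so log targets can be combined with +.
+    # For example, log_to_wrn_set = LOG_TO_FILE + LOG_TO_STASH sends warnings to both targets.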
+    log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+
+    def __init__(self,
+                 debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
+                 logFormat = LOG_FORMAT, # Plugin log line format
+                 dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+                 maxbytes = 8*1024*1024, # Max size of plugin log file
+                 backupcount = 2, # Backup counts when log file size reaches max size
+                 logToWrnSet = 0, # Customize the target output set which will get warning logging
+                 logToErrSet = 0, # Customize the target output set which will get error logging
+                 logToNormSet = 0, # Customize the target output set which will get normal logging
+                 logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
+                 mainScriptName = "", # The main plugin script file name (full path)
+                 pluginID = "",
+                 settings = None, # Default settings for UI fields
+                 config = None, # From pluginName_config.py or pluginName_setting.py
+                 fragmentServer = None,
+                 stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
+                 apiKey = None, # API Key only needed when username and password set while running script via command line
+                 DebugTraceFieldName = "zzdebugTracing",
+                 DryRunFieldName = "zzdryRun",
+                 setStashLoggerAsPluginLogger = False):
+        self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+        if logToWrnSet: self.log_to_wrn_set = logToWrnSet
+        if logToErrSet: self.log_to_err_set = logToErrSet
+        if logToNormSet: self.log_to_norm = logToNormSet
+        if stash_url and len(stash_url): self.STASH_URL = stash_url
+        self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
+        self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
+        # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
+        self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
+        self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
+        RFH = RotatingFileHandler(
+            filename=self.LOG_FILE_NAME,
+            mode='a',
+            maxBytes=maxbytes,
+            backupCount=backupcount,
+            encoding=None,
+            delay=0
+        )
+        if fragmentServer:
+            self.FRAGMENT_SERVER = fragmentServer
+        else:
+            self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
+
+        if debugTracing: self.DEBUG_TRACING = debugTracing
+        if config:
+            self.pluginConfig = config
+            if self.Setting('apiKey', "") != "":
+                self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey')
+
+
+        if apiKey and apiKey != "":
+            self.FRAGMENT_SERVER['ApiKey'] = apiKey
+
+        if len(sys.argv) > 1:
+            self.RUNNING_IN_COMMAND_LINE_MODE = True
+            if not debugTracing or not stash_url:
+                for argValue in sys.argv[1:]:
+                    if argValue.lower() == "--trace":
+                        self.DEBUG_TRACING = True
+                    elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
+                        self.DRY_RUN = True
+                    elif ":" in argValue and not self.STASH_URL:
+                        self.STASH_URL = argValue
+            if 
self.STASH_URL: + endpointUrlArr = self.STASH_URL.split(":") + if len(endpointUrlArr) == 3: + self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0] + self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:] + self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2] + super().__init__(self.FRAGMENT_SERVER) + self.STASH_INTERFACE_INIT = True + else: + try: + self.STDIN_READ = sys.stdin.read() + self.CALLED_AS_STASH_PLUGIN = True + except: + pass + if self.STDIN_READ: + self.JSON_INPUT = json.loads(self.STDIN_READ) + if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]: + self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"] + self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"] + self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}" + super().__init__(self.FRAGMENT_SERVER) + self.STASH_INTERFACE_INIT = True + + if self.STASH_URL.startswith("http://0.0.0.0:"): + self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:") + + if self.STASH_INTERFACE_INIT: + self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"] + self.STASH_CONFIGURATION = self.get_configuration()["general"] + self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes'] + if 'pluginsPath' in self.STASH_CONFIGURATION: + self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath'] + for item in self.STASHPATHSCONFIG: + self.STASH_PATHS.append(item["path"]) + if settings: + self.pluginSettings = settings + if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: + self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) + if 'apiKey' in self.STASH_CONFIGURATION: + self.API_KEY = self.STASH_CONFIGURATION['apiKey'] + + self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN) + self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) + if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG + + logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) + self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem) + if setStashLoggerAsPluginLogger: + self.log = self.pluginLog + + def __del__(self): + self.thredPool.shutdown(wait=False) + + def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + if self.pluginSettings != None and name in self.pluginSettings: + if notEmpty == False or self.pluginSettings[name] != "": + return self.pluginSettings[name] + if self.pluginConfig != None and name in self.pluginConfig: + if notEmpty == False or self.pluginConfig[name] != "": + return self.pluginConfig[name] + if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + raise Exception(f"Missing {name} from both UI settings and config file settings.") + return default + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None): + if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg + if printTo == 0: + printTo = self.log_to_norm + elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: + logLevel = logging.ERROR + printTo = self.log_to_err_set + elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO: + logLevel = logging.CRITICAL + printTo = self.log_to_err_set + elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO: + logLevel = logging.WARN + printTo = self.log_to_wrn_set + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + LN_Str = f"[LN:{lineNo}]" + # 
print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}") + if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG): + if levelStr == "": levelStr = self.LEV_DBG + if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.INFO or logLevel == logging.DEBUG: + if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG + if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.WARN: + if levelStr == "": levelStr = self.LEV_WRN + if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.ERROR: + if levelStr == "": levelStr = self.LEV_ERR + if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.CRITICAL: + if levelStr == "": levelStr = self.LEV_CRITICAL + if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) + + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): + if printTo == 0: printTo = self.LOG_TO_FILE + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + logLev = logging.INFO if logAlways else logging.DEBUG + if self.DEBUG_TRACING or logAlways: + if logMsg == "": + logMsg = f"Line number {lineNo}..." + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) + + # Log once per session. Only logs the first time called from a particular line number in the code. + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): + lineNo = inspect.currentframe().f_back.f_lineno + if self.DEBUG_TRACING or logAlways: + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + return + self.logLinePreviousHits.append(FuncAndLineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) + + # Log INFO on first call, then do Trace on remaining calls. 
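+    # Pass traceOnRemainingCalls=False to skip logging entirely on repeat calls.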
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): + if printTo == 0: printTo = self.LOG_TO_FILE + lineNo = inspect.currentframe().f_back.f_lineno + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + if traceOnRemainingCalls: + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) + else: + self.logLinePreviousHits.append(FuncAndLineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) + + def Warn(self, logMsg, printTo = 0, toAscii = None): + if printTo == 0: printTo = self.log_to_wrn_set + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) + + def Error(self, logMsg, printTo = 0, toAscii = None): + if printTo == 0: printTo = self.log_to_err_set + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) + + def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): + if printTo == 0: printTo = self.log_to_norm + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", + printTo, logLevel, lineNo) + + def ExecuteProcess(self, args, ExecDetach=False): + import platform, subprocess + is_windows = any(platform.win32_ver()) + pid = None + self.Trace(f"is_windows={is_windows} args={args}") + if is_windows: + if ExecDetach: + self.Trace("Executing process using Windows DETACHED_PROCESS") + DETACHED_PROCESS = 0x00000008 + pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + else: + pid = subprocess.Popen(args, shell=True).pid + else: + self.Trace("Executing process using normal Popen") + pid = subprocess.Popen(args).pid + self.Trace(f"pid={pid}") + return pid + + def ExecutePythonScript(self, args, ExecDetach=True): + PythonExe = f"{sys.executable}" + argsWithPython = [f"{PythonExe}"] + args + return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) + + def Submit(self, *args, **kwargs): + return self.thredPool.submit(*args, **kwargs) + + def asc2(self, data, convertToAscii=None): + if convertToAscii or (convertToAscii == None and self.convertToAscii): + return ascii(data) + return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than ascii function + # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function + # return str(data)[2:-1] # strip out b'str' + + def init_mergeMetadata(self, excludeMergeTags=None): + self.excludeMergeTags = excludeMergeTags + self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) + + # Must call init_mergeMetadata, before calling merge_metadata + def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) + + def Progress(self, currentIndex, maxCount): + progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) + self.log.progress(progress) + + def run_plugin(self, plugin_id, 
task_mode=None, args:dict={}, asyn=False):
+        """Runs a plugin operation.
+           The operation is run immediately and does not use the job queue.
+        Args:
+            plugin_id (ID): plugin_id
+            task_mode (str, optional): Plugin task to perform
+            args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
+        Returns:
+            A map of the result.
+        """
+        query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
+            runPluginOperation(plugin_id: $plugin_id, args: $args)
+            }"""
+        if task_mode != None:
+            args.update({"mode" : task_mode})
+        variables = {
+            "plugin_id": plugin_id,
+            "args": args,
+        }
+        if asyn:
+            self.Submit(self.call_GQL, query, variables)
+            return f"Made asynchronous call for plugin {plugin_id}"
+        else:
+            return self.call_GQL(query, variables)
+
+    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
+        query = """
+            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
+                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+                    ...SceneSlim
+                }
+            }
+        """
+        if fragment:
+            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+        else:
+            query += "fragment SceneSlim on Scene { id }"
+
+        variables = { "distance": distance, "duration_diff": duration_diff }
+        result = self.call_GQL(query, variables)
+        return result['findDuplicateScenes']
+
+    # #################################################################################################
+    # The functions below extend class StashInterface with functions which are not yet in the class
+    def get_all_scenes(self):
+        query_all_scenes = """
+            query AllScenes {
+                allScenes {
+                    id
+                    updated_at
+                }
+            }
+        """
+        return self.call_GQL(query_all_scenes)
+
+    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
+        query = """
+        mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+            metadataAutoTag(input: $input)
+        }
+        """
+        metadata_autotag_input = {
+            "paths":paths,
+            "performers": performers,
+            "studios":studios,
+            "tags":tags,
+        }
+        result = self.call_GQL(query, {"input": metadata_autotag_input})
+        return result
+
+    def backup_database(self):
+        return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+    def optimise_database(self):
+        return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
+
+    def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
+        query = """
+        mutation MetadataCleanGenerated($input: CleanGeneratedInput!)
{ + metadataCleanGenerated(input: $input) + } + """ + clean_metadata_input = { + "blobFiles": blobFiles, + "dryRun": dryRun, + "imageThumbnails": imageThumbnails, + "markers": markers, + "screenshots": screenshots, + "sprites": sprites, + "transcodes": transcodes, + } + result = self.call_GQL(query, {"input": clean_metadata_input}) + return result + + def rename_generated_files(self): + return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") + +class mergeMetadata: # A class to merge scene metadata from source scene to destination scene + srcData = None + destData = None + stash = None + excludeMergeTags = None + dataDict = None + result = "Nothing To Merge" + def __init__(self, stash, excludeMergeTags=None): + self.stash = stash + self.excludeMergeTags = excludeMergeTags + + def merge(self, SrcData, DestData): + self.srcData = SrcData + self.destData = DestData + ORG_DATA_DICT = {'id' : self.destData['id']} + self.dataDict = ORG_DATA_DICT.copy() + self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) + self.mergeItems('performers', 'performer_ids', []) + self.mergeItems('galleries', 'gallery_ids', []) + self.mergeItems('movies', 'movies', []) + self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) + self.mergeItem('studio', 'studio_id', 'id') + self.mergeItem('title') + self.mergeItem('director') + self.mergeItem('date') + self.mergeItem('details') + self.mergeItem('rating100') + self.mergeItem('code') + if self.dataDict != ORG_DATA_DICT: + self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True) + self.result = self.stash.update_scene(self.dataDict) + return self.result + + def Nothing(self, Data): + if not Data or Data == "" or (type(Data) is str and Data.strip() == ""): + return True + return False + + def mergeItem(self,fieldName, updateFieldName=None, subField=None): + if updateFieldName == None: + updateFieldName = fieldName + if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]): + if subField == None: + self.dataDict.update({ updateFieldName : self.srcData[fieldName]}) + else: + self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]}) + def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None): + dataAdded = "" + for item in self.srcData[fieldName]: + if item not in self.destData[fieldName]: + if NotStartWith == None or not item.startswith(NotStartWith): + if excludeName == None or item['name'] not in excludeName: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + dataAdded += f"{item['movie']['id']} " + elif updateFieldName == None: + listToAdd += [item] + dataAdded += f"{item} " + else: + listToAdd += [item['id']] + dataAdded += f"{item['id']} " + if dataAdded != "": + if updateFieldName == None: + updateFieldName = fieldName + else: + for item in self.destData[fieldName]: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + else: + listToAdd += [item['id']] + self.dataDict.update({ updateFieldName : listToAdd}) + # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) diff --git a/plugins/DupFileManager/requirements.txt b/plugins/DupFileManager/requirements.txt new file mode 100644 index 00000000..d503550d --- /dev/null +++ 
b/plugins/DupFileManager/requirements.txt
@@ -0,0 +1,4 @@
+stashapp-tools >= 0.2.50
+pyYAML
+watchdog
+Send2Trash
\ No newline at end of file

From 41031c0bdc0b7f1eb561ef93d4f550cd74790198 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 05:02:58 -0400
Subject: [PATCH 31/39] prettier changes

---
 plugins/DupFileManager/DupFileManager.yml |  2 +-
 plugins/DupFileManager/README.md          |  9 +++--
 plugins/FileMonitor/README.md             | 41 +++++++++++++++--------
 plugins/FileMonitor/filemonitor.yml       |  2 +-
 4 files changed, 36 insertions(+), 18 deletions(-)

diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index 497aca1b..c75f561f 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -5,7 +5,7 @@ url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileMan
 settings:
   mergeDupFilename:
     displayName: Merge Duplicate Tags
-    description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+    description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
     type: BOOLEAN
   permanentlyDelete:
    displayName: Permanent Delete
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index d5b35dfc..82e3fb7d 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -1,6 +1,9 @@
 # DupFileManager: Ver 0.1.2 (By David Maisonave)
+
 DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate files in the Stash system.
+
 ### Features
+
 - Can merge potential source in the duplicate file names for tag names, performers, and studios.
   - Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
 - Delete duplicate file task with the following options:
@@ -13,7 +16,7 @@
     - Use a black-list to determine which duplicates should be deleted first.
     - **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
     - **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
-    - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+    - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
   - Options available via DupFileManager_config.py
     - **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
     - **swapHighRes** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
@@ -21,11 +24,13 @@
     - **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.

 ### Requirements
+
 `pip install --upgrade stashapp-tools`
 `pip install pyYAML`
 `pip install Send2Trash`

 ### Installation
+
 - Follow **Requirements** instructions.
 - In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **DupFileManager**.
 - Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\DupFileManager**).
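For reference, the DupFileManager_config.py options listed above can be set like ordinary dict entries. A minimal sketch, assuming the file exposes a dict named `config` (as the `from DupFileManager_config import config` import elsewhere in this series suggests); the values shown are illustrative only, not defaults:

```python
# Illustrative DupFileManager_config.py fragment (hypothetical values).
config = {
    # Alternate path to move deleted files to, instead of the recycle bin.
    "dup_path": r"C:\TempDeleteFolder",
    # Swap higher resolution files between whitelist and blacklist/graylist files.
    "swapHighRes": True,
    # Swap the scene with the longer duration.
    "swapLongLength": False,
    # Move the destination file to the recycle bin before swapping files.
    "toRecycleBeforeSwap": True,
}
```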
@@ -34,6 +39,6 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
 That's it!!!

 ### Options
+
 - Options are accessible in the GUI via Settings->Plugins->Plugins->[DupFileManager].
 - More options available in DupFileManager_config.py.
-
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index cca15a93..c801ee28 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,19 +1,24 @@
 # FileMonitor: Ver 0.9.0 (By David Maisonave)
+
 FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
+
 - Updates Stash when any file change occurs in the Stash library.
 - **Task Scheduler**: Runs scheduled tasks based on the scheduler configuration in **filemonitor_config.py**.

 ## Starting FileMonitor from the UI
+
 From the GUI, FileMonitor can be started as a service or as a plugin. The recommended method is to start it as a service. When started as a service, it will jump on the Task Queue momentarily, and then disappear as it starts running in the background.
+
 - To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor Service] button.
   - ![FileMonitorService](https://github.com/user-attachments/assets/b12aeca9-37a8-447f-90da-26e9440735ad)
   - **Important Note**: At first, this will show up as a plugin in the Task Queue momentarily. It will then disappear from the Task Queue and run in the background as a service.
 - To stop FileMonitor, click on the [Stop Library Monitor] button.
 - The **[Monitor as a Plugin]** option is mainly available for backwards compatibility and for test purposes.
-
 ## Using FileMonitor as a script
+
 **FileMonitor** can be called as a standalone script.
+
 - To start monitoring, call the script and pass --url and the Stash URL.
   - python filemonitor.py --url http://localhost:9999
 - To stop **FileMonitor**, pass argument **--stop**.
@@ -24,6 +29,7 @@ From the GUI, FileMonitor can be started as a service or as a plugin. The recomm
 - The restart command restarts FileMonitor as a Task in Stash.

 # Task Scheduler
+
 To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor** and enable the **Scheduler** option.
 ![ReoccurringTaskScheduler](https://github.com/user-attachments/assets/5a7bf6a4-3bd6-4692-a6c3-e9f8f4664f14)

@@ -38,19 +44,20 @@ To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor**
 - The example tasks are disabled by default because they either have a zero frequency value or the time field is set to **DISABLED**.

 To configure the schedule or to add a new task, edit the **task_scheduler** section in the **filemonitor_config.py** file.
-```` python
+
+```python
 "task_scheduler": [
     # To create a daily task, include each day of the week for the weekday field or "every"
     # Optional field for task "Auto Tag" is 'paths'. For detail usage, see example #A3: in filemonitor_task_examples.py
     {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 5AM)

     # Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir field.
For detail usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py - {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", + {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False, "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM) {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM) - + # The following tasks are scheduled weekly # Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. For detail usage, see examples #A3: in filemonitor_task_examples.py {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM) @@ -59,7 +66,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM) {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM) {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM) - + # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field. # The monthly field value must be 1, 2, 3, or 4. # 1 = 1st specified weekday of the month. Example 1st monday. @@ -68,26 +75,29 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio # 4 = 4th specified weekday of the month. # The Backup task is scheduled monthly # Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py - {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) + {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled. - {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", - "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00) - - # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash. + {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", + "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00) + + # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash. # This task only works if FileMonitor is started as a service or in command line mode. # Optional fields are 'command' and 'RunAfter'. 
For detail usage, see examples #C1 and #C2 in filemonitor_task_examples.py
     {"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes
 ],
-````
+```
+
 - To add plugins to the task list, use the Plugin-ID in the "task" field. The plugin ID is usually the file name of the script without the extension.
 - Plugin tasks have the following optional fields: taskName, taskMode, validateDir, and taskQue
   - The **validateDir** field can be used to define the plugin sub directory, which is checked to see if it exists before running the task.
   - **taskName** field is used to name the task to call for the associated plugin. It cannot be used with "taskQue":False
-  - **taskQue** field is used to call the plugin without using the Task Queue. I.E. "taskQue":False. When this field is set to False, the taskName field can NOT be used. Instead use taskMode to identify the task to call.
-  - **taskMode** field is used in order to run the plugin without using the Task Queue. The plugin runs immediatly. Be careful not to confuse taskMode with taskName. Look in the plugin *.yml file under the **tasks** section where it defines both the task-name and the task-mode.
+  - **taskQue** field is used to call the plugin without using the Task Queue. I.e. "taskQue":False. When this field is set to False, the taskName field can NOT be used. Instead, use taskMode to identify the task to call.
+  - **taskMode** field is used in order to run the plugin without using the Task Queue. The plugin runs immediately. Be careful not to confuse taskMode with taskName. Look in the plugin \*.yml file under the **tasks** section where it defines both the task-name and the task-mode. (See the illustrative fragment after the Options section below.)
 - Tasks can be scheduled to run monthly, weekly, hourly, and by minutes.
 - The scheduler list uses two types of syntax. One is **weekday** based, and the other is **frequency** based.
+
   - **weekday Based**
+
     - Use the weekday based syntax for daily, weekly, and monthly schedules.
     - All the weekday based methods must have a **weekday** field and a **time** field, which specifies the day(s) of the week and the time to start the task.
     - **Daily**:
@@ -128,6 +138,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
 - For best results use the scheduler with FileMonitor running as a service.

 ## Requirements
+
 - pip install -r requirements.txt
 - Or manually install each requirement:
   - `pip install stashapp-tools --upgrade`
@@ -136,6 +147,7 @@
   - `pip install schedule`

 ## Installation
+
 - Follow **Requirements** instructions.
 - In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**.
 - Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\FileMonitor**).
@@ -144,16 +156,17 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
 That's it!!!

 ## Options
+
 - Main options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
   - When the UI option [Max DB Backups] is set to a value greater than 1, and when the scheduler is enabled, the quantity of database backup files is trimmed down to the set [**Max DB Backups**] value after the scheduler executes the Backup task.
   - The other options are self-explanatory from the UI.
 - Additional options available in filemonitor_config.py. The options are well documented in the commented code.
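Tying the scheduler syntax above together, the fragment below contrasts a Task Queue entry (taskName), a background entry (taskQue/taskMode), and the frequency-based form. The schedule times are placeholders; the field shapes follow the documented examples:

```python
# Illustrative task_scheduler entries (hypothetical times).
"task_scheduler": [
    # Runs through the Task Queue; taskName selects the plugin task.
    {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "03:00"},
    # Bypasses the Task Queue ("taskQue" : False); taskMode, not taskName, selects the task.
    {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue" : False, "weekday" : "every", "time" : "03:00"},
    # Frequency-based syntax: runs every 5 minutes.
    {"task" : "CheckStashIsRunning", "minutes" : 5},
],
```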
## Bugs and Feature Request
+
Please use the following link to report FileMonitor bugs:
[FileMonitor Bug Report](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Plugin_Bug&projects=&template=bug_report_plugin.yml&title=%F0%9F%AA%B2%5BFileMonitor%5D+Your_Short_title)
Please use the following link to report a FileMonitor Feature Request: [FileMonitor Feature Request](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Enhancement&projects=&template=feature_request_plugin.yml&title=%F0%9F%92%A1%EF%B8%8F%5BEnhancement%5D%3A%5BFileMonitor%5D+Your_Short_title)
Please do **NOT** use the feature request to include any problems associated with errors. Instead, use the bug report for error issues.
-
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index 5637ee3b..19f448cd 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -13,7 +13,7 @@ settings:
     type: BOOLEAN
   turnOnSchedulerDeleteDup:
     displayName: Delete Duplicate Scheduler
-    description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
+    description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
     type: BOOLEAN
   zmaximumBackups:
     displayName: Max DB Backups

From d7ebfb0a11049bfb319e41127fd4941cb2dcc212 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 05:16:57 -0400
Subject: [PATCH 32/39] Update DupFileManager.py

---
 plugins/DupFileManager/DupFileManager.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py
index a1b2d541..c9ef4a16 100644
--- a/plugins/DupFileManager/DupFileManager.py
+++ b/plugins/DupFileManager/DupFileManager.py
@@ -3,12 +3,6 @@
 # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 # Note: To call this script outside of Stash, pass argument --url
 # Example: python DupFileManager.py --url http://localhost:9999 -a
-
-# Research:
-# Research following links to complete this plugin:
-# Python library for parse-reparsepoint
-# https://pypi.org/project/parse-reparsepoint/
-# pip install parse-reparsepoint
 import os, sys, time, pathlib, argparse, platform, shutil, logging
 from StashPluginHelper import StashPluginHelper
 from DupFileManager_config import config # Import config from DupFileManager_config.py

From b4e59e118c96d81efee03f046cb28696a9af7078 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 05:46:50 -0400
Subject: [PATCH 33/39] Update README.md

---
 plugins/DupFileManager/README.md | 20 +++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index 82e3fb7d..7d0cf052 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -8,20 +8,26 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
   - Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
 - Delete duplicate file task with the following options:
   - Tasks (Settings->Task->[Plugin Tasks]->DupFileManager)
-    - **Tag Duplicate Filename** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
-    - **Delete Duplicates** - Deletes duplicate files
+    - **Tag Duplicates** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
+    - **Delete Tagged Duplicates** - Delete scenes having DuplicateMarkForDeletion tag.
+    - **Delete Duplicates** - Deletes duplicate files. Performs deletion without first tagging.
   - Plugin UI options (Settings->Plugins->Plugins->[DupFileManager])
-    - Use a white-list of preferential directories to determine which duplicate will be the primary.
-    - Use a gray-list of preferential directories to determine which duplicate should be the primary.
-    - Use a black-list to determine which duplicates should be deleted first.
+    - Has a 3-tier path selection to determine which duplicates to keep, and which should be candidates for deletion.
+      - **Whitelist** - List of paths NOT to be deleted.
+        - E.g. C:\Favorite\,E:\MustKeep\
+      - **Gray-List** - List of preferential paths to determine which duplicate should be the primary.
+        - E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+      - **Blacklist** - List of LEAST preferential paths to determine primary candidates for deletion.
+        - E.g. C:\Downloads\,F:\DeleteMeFirst\
     - **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
     - **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
     - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+    - **Swap High Resolution** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
+    - **Swap Longer Duration** - When enabled, swaps the scene with the longer duration.
   - Options available via DupFileManager_config.py
     - **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
-    - **swapHighRes** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
-    - **swapLongLength** - When enabled, swaps scene with longer duration.
     - **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
+    - **addPrimaryDupPathToDetails** - If enabled, adds the primary duplicate path to the scene details.

 ### Requirements

From c4be9c2d804a64bd1e500af370be9c2d08431681 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 06:22:48 -0400
Subject: [PATCH 34/39] Added option to avoid "Delete Tagged Duplicates"

Added option to avoid "Delete Tagged Duplicates" without turnOnSchedulerDeleteDup enabled.
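In effect, the guard this patch extends reduces to the following check; a simplified sketch of the logic in the filemonitor.py diff below, where the helper name `is_delete_dup_task` is illustrative and not the plugin's actual code:

```python
# Simplified sketch of the delete-duplicates guard (task dict shapes as in filemonitor_config.py).
def is_delete_dup_task(task: dict) -> bool:
    return (task.get("task") == "Delete Duplicates"
            or task.get("taskName") in ("Delete Duplicates", "Delete Tagged Duplicates")
            or task.get("taskMode") == "delete_duplicates_task")

# runPluginTask() warns and skips any matching task unless the UI option
# [Delete Duplicate Scheduler] (turnOnSchedulerDeleteDup) is enabled.
```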
--- plugins/FileMonitor/filemonitor.py | 18 +++++++--- plugins/FileMonitor/filemonitor_config.py | 12 +++---- .../FileMonitor/filemonitor_self_unit_test.py | 35 ++++++++++--------- 3 files changed, 38 insertions(+), 27 deletions(-) diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index 29aea88b..03575b3f 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -161,11 +161,11 @@ def __init__(self): else: weekDays = task['weekday'].lower() if 'monthly' in task: - stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}") + stash.Log(f"Adding to scheduler task '{self.taskName(task)}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}") elif task['weekday'] == "every": - stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every day at {task['time']}") + stash.Log(f"Adding to scheduler task '{self.taskName(task)}' (weekly) every day at {task['time']}") else: - stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}") + stash.Log(f"Adding to scheduler task '{self.taskName(task)}' (weekly) every {task['weekday']} at {task['time']}") hasValidDay = False if "monday" in weekDays or "every" in weekDays: @@ -196,6 +196,16 @@ def __init__(self): stash.Error(f"Task '{task['task']}' is missing fields.") self.checkSchedulePending() + def taskName(self, task): + pluginTask = None + if 'taskName' in task: + pluginTask = task['taskName'] + elif 'taskMode' in task: + pluginTask = task['taskMode'] + if pluginTask == None or pluginTask == "": + return task['task'] + return f"{task['task']}->{pluginTask}" + # ToDo: Add asynchronous threading logic to running task. def runTask(self, task): import datetime @@ -319,7 +329,7 @@ def runPluginTask(self, task): if invalidDir: stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'") return None - if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and task['taskName'] == "Delete Duplicates") or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")): + if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and (task['taskName'] == "Delete Duplicates" or task['taskName'] == "Delete Tagged Duplicates")) or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")): stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]") return None # The pluginId field is only here for backward compatibility, and should not be used in future scheduler configurations diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py index 60824fd6..a2456471 100644 --- a/plugins/FileMonitor/filemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -17,9 +17,6 @@ # Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir field. 
For detail usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] - # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False - {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False, - "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM) {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM) # The following tasks are scheduled weekly @@ -30,6 +27,12 @@ {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM) {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM) {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM) + # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False + {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False, + "weekday" : "sunday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] (Sunday at 2:30AM) + # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled. + {"task" : "DupFileManager", "taskName" : "Delete Tagged Duplicates", "validateDir" : "DupFileManager", + "weekday" : "saturday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Tagged Duplicates] 6 days after tagging at 2:30AM # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field. # The monthly field value must be 1, 2, 3, or 4. @@ -40,9 +43,6 @@ # The Backup task is scheduled monthly # Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) - # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled. - {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", - "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00) # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash. # This task only works if FileMonitor is started as a service or in command line mode. diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py index 83942f46..135a1eba 100644 --- a/plugins/FileMonitor/filemonitor_self_unit_test.py +++ b/plugins/FileMonitor/filemonitor_self_unit_test.py @@ -21,23 +21,24 @@ ], "task_scheduler_set_time": [ # Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled. 
- {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] - {"task" : "Generate", "weekday" : "every", "time" : "04:01"}, - {"task" : "Clean", "weekday" : "every", "time" : "04:01"}, - {"task" : "Auto Tag", "weekday" : "every", "time" : "04:01"}, - {"task" : "Optimise Database", "weekday" : "every", "time" : "04:01"}, - {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Running plugin task: Create Tags - {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "04:01"}, # Does NOT run in the task queue - {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] - {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "04:01"}, - {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Optimising database... - {"task" : "Clean Generated Files", "weekday" : "every", "time" : "04:01"}, - {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Migrating scene hashes... - {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run. - {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? - {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=??? - {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? - {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=??? 
+        {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
+        {"task" : "Generate", "weekday" : "every", "time" : "06:17"},
+        {"task" : "Clean", "weekday" : "every", "time" : "06:17"},
+        {"task" : "Auto Tag", "weekday" : "every", "time" : "06:17"},
+        {"task" : "Optimise Database", "weekday" : "every", "time" : "06:17"},
+        {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Running plugin task: Create Tags
+        {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "06:17"}, # Does NOT run in the task queue
+        {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
+        {"task" : "DupFileManager", "taskName" : "Delete Tagged Duplicates", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Delete Tagged Duplicates]
+        {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "06:17"},
+        {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Optimising database...
+        {"task" : "Clean Generated Files", "weekday" : "every", "time" : "06:17"},
+        {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Migrating scene hashes...
+        {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+        {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+        {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+        {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+        {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
     ],
     # MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
     "selfUnitTest_repeat" : False , # Enable to turn on self unit test.
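For reference, the taskName() log-name helper added in this patch behaves as in the standalone restatement below (a module-level function for illustration only, with the same field precedence as the method in the diff above):

```python
# Standalone restatement of the taskName() helper (illustrative, not the plugin's code).
def task_display_name(task: dict) -> str:
    pluginTask = None
    if "taskName" in task:
        pluginTask = task["taskName"]
    elif "taskMode" in task:
        pluginTask = task["taskMode"]
    if pluginTask is None or pluginTask == "":
        return task["task"]
    return f"{task['task']}->{pluginTask}"

assert task_display_name({"task": "Generate"}) == "Generate"
assert task_display_name({"task": "DupFileManager", "taskName": "Delete Tagged Duplicates"}) == "DupFileManager->Delete Tagged Duplicates"
```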
From b34fb92790e03713cb8bb0bc44ff2b046618f594 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sat, 23 Nov 2024 00:17:40 -0500 Subject: [PATCH 35/39] Added report feature, tools UI, and advanced menu --- plugins/DupFileManager/DupFileManager.css | 67 + plugins/DupFileManager/DupFileManager.css.map | 1 + plugins/DupFileManager/DupFileManager.dev.py | 1440 +++++++++++++ plugins/DupFileManager/DupFileManager.js | 310 +++ plugins/DupFileManager/DupFileManager.js.map | 1 + plugins/DupFileManager/DupFileManager.py | 1500 +++++++++++-- plugins/DupFileManager/DupFileManager.yml | 81 +- .../DupFileManager/DupFileManager_config.py | 74 +- .../DupFileManager_config_dev.py | 24 + .../DupFileManager_report_config.py | 212 ++ plugins/DupFileManager/ModulesValidate.py | 126 ++ plugins/DupFileManager/README.md | 73 +- plugins/DupFileManager/StashPluginHelper.py | 847 ++++++-- plugins/DupFileManager/advance_options.html | 1902 +++++++++++++++++ plugins/DupFileManager/requirements.txt | 3 +- 15 files changed, 6265 insertions(+), 396 deletions(-) create mode 100644 plugins/DupFileManager/DupFileManager.css create mode 100644 plugins/DupFileManager/DupFileManager.css.map create mode 100644 plugins/DupFileManager/DupFileManager.dev.py create mode 100644 plugins/DupFileManager/DupFileManager.js create mode 100644 plugins/DupFileManager/DupFileManager.js.map create mode 100644 plugins/DupFileManager/DupFileManager_config_dev.py create mode 100644 plugins/DupFileManager/DupFileManager_report_config.py create mode 100644 plugins/DupFileManager/ModulesValidate.py create mode 100644 plugins/DupFileManager/advance_options.html diff --git a/plugins/DupFileManager/DupFileManager.css b/plugins/DupFileManager/DupFileManager.css new file mode 100644 index 00000000..7ef71ede --- /dev/null +++ b/plugins/DupFileManager/DupFileManager.css @@ -0,0 +1,67 @@ +.scene-card__date { + color: #bfccd6; + font-size: 0.85em; +} + +.scene-card__performer { + display: inline-block; + font-weight: 500; + margin-right: 0.5em; +} +.scene-card__performer a { + color: #137cbd; +} + +.scene-card__performers, +.scene-card__tags { + -webkit-box-orient: vertical; + display: -webkit-box; + -webkit-line-clamp: 1; + overflow: hidden; +} +.scene-card__performers:hover, +.scene-card__tags:hover { + -webkit-line-clamp: unset; + overflow: visible; +} + +.scene-card__tags .tag-item { + margin-left: 0; +} + +.scene-performer-popover .image-thumbnail { + margin: 1em; +} + + /* Dashed border */ +hr.dashed { + border-top: 3px dashed #bbb; +} + +/* Dotted border */ +hr.dotted { + border-top: 3px dotted #bbb; +} + +/* Solid border */ +hr.solid { + border-top: 3px solid #bbb; +} + +/* Rounded border */ +hr.rounded { + border-top: 8px solid #bbb; + border-radius: 5px; +} + +h3.under_construction { + color:red; + background-color:yellow; +} + +h3.submenu { + color:Tomato; + background-color:rgba(100, 100, 100); +} + +/*# sourceMappingURL=DupFileManager.css.map */ diff --git a/plugins/DupFileManager/DupFileManager.css.map b/plugins/DupFileManager/DupFileManager.css.map new file mode 100644 index 00000000..a4afe07b --- /dev/null +++ b/plugins/DupFileManager/DupFileManager.css.map @@ -0,0 +1 @@ +{"version":3,"sourceRoot":"","sources":["../src/DupFileManager.scss"],"names":[],"mappings":"AAAA;EACE;EACA;;;AAGF;EACE;EACA;EACA;;AAEA;EACE;;;AAIJ;AAAA;EAEE;EACA;EACA;EACA;;AAEA;AAAA;EACE;EACA;;;AAIJ;EACE;;;AAGF;EACE","file":"DupFileManager.css"} \ No newline at end of file diff --git 
a/plugins/DupFileManager/DupFileManager.dev.py b/plugins/DupFileManager/DupFileManager.dev.py new file mode 100644 index 00000000..630e16e2 --- /dev/null +++ b/plugins/DupFileManager/DupFileManager.dev.py @@ -0,0 +1,1440 @@ +# Description: This is a Stash plugin which manages duplicate files. +# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager +# Note: To call this script outside of Stash, pass argument --url +# Example: python DupFileManager.py --url http://localhost:9999 -a +try: + import ModulesValidate + ModulesValidate.modulesInstalled(["send2trash", "requests"], silent=True) +except Exception as e: + import traceback, sys + tb = traceback.format_exc() + print(f"ModulesValidate Exception. Error: {e}\nTraceBack={tb}", file=sys.stderr) +import os, sys, time, pathlib, argparse, platform, shutil, traceback, logging, requests +from datetime import datetime +from StashPluginHelper import StashPluginHelper +from stashapi.stash_types import PhashDistance +from DupFileManager_config import config # Import config from DupFileManager_config.py +from DupFileManager_report_config import report_config + +# ToDo: make sure the following line of code works +config += report_config + +parser = argparse.ArgumentParser() +parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') +parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') +parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.') +parser.add_argument('--clear_dup_tag', '-c', dest='clear_tag', action='store_true', help='Clear duplicates of duplicate tags.') +parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.') +parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.') +parse_args = parser.parse_args() + +settings = { + "matchDupDistance": 0, + "mergeDupFilename": False, + "whitelistDelDupInSameFolder": False, + "zvWhitelist": "", + "zwGraylist": "", + "zxBlacklist": "", + "zyMaxDupToProcess": 0, + "zySwapHighRes": False, + "zySwapLongLength": False, + "zySwapBetterBitRate": False, + "zySwapCodec": False, + "zySwapBetterFrameRate": False, + "zzDebug": False, + "zzTracing": False, + + "zzObsoleteSettingsCheckVer2": False, # This is a hidden variable that is NOT displayed in the UI + + # Obsolete setting names + "zWhitelist": "", + "zxGraylist": "", + "zyBlacklist": "", + "zyMatchDupDistance": 0, + "zSwapHighRes": False, + "zSwapLongLength": False, + "zSwapBetterBitRate": False, + "zSwapCodec": False, + "zSwapBetterFrameRate": False, +} +stash = StashPluginHelper( + stash_url=parse_args.stash_url, + debugTracing=parse_args.trace, + settings=settings, + config=config, + maxbytes=10*1024*1024, + DebugTraceFieldName="zzTracing", + DebugFieldName="zzDebug", + ) +stash.convertToAscii = True + +advanceMenuOptions = [ "applyCombo", "applyComboBlacklist", "pathToDelete", "pathToDeleteBlacklist", "sizeToDeleteLess", "sizeToDeleteGreater", "sizeToDeleteBlacklistLess", "sizeToDeleteBlacklistGreater", "durationToDeleteLess", "durationToDeleteGreater", "durationToDeleteBlacklistLess", "durationToDeleteBlacklistGreater", + "commonResToDeleteLess", "commonResToDeleteEq", "commonResToDeleteGreater", 
"commonResToDeleteBlacklistLess", "commonResToDeleteBlacklistEq", "commonResToDeleteBlacklistGreater", "resolutionToDeleteLess", "resolutionToDeleteEq", "resolutionToDeleteGreater", + "resolutionToDeleteBlacklistLess", "resolutionToDeleteBlacklistEq", "resolutionToDeleteBlacklistGreater", "ratingToDeleteLess", "ratingToDeleteEq", "ratingToDeleteGreater", "ratingToDeleteBlacklistLess", "ratingToDeleteBlacklistEq", "ratingToDeleteBlacklistGreater", + "tagToDelete", "tagToDeleteBlacklist", "titleToDelete", "titleToDeleteBlacklist", "pathStrToDelete", "pathStrToDeleteBlacklist"] + +doJsonReturnModeTypes = ["tag_duplicates_task", "removeDupTag", "addExcludeTag", "removeExcludeTag", "mergeTags", "getLocalDupReportPath", + "createDuplicateReportWithoutTagging", "deleteLocalDupReportHtmlFiles", "clear_duplicate_tags_task", + "deleteAllDupFileManagerTags", "deleteBlackListTaggedDuplicatesTask", "deleteTaggedDuplicatesLwrResOrLwrDuration", + "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"] +doJsonReturnModeTypes += [advanceMenuOptions] +doJsonReturn = False +if len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in doJsonReturnModeTypes: + doJsonReturn = True + stash.log_to_norm = stash.LogTo.FILE +elif stash.PLUGIN_TASK_NAME == "doEarlyExit": + time.sleep(3) + stash.Log("Doing early exit because of task name") + time.sleep(3) + exit(0) + +stash.Log("******************* Starting *******************") +if len(sys.argv) > 1: + stash.Log(f"argv = {sys.argv}") +else: + stash.Debug(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}; PLUGIN_TASK_NAME = {stash.PLUGIN_TASK_NAME}; argv = {sys.argv}") +stash.status(logLevel=logging.DEBUG) + +obsoleteSettingsToConvert = {"zWhitelist" : "zvWhitelist", "zxGraylist" : "zwGraylist", "zyBlacklist" : "zxBlacklist", "zyMatchDupDistance" : "matchDupDistance", "zSwapHighRes" : "zySwapHighRes", "zSwapLongLength" : "zySwapLongLength", "zSwapBetterBitRate" : "zySwapBetterBitRate", "zSwapCodec" : "zySwapCodec", "zSwapBetterFrameRate" : "zySwapBetterFrameRate"} +stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "zzObsoleteSettingsCheckVer2") + + +LOG_STASH_N_PLUGIN = stash.LogTo.STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LogTo.CONSOLE + stash.LogTo.FILE +listSeparator = stash.Setting('listSeparator', ',', notEmpty=True) +addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails') +clearAllDupfileManagerTags = stash.Setting('clearAllDupfileManagerTags') +doGeneratePhash = stash.Setting('doGeneratePhash') +mergeDupFilename = stash.Setting('mergeDupFilename') +moveToTrashCan = False if stash.Setting('permanentlyDelete') else True +alternateTrashCanPath = stash.Setting('dup_path') +whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder') +graylistTagging = stash.Setting('graylistTagging') +maxDupToProcess = int(stash.Setting('zyMaxDupToProcess')) +significantTimeDiff = float(stash.Setting('significantTimeDiff')) +toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap') +cleanAfterDel = stash.Setting('cleanAfterDel') + +swapHighRes = stash.Setting('zySwapHighRes') +swapLongLength = stash.Setting('zySwapLongLength') +swapBetterBitRate = stash.Setting('zySwapBetterBitRate') +swapCodec = stash.Setting('zySwapCodec') +swapBetterFrameRate = stash.Setting('zySwapBetterFrameRate') +favorLongerFileName = stash.Setting('favorLongerFileName') +favorLargerFileSize = stash.Setting('favorLargerFileSize') +favorBitRateChange = stash.Setting('favorBitRateChange') +favorHighBitRate = stash.Setting('favorHighBitRate') 
+favorFrameRateChange = stash.Setting('favorFrameRateChange')
+favorHigherFrameRate = stash.Setting('favorHigherFrameRate')
+favorCodecRanking = stash.Setting('favorCodecRanking')
+codecRankingSetToUse = stash.Setting('codecRankingSetToUse')
+if codecRankingSetToUse == 4:
+    codecRanking = stash.Setting('codecRankingSet4')
+elif codecRankingSetToUse == 3:
+    codecRanking = stash.Setting('codecRankingSet3')
+elif codecRankingSetToUse == 2:
+    codecRanking = stash.Setting('codecRankingSet2')
+else:
+    codecRanking = stash.Setting('codecRankingSet1')
+skipIfTagged = stash.Setting('skipIfTagged')
+killScanningPostProcess = stash.Setting('killScanningPostProcess')
+tagLongDurationLowRes = stash.Setting('tagLongDurationLowRes')
+bitRateIsImporantComp = stash.Setting('bitRateIsImporantComp')
+codecIsImporantComp = stash.Setting('codecIsImporantComp')
+
+excludeFromReportIfSignificantTimeDiff = False
+
+matchDupDistance = int(stash.Setting('matchDupDistance'))
+matchPhaseDistance = PhashDistance.EXACT
+matchPhaseDistanceText = "Exact Match"
+if stash.PLUGIN_TASK_NAME == "tag_duplicates_task" and 'Target' in stash.JSON_INPUT['args']:
+    if stash.JSON_INPUT['args']['Target'].startswith("0"):
+        matchDupDistance = 0
+    elif stash.JSON_INPUT['args']['Target'].startswith("1"):
+        matchDupDistance = 1
+    elif stash.JSON_INPUT['args']['Target'].startswith("2"):
+        matchDupDistance = 2
+    elif stash.JSON_INPUT['args']['Target'].startswith("3"):
+        matchDupDistance = 3
+
+    if stash.JSON_INPUT['args']['Target'].find(":") == 1:
+        significantTimeDiff = float(stash.JSON_INPUT['args']['Target'][2:])
+        excludeFromReportIfSignificantTimeDiff = True
+
+if matchDupDistance == 1:
+    matchPhaseDistance = PhashDistance.HIGH
+    matchPhaseDistanceText = "High Match"
+elif matchDupDistance == 2:
+    matchPhaseDistance = PhashDistance.MEDIUM
+    matchPhaseDistanceText = "Medium Match"
+elif matchDupDistance == 3:
+    matchPhaseDistance = PhashDistance.LOW
+    matchPhaseDistanceText = "Low Match"
+
+# significantTimeDiff cannot be higher than 1 and shouldn't be lower than .25
+if significantTimeDiff > 1:
+    significantTimeDiff = float(1.00)
+if significantTimeDiff < .25:
+    significantTimeDiff = float(0.25)
+
+
+duplicateMarkForDeletion = stash.Setting('DupFileTag')
+if duplicateMarkForDeletion == "":
+    duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
+
+base1_duplicateMarkForDeletion = duplicateMarkForDeletion
+
+duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
+if duplicateWhitelistTag == "":
+    duplicateWhitelistTag = '_DuplicateWhitelistFile'
+
+excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag')
+if excludeDupFileDeleteTag == "":
+    excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion'
+
+graylistMarkForDeletion = stash.Setting('graylistMarkForDeletion')
+if graylistMarkForDeletion == "":
+    graylistMarkForDeletion = '_GraylistMarkForDeletion'
+
+longerDurationLowerResolution = stash.Setting('longerDurationLowerResolution')
+if longerDurationLowerResolution == "":
+    longerDurationLowerResolution = '_LongerDurationLowerResolution'
+
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag]
+
+if stash.Setting('underscoreDupFileTag') and not duplicateMarkForDeletion.startswith('_'):
+    duplicateMarkForDeletionWithOutUnderscore = duplicateMarkForDeletion
+    duplicateMarkForDeletion = "_" + duplicateMarkForDeletion
+    if stash.renameTag(duplicateMarkForDeletionWithOutUnderscore, duplicateMarkForDeletion):
+        stash.Log(f"Renamed tag {duplicateMarkForDeletionWithOutUnderscore} to
{duplicateMarkForDeletion}") + stash.Trace(f"Added underscore to {duplicateMarkForDeletionWithOutUnderscore} = {duplicateMarkForDeletion}") + excludeMergeTags += [duplicateMarkForDeletion] +else: + stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}") + +base2_duplicateMarkForDeletion = duplicateMarkForDeletion + +if stash.Setting('appendMatchDupDistance'): + duplicateMarkForDeletion += f"_{matchDupDistance}" + excludeMergeTags += [duplicateMarkForDeletion] + +stash.initMergeMetadata(excludeMergeTags) + +graylist = stash.Setting('zwGraylist').split(listSeparator) +graylist = [item.lower() for item in graylist] +if graylist == [""] : graylist = [] +stash.Trace(f"graylist = {graylist}") +whitelist = stash.Setting('zvWhitelist').split(listSeparator) +whitelist = [item.lower() for item in whitelist] +if whitelist == [""] : whitelist = [] +stash.Trace(f"whitelist = {whitelist}") +blacklist = stash.Setting('zxBlacklist').split(listSeparator) +blacklist = [item.lower() for item in blacklist] +if blacklist == [""] : blacklist = [] +stash.Trace(f"blacklist = {blacklist}") + +def realpath(path): + """ + get_symbolic_target for win + """ + try: + import win32file + f = win32file.CreateFile(path, win32file.GENERIC_READ, + win32file.FILE_SHARE_READ, None, + win32file.OPEN_EXISTING, + win32file.FILE_FLAG_BACKUP_SEMANTICS, None) + target = win32file.GetFinalPathNameByHandle(f, 0) + # an above gives us something like u'\\\\?\\C:\\tmp\\scalarizr\\3.3.0.7978' + return target.strip('\\\\?\\') + except ImportError: + handle = open_dir(path) + target = get_symbolic_target(handle) + check_closed(handle) + return target + +def isReparsePoint(path): + import win32api + import win32con + from parse_reparsepoint import Navigator + FinalPathname = realpath(path) + stash.Log(f"(path='{path}') (FinalPathname='{FinalPathname}')") + if FinalPathname != path: + stash.Log(f"Symbolic link '{path}'") + return True + if not os.path.isdir(path): + path = os.path.dirname(path) + return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT + +def testReparsePointAndSymLink(merge=False, deleteDup=False): + stash.Trace(f"Debug Tracing (platform.system()={platform.system()})") + myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link + myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point + myTestPath3 = r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4" #symbolic link + myTestPath4 = r"E:\Stash\plugins\RenameFile\README.md" #symbolic link + myTestPath5 = r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md" #symbolic link + myTestPath6 = r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md" # not reparse point + stash.Log(f"Testing '{myTestPath1}'") + if isReparsePoint(myTestPath1): + stash.Log(f"isSymLink '{myTestPath1}'") + else: + stash.Log(f"Not isSymLink '{myTestPath1}'") + + if isReparsePoint(myTestPath2): + stash.Log(f"isSymLink '{myTestPath2}'") + else: + stash.Log(f"Not isSymLink '{myTestPath2}'") + + if isReparsePoint(myTestPath3): + stash.Log(f"isSymLink '{myTestPath3}'") + else: + stash.Log(f"Not isSymLink '{myTestPath3}'") + + if isReparsePoint(myTestPath4): + stash.Log(f"isSymLink '{myTestPath4}'") + else: + stash.Log(f"Not isSymLink '{myTestPath4}'") + + if isReparsePoint(myTestPath5): + stash.Log(f"isSymLink '{myTestPath5}'") + else: + stash.Log(f"Not isSymLink '{myTestPath5}'") + + if isReparsePoint(myTestPath6): + 
stash.Log(f"isSymLink '{myTestPath6}'") + else: + stash.Log(f"Not isSymLink '{myTestPath6}'") + return + +detailPrefix = "BaseDup=" +detailPostfix = "\n" + +def setTagId(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False): + details = "" + ORG_DATA_DICT = {'id' : sceneDetails['id']} + dataDict = ORG_DATA_DICT.copy() + doAddTag = True + if addPrimaryDupPathToDetails: + BaseDupStr = f"{detailPrefix}{DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n{TagReason}(matchDupDistance={matchPhaseDistanceText})\n{detailPostfix}" + if sceneDetails['details'] == "": + details = BaseDupStr + elif not sceneDetails['details'].startswith(detailPrefix): + details = f"{BaseDupStr};\n{sceneDetails['details']}" + for tag in sceneDetails['tags']: + if tag['name'] == tagName: + doAddTag = False + break + if doAddTag: + stash.addTag(sceneDetails, tagName, ignoreAutoTag=ignoreAutoTag) + if details != "": + dataDict.update({'details' : details}) + if dataDict != ORG_DATA_DICT: + stash.updateScene(dataDict) + stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict} and tag {tagName}", toAscii=True) + else: + stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']} already has tag {tagName}.", toAscii=True) + return doAddTag + +def setTagId_withRetry(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): + errMsg = None + for i in range(0, retryCount): + try: + if errMsg != None: + stash.Warn(errMsg) + return setTagId(tagName, sceneDetails, DupFileToKeep, TagReason, ignoreAutoTag) + except (requests.exceptions.ConnectionError, ConnectionResetError): + tb = traceback.format_exc() + errMsg = f"[setTagId] Exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + except Exception as e: + tb = traceback.format_exc() + errMsg = f"[setTagId] Unknown exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + time.sleep(sleepSecondsBetweenRetry) + +def hasSameDir(path1, path2): + if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent: + return True + return False + +def sendToTrash(path): + if not os.path.isfile(path): + stash.Warn(f"File does not exist: {path}.", toAscii=True) + return False + try: + from send2trash import send2trash # Requirement: pip install Send2Trash + send2trash(path) + return True + except Exception as e: + stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True) + try: + if os.path.isfile(path): + os.remove(path) + return True + except Exception as e: + stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True) + return False +# If ckTimeDiff=False: Does durration2 have significant more time than durration1 +def significantTimeDiffCheck(durration1, durration2, ckTimeDiff = False): # If ckTimeDiff=True: is time different significant in either direction. 
+def hasSameDir(path1, path2):
+    if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent:
+        return True
+    return False
+
+def sendToTrash(path):
+    if not os.path.isfile(path):
+        stash.Warn(f"File does not exist: {path}.", toAscii=True)
+        return False
+    try:
+        from send2trash import send2trash # Requirement: pip install Send2Trash
+        send2trash(path)
+        return True
+    except Exception as e:
+        stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True)
+        try:
+            if os.path.isfile(path):
+                os.remove(path)
+                return True
+        except Exception as e:
+            stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
+    return False
+
+# If ckTimeDiff=False: does duration2 have significantly more time than duration1?
+def significantTimeDiffCheck(duration1, duration2, ckTimeDiff = False): # If ckTimeDiff=True: is the time difference significant in either direction?
+    if not isinstance(duration1, int) and 'files' in duration1:
+        duration1 = int(duration1['files'][0]['duration'])
+        duration2 = int(duration2['files'][0]['duration'])
+    timeDiff = getTimeDif(duration1, duration2)
+    if ckTimeDiff and timeDiff > 1:
+        timeDiff = getTimeDif(duration2, duration1)
+    if timeDiff < significantTimeDiff:
+        return True
+    return False
+
+def getTimeDif(duration1, duration2): # Where duration1 is expected to be smaller than duration2, e.g. 45/60 = .75
+    return duration1 / duration2
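# Worked example of the duration-ratio test above, assuming a significantTimeDiff
# threshold of 0.90 (the real value comes from the plugin settings, which are
# outside this hunk). Any ratio below the threshold counts as significant:
#     getTimeDif(45, 60) -> 0.75 -> significant difference
#     getTimeDif(57, 60) -> 0.95 -> not significant
for shorter, longer in [(45, 60), (57, 60)]:
    ratio = shorter / longer  # same computation as getTimeDif()
    print(f"{shorter}/{longer} = {ratio:.2f} ->", "significant" if ratio < 0.90 else "not significant")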
+def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better than scene1
+    # Prioritize higher resolution over codec, bit rate, and frame rate
+    if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+        return False
+    if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate):
+        if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])):
+            stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}")
+            return True
+    if (favorCodecRanking and swapCandidateCk == False) or (swapCandidateCk and swapCodec):
+        scene1CodecRank = stash.indexStartsWithInList(codecRanking, scene1['files'][0]['video_codec'])
+        scene2CodecRank = stash.indexStartsWithInList(codecRanking, scene2['files'][0]['video_codec'])
+        if scene2CodecRank < scene1CodecRank:
+            stash.Trace(f"[isBetterVideo] Better codec. {scene1['files'][0]['path']}={scene1['files'][0]['video_codec']}:Rank={scene1CodecRank} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['video_codec']}:Rank={scene2CodecRank}")
+            return True
+    if (favorFrameRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterFrameRate):
+        if (favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) > int(scene1['files'][0]['frame_rate'])) or (not favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) < int(scene1['files'][0]['frame_rate'])):
+            stash.Trace(f"[isBetterVideo]:[favorHigherFrameRate={favorHigherFrameRate}] Better frame rate. {scene1['files'][0]['path']}={scene1['files'][0]['frame_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}")
+            return True
+    return False
+
+def significantMoreTimeCompareToBetterVideo(scene1, scene2): # is scene2 better than scene1
+    if isinstance(scene1, int):
+        scene1 = stash.find_scene(scene1)
+        scene2 = stash.find_scene(scene2)
+    if int(scene1['files'][0]['duration']) >= int(scene2['files'][0]['duration']):
+        return False
+    if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+        if significantTimeDiffCheck(scene1, scene2):
+            if tagLongDurationLowRes:
+                didAddTag = setTagId_withRetry(longerDurationLowerResolution, scene2, scene1, ignoreAutoTag=True)
+                stash.Log(f"Tagged scene2 with tag {longerDurationLowerResolution}, because scene1 is the better video, but it has significantly less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compared to scene2; scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene2['files'][0]['duration']}); didAddTag={didAddTag}")
+            else:
+                stash.Warn(f"Scene1 is the better video, but it has significantly less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compared to scene2; Scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); Scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene2['files'][0]['duration']})")
+        return False
+    return True
+
+def allThingsEqual(scene1, scene2): # If all important things are equal, return true
+    if int(scene1['files'][0]['duration']) != int(scene2['files'][0]['duration']):
+        return False
+    if scene1['files'][0]['width'] != scene2['files'][0]['width']:
+        return False
+    if scene1['files'][0]['height'] != scene2['files'][0]['height']:
+        return False
+    if bitRateIsImporantComp and scene1['files'][0]['bit_rate'] != scene2['files'][0]['bit_rate']:
+        return False
+    if codecIsImporantComp and scene1['files'][0]['video_codec'] != scene2['files'][0]['video_codec']:
+        return False
+    return True
+
+def isSwapCandidate(DupFileToKeep, DupFile):
+    # Don't move if both are in whitelist
+    if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
+        return False
+    if swapHighRes and int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
+        if not significantTimeDiffCheck(DupFileToKeep, DupFile):
+            return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
+    if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
+        if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
+            return True
+    if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True):
+        if not significantTimeDiffCheck(DupFileToKeep, DupFile):
+            return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter; DupFileToKeep-ID={DupFileToKeep['id']};DupFile-ID={DupFile['id']};BitRate 
{DupFileToKeep['files'][0]['bit_rate']} vs {DupFile['files'][0]['bit_rate']};Codec {DupFileToKeep['files'][0]['video_codec']} vs {DupFile['files'][0]['video_codec']};FrameRate {DupFileToKeep['files'][0]['frame_rate']} vs {DupFile['files'][0]['frame_rate']};", toAscii=True) + return False + +dupWhitelistTagId = None +def addDupWhitelistTag(): + global dupWhitelistTagId + stash.Trace(f"Adding tag duplicateWhitelistTag = {duplicateWhitelistTag}") + descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.' + dupWhitelistTagId = stash.createTagId(duplicateWhitelistTag, descp, ignoreAutoTag=True) + stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") + +excludeDupFileDeleteTagId = None +def addExcludeDupTag(): + global excludeDupFileDeleteTagId + stash.Trace(f"Adding tag excludeDupFileDeleteTag = {excludeDupFileDeleteTag}") + descp = 'Excludes duplicate scene from DupFileManager tagging and deletion process. A scene having this tag will not get deleted by DupFileManager' + excludeDupFileDeleteTagId = stash.createTagId(excludeDupFileDeleteTag, descp, ignoreAutoTag=True) + stash.Trace(f"dupWhitelistTagId={excludeDupFileDeleteTagId} name={excludeDupFileDeleteTag}") + +def isTaggedExcluded(Scene): + for tag in Scene['tags']: + if tag['name'] == excludeDupFileDeleteTag: + return True + return False + +def isWorseKeepCandidate(DupFileToKeep, Scene): + if not stash.startsWithInList(whitelist, Scene['files'][0]['path']) and stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']): + return True + if not stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']): + return True + if not stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']): + return True + + if stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(graylist, Scene['files'][0]['path']): + return True + if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']): + return True + return False + +def killScanningJobs(): + try: + if killScanningPostProcess: + stash.stopJobs(1, "Scanning...") + except Exception as e: + tb = traceback.format_exc() + stash.Error(f"Exception while trying to kill scan jobs; Error: {e}\nTraceBack={tb}") + +def getPath(Scene, getParent = False): + path = stash.asc2(Scene['files'][0]['path']) + path = path.replace("'", "") + path = path.replace("\\\\", "\\") + if getParent: + return pathlib.Path(path).resolve().parent + return path + +def getHtmlReportTableRow(qtyResults, tagDuplicates): + htmlReportPrefix = stash.Setting('htmlReportPrefix') + htmlReportPrefix = htmlReportPrefix.replace('http://127.0.0.1:9999/graphql', stash.url) + htmlReportPrefix = htmlReportPrefix.replace('http://localhost:9999/graphql', stash.url) + if tagDuplicates == False: + htmlReportPrefix = htmlReportPrefix.replace('") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + # ToDo: Add following buttons: + # rename file + 
if dupFileExist and tagDuplicates: + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + if dupFileExist: + fileHtmlReport.write(f"[Folder]") + fileHtmlReport.write(f"[Play]") + else: + fileHtmlReport.write("[File NOT Exist]") + fileHtmlReport.write("

") + + videoPreview = f"" + if htmlIncludeImagePreview: + imagePreview = f"
  • \"\"\"\"
" + fileHtmlReport.write(f"{getSceneID(DupFileToKeep['id'])}
{videoPreview}{imagePreview}
") + else: + fileHtmlReport.write(f"{getSceneID(DupFileToKeep['id'])}{videoPreview}") + fileHtmlReport.write(f"{getSceneID(DupFileToKeep['id'])}{getPath(DupFileToKeep)}") + fileHtmlReport.write(f"

") + fileHtmlReport.write(f"
ResDurrationBitRateCodecFrameRatesizeID
{DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']}{DupFileToKeep['files'][0]['duration']}{DupFileToKeep['files'][0]['bit_rate']}{DupFileToKeep['files'][0]['video_codec']}{DupFileToKeep['files'][0]['frame_rate']}{DupFileToKeep['files'][0]['size']}{DupFileToKeep['id']}
") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + if isTaggedExcluded(DupFileToKeep): + fileHtmlReport.write(f"") + fileHtmlReport.write(f"[Folder]") + if toKeepFileExist: + fileHtmlReport.write(f"[Play]") + else: + fileHtmlReport.write("[File NOT Exist]") + # ToDo: Add following buttons: + # rename file + fileHtmlReport.write(f"

") + + fileHtmlReport.write("\n") + + if QtyTagForDelPaginate >= htmlReportPaginate: + QtyTagForDelPaginate = 0 + fileHtmlReport.write("\n") + homeHtmReportLink = f"[Home]" + prevHtmReportLink = "" + if PaginateId > 0: + if PaginateId > 1: + prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") + else: + prevHtmReport = htmlReportNameHomePage + prevHtmReportLink = f"[Prev]" + nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html") + nextHtmReportLink = f"[Next]" + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}{nextHtmReportLink}
") + fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}") + fileHtmlReport.close() + PaginateId+=1 + fileHtmlReport = open(nextHtmReport, "w") + fileHtmlReport.write(f"{getHtmlReportTableRow(qtyResults, tagDuplicates)}\n") + if PaginateId > 1: + prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") + else: + prevHtmReport = htmlReportNameHomePage + prevHtmReportLink = f"[Prev]" + if len(DupFileSets) > (QtyTagForDel + htmlReportPaginate): + nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html") + nextHtmReportLink = f"[Next]" + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}{nextHtmReportLink}
") + else: + stash.Debug(f"DupFileSets Qty = {len(DupFileSets)}; DupFileDetailList Qty = {len(DupFileDetailList)}; QtyTagForDel = {QtyTagForDel}; htmlReportPaginate = {htmlReportPaginate}; QtyTagForDel + htmlReportPaginate = {QtyTagForDel+htmlReportPaginate}") + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}
") + fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n") + fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n") + + if tagDuplicates and graylistTagging and stash.startsWithInList(graylist, DupFile['files'][0]['path']): + stash.addTag(DupFile, graylistMarkForDeletion, ignoreAutoTag=True) + if didAddTag: + QtyNewlyTag+=1 + if QtyTagForDel == 1: + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + else: + didAddTag = 1 if didAddTag else 0 + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion;AddTag={didAddTag};Qty={QtyDup};Set={QtyDupSet} of {qtyResults};NewlyTag={QtyNewlyTag};isTag={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + stash.Trace(SepLine) + if maxDupToProcess > 0 and ((QtyTagForDel > maxDupToProcess) or (QtyTagForDel == 0 and QtyDup > maxDupToProcess)): + break + + if fileHtmlReport != None: + fileHtmlReport.write("\n") + if PaginateId > 0: + homeHtmReportLink = f"[Home]" + if PaginateId > 1: + prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") + else: + prevHtmReport = htmlReportNameHomePage + prevHtmReportLink = f"[Prev]" + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}
") + fileHtmlReport.write(f"

Total Tagged for Deletion {QtyTagForDel}

\n") + fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}") + fileHtmlReport.close() + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + stash.Log(f"View Stash duplicate report using Stash->Settings->Tools->[Duplicate File Report]", printTo = stash.LogTo.STASH) + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + + + stash.Debug("#####################################################") + stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN) + killScanningJobs() + if cleanAfterDel and deleteDup: + stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) + stash.metadata_clean() + stash.metadata_clean_generated() + stash.optimise_database() + if doGeneratePhash: + stash.metadata_generate({"phashes": True}) + sys.stdout.write("Report complete") + +def findCurrentTagId(tagNames): + # tagNames = [i for n, i in enumerate(tagNames) if i not in tagNames[:n]] + for tagName in tagNames: + tagId = stash.find_tags(q=tagName) + if len(tagId) > 0 and 'id' in tagId[0]: + stash.Debug(f"Using tag name {tagName} with Tag ID {tagId[0]['id']}") + return tagId[0]['id'] + return "-1" + +def toJson(data): + import json + # data = data.replace("'", '"') + data = data.replace("\\", "\\\\") + data = data.replace("\\\\\\\\", "\\\\") + return json.loads(data) + +def getAnAdvanceMenuOptionSelected(taskName, target, isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater): + stash.Log(f"Processing taskName = {taskName}, target = {target}") + if "Blacklist" in taskName: + isBlackList = True + if "Less" in taskName: + compareToLess = True + if "Greater" in taskName: + compareToGreater = True + + if "pathToDelete" in taskName: + pathToDelete = target.lower() + elif "sizeToDelete" in taskName: + sizeToDelete = int(target) + elif "durationToDelete" in taskName: + durationToDelete = int(target) + elif "commonResToDelete" in taskName: + resolutionToDelete = int(target) + elif "resolutionToDelete" in taskName: + resolutionToDelete = int(target) + elif "ratingToDelete" in taskName: + ratingToDelete = int(target) * 20 + elif "tagToDelete" in taskName: + tagToDelete = target.lower() + elif "titleToDelete" in taskName: + titleToDelete = target.lower() + elif "pathStrToDelete" in taskName: + pathStrToDelete = target.lower() + elif "fileNotExistToDelete" in taskName: + fileNotExistToDelete = True + return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater + +def getAdvanceMenuOptionSelected(advanceMenuOptionSelected): + isBlackList = False + pathToDelete = "" + sizeToDelete = -1 + durationToDelete = -1 + resolutionToDelete = -1 + ratingToDelete = -1 + tagToDelete = "" + titleToDelete = "" + pathStrToDelete = "" + fileNotExistToDelete = 
False
+    compareToLess = False
+    compareToGreater = False
+    if advanceMenuOptionSelected:
+        if 'Target' in stash.JSON_INPUT['args']:
+            if "applyCombo" in stash.PLUGIN_TASK_NAME:
+                jsonObject = toJson(stash.JSON_INPUT['args']['Target'])
+                for taskName in jsonObject:
+                    isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAnAdvanceMenuOptionSelected(taskName, jsonObject[taskName], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater)
+            else:
+                return getAnAdvanceMenuOptionSelected(stash.PLUGIN_TASK_NAME, stash.JSON_INPUT['args']['Target'], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater)
+    return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater
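# For the "applyCombo" advance-menu tasks, 'Target' is a JSON string mapping
# advance-menu task names to their values; each pair is fed to
# getAnAdvanceMenuOptionSelected() above. A hedged illustration with hypothetical
# task names and values (the real strings are built by the plugin UI, not shown here):
import json

example_target = '{"pathToDeleteLess": "C:\\\\Videos\\\\dups", "sizeToDeleteGreater": "1000000"}'
for task_name, value in json.loads(example_target).items():
    print(task_name, "->", value)  # each pair selects one filter via substring matching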
+# //////////////////////////////////////////////////////////////////////////////
+# //////////////////////////////////////////////////////////////////////////////
+def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False, tagId=-1, advanceMenuOptionSelected=False):
+    if tagId == -1:
+        tagId = findCurrentTagId([duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, 'DuplicateMarkForDeletion', '_DuplicateMarkForDeletion'])
+    if int(tagId) < 0:
+        stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.")
+        return
+
+    excludedTags = [duplicateMarkForDeletion]
+    if clearAllDupfileManagerTags:
+        excludedTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag, graylistMarkForDeletion, longerDurationLowerResolution]
+
+    isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAdvanceMenuOptionSelected(advanceMenuOptionSelected)
+    if advanceMenuOptionSelected and deleteScenes and pathToDelete == "" and tagToDelete == "" and titleToDelete == "" and pathStrToDelete == "" and sizeToDelete == -1 and durationToDelete == -1 and resolutionToDelete == -1 and ratingToDelete == -1 and fileNotExistToDelete == False:
+        stash.Error("Running advance menu option with no options enabled.")
+        return
+
+    QtyDup = 0
+    QtyDeleted = 0
+    QtyClearedTags = 0
+    QtySetGraylistTag = 0
+    QtyFailedQuery = 0
+    stash.Debug("#########################################################################")
+    stash.startSpinningProcessBar()
+    scenes = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details title rating100')
+    stash.stopSpinningProcessBar()
+    qtyResults = len(scenes)
+    stash.Log(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion})")
+    stash.setProgressBarIter(qtyResults)
+    for scene in scenes:
+        QtyDup += 1
+        stash.progressBar(QtyDup, qtyResults)
+        # scene = stash.find_scene(sceneID['id'])
+        # if scene == None or len(scene) == 0:
+        #     stash.Warn(f"Could not get scene data for scene ID {scene['id']}.")
+        #     QtyFailedQuery += 1
+        #     continue
+        # stash.Trace(f"scene={scene}")
+        if clearTag:
+            QtyClearedTags += 1
+            # ToDo: Add logic to exclude graylistMarkForDeletion
+            tags = [int(item['id']) for item in scene["tags"] if item['name'] not in excludedTags]
+            # if clearAllDupfileManagerTags:
+            #     tags = []
+            #     for tag in scene["tags"]:
+            #         if tag['name'] in excludedTags:
+            #             continue
+            #         tags += [int(tag['id'])]
+            stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}")
+            dataDict = {'id' : scene['id']}
+            if addPrimaryDupPathToDetails:
+                sceneDetails = scene['details']
+                if sceneDetails.find(detailPrefix) == 0 and sceneDetails.find(detailPostfix) > 1:
+                    Pos1 = sceneDetails.find(detailPrefix)
+                    Pos2 = sceneDetails.find(detailPostfix)
+                    sceneDetails = sceneDetails[0:Pos1] + sceneDetails[Pos2 + len(detailPostfix):]
+                dataDict.update({'details' : sceneDetails})
+            dataDict.update({'tag_ids' : tags})
+            stash.Log(f"Updating scene with {dataDict};QtyClearedTags={QtyClearedTags};Count={QtyDup} of {qtyResults}")
+            stash.updateScene(dataDict)
+            # stash.removeTag(scene, duplicateMarkForDeletion)
+        elif setGrayListTag:
+            if stash.startsWithInList(graylist, scene['files'][0]['path']):
+                QtySetGraylistTag += 1
+                if stash.addTag(scene, graylistMarkForDeletion, ignoreAutoTag=True):
+                    stash.Log(f"Added tag {graylistMarkForDeletion} to scene {scene['files'][0]['path']};QtySetGraylistTag={QtySetGraylistTag};Count={QtyDup} of {qtyResults}")
+                else:
+                    stash.Trace(f"Scene already had tag {graylistMarkForDeletion}; {scene['files'][0]['path']}")
+        elif deleteScenes:
+            DupFileName = scene['files'][0]['path']
+            DupFileNameOnly = pathlib.Path(DupFileName).stem
+            if advanceMenuOptionSelected:
+                if isBlackList:
+                    if not stash.startsWithInList(blacklist, scene['files'][0]['path']):
+                        continue
+                if pathToDelete != "":
+                    if not DupFileName.lower().startswith(pathToDelete):
+                        stash.Debug(f"Skipping file {DupFileName} because it does not start with {pathToDelete}.")
+                        continue
+                if pathStrToDelete != "":
+                    if not pathStrToDelete in DupFileName.lower():
+                        stash.Debug(f"Skipping file {DupFileName} because it does not contain value {pathStrToDelete}.")
+                        continue
+                if sizeToDelete != -1:
+                    compareTo = int(scene['files'][0]['size'])
+                    if compareToLess:
+                        if not (compareTo < sizeToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > sizeToDelete):
+                            continue
+                    else:
+                        if not compareTo == sizeToDelete:
+                            continue
+                if durationToDelete != -1:
+                    compareTo = int(scene['files'][0]['duration'])
+                    if compareToLess:
+                        if not (compareTo < durationToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > durationToDelete):
+                            continue
+                    else:
+                        if not compareTo == durationToDelete:
+                            continue
+                if resolutionToDelete != -1:
+                    compareTo = int(scene['files'][0]['width']) * int(scene['files'][0]['height'])
+                    if compareToLess:
+                        if not (compareTo < resolutionToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > resolutionToDelete):
+                            continue
+                    else:
+                        if not compareTo == resolutionToDelete:
+                            continue
+                if ratingToDelete != -1:
+                    if scene['rating100'] == None or scene['rating100'] == "None":
+                        compareTo = 0
+                    else:
+                        compareTo = int(scene['rating100'])
+                    if compareToLess:
+                        if not (compareTo < ratingToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > ratingToDelete):
+                            continue
+                    else:
+                        if not compareTo == ratingToDelete:
+                            continue
+                if titleToDelete != "":
+                    if not titleToDelete in scene['title'].lower():
+                        stash.Debug(f"Skipping file {DupFileName} because it does not contain value {titleToDelete} in title ({scene['title']}).")
+                        continue
+                if tagToDelete != "":
+                    doProcessThis = False
+                    for tag in scene['tags']:
+                        if tag['name'].lower() == tagToDelete:
+                            doProcessThis = True
+                            break
+                    if doProcessThis == False:
+                        continue
+                if fileNotExistToDelete:
+                    if os.path.isfile(scene['files'][0]['path']):
+                        continue
+            stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+            if alternateTrashCanPath != "":
+                destPath = f"{alternateTrashCanPath}{os.sep}{DupFileNameOnly}"
+                if os.path.isfile(destPath):
+                    destPath = f"{alternateTrashCanPath}{os.sep}_{time.time()}_{DupFileNameOnly}"
+                shutil.move(DupFileName, destPath)
+            elif moveToTrashCan:
+                sendToTrash(DupFileName)
+            result = stash.destroyScene(scene['id'], delete_file=True)
+            QtyDeleted += 1
+            stash.Debug(f"destroyScene result={result} for file {DupFileName};QtyDeleted={QtyDeleted};Count={QtyDup} of {qtyResults}", toAscii=True)
+        else:
+            stash.Error("manageTagggedDuplicates called with invalid input arguments. Doing early exit.")
+            return
+    stash.Debug("#####################################################")
+    stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtySetGraylistTag={QtySetGraylistTag}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
+    killScanningJobs()
+    if deleteScenes and not advanceMenuOptionSelected:
+        if cleanAfterDel:
+            stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
+            stash.metadata_clean()
+            stash.metadata_clean_generated()
+            stash.optimise_database()
+
+
+def removeDupTag():
+    if 'Target' not in stash.JSON_INPUT['args']:
+        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+        return
+    scene = stash.JSON_INPUT['args']['Target']
+    stash.Log(f"Processing scene ID# {scene}")
+    stash.removeTag(scene, duplicateMarkForDeletion)
+    stash.Log(f"Done removing tag from scene {scene}.")
+    jsonReturn = "{'removeDupTag' : 'complete', 'id': '" + f"{scene}" + "'}"
+    stash.Log(f"Sending json value {jsonReturn}")
+    sys.stdout.write(jsonReturn)
+
+def addExcludeTag():
+    if 'Target' not in stash.JSON_INPUT['args']:
+        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+        return
+    scene = stash.JSON_INPUT['args']['Target']
+    stash.Log(f"Processing scene ID# {scene}")
+    stash.addTag(scene, excludeDupFileDeleteTag)
+    stash.Log(f"Done adding exclude tag to scene {scene}.")
+    sys.stdout.write("{" + f"addExcludeTag : 'complete', id: '{scene}'" + "}")
+
+def removeExcludeTag():
+    if 'Target' not in stash.JSON_INPUT['args']:
+        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+        return
+    scene = stash.JSON_INPUT['args']['Target']
+    stash.Log(f"Processing scene ID# {scene}")
+    stash.removeTag(scene, excludeDupFileDeleteTag)
+    stash.Log(f"Done removing exclude tag from scene {scene}.")
+    sys.stdout.write("{" + f"removeExcludeTag : 'complete', id: '{scene}'" + "}")
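# getParseData() below expects 'Target' to be two values joined by a colon:
# "sceneId1:sceneId2" for scene-to-scene tasks, or "sceneId:NewName" for
# renameFile. A minimal sketch of that contract (hypothetical IDs):
target_src = "1234:5678"
targets = target_src.split(":")
if len(targets) >= 2:
    print("target1 =", targets[0], "; target2 =", targets[1])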
+def getParseData(getSceneDetails1=True, getSceneDetails2=True):
+    if 'Target' not in stash.JSON_INPUT['args']:
+        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+        return None, None
+    targetsSrc = stash.JSON_INPUT['args']['Target']
+    targets = targetsSrc.split(":")
+    if len(targets) < 2:
+        stash.Error(f"Could not get both targets from string {targetsSrc}")
+        return None, None
+    stash.Log(f"Parsed targets {targets[0]} and {targets[1]}")
+    target1 = targets[0]
+    target2 = targets[1]
+    if getSceneDetails1:
+        target1 = stash.find_scene(int(target1))
+        if getSceneDetails2:
+            target2 = stash.find_scene(int(target2))
+    elif len(targets) > 2:
+        target2 = target2 + targets[2]
+    return target1, target2
+
+
+def mergeTags():
+    scene1, scene2 = getParseData()
+    if scene1 == None or scene2 == None:
+        sys.stdout.write("{" + f"mergeTags : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
+        return
+    stash.mergeMetadata(scene1, scene2)
+    stash.Log(f"Done merging scenes for scene {scene1['id']} and scene {scene2['id']}")
+    sys.stdout.write("{" + f"mergeTags : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}'" + "}")
+
+def getLocalDupReportPath():
+    htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
+    localPath = htmlReportName.replace("\\", "\\\\")
+    jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{localPath}" + "'}"
+    stash.Log(f"Sending json value {jsonReturn}")
+    sys.stdout.write(jsonReturn)
+
+def deleteLocalDupReportHtmlFiles(doJsonOutput = True):
+    x = 0
+    htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
+    if os.path.isfile(htmlReportName):
+        stash.Log(f"Deleting file {htmlReportName}")
+        os.remove(htmlReportName)
+        for x in range(2, 9999):
+            fileName = htmlReportName.replace(".html", f"_{x-1}.html")
+            stash.Debug(f"Checking if file '{fileName}' exist.")
+            if not os.path.isfile(fileName):
+                break
+            stash.Log(f"Deleting file {fileName}")
+            os.remove(fileName)
+    else:
+        stash.Log(f"Report file does not exist: {htmlReportName}")
+    if doJsonOutput:
+        jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{htmlReportName}" + "', 'qty': '" + f"{x}" + "'}"
+        stash.Log(f"Sending json value {jsonReturn}")
+        sys.stdout.write(jsonReturn)
+
+def removeTagFromAllScenes(tagName, deleteTags):
+    # ToDo: Replace code with SQL code if DB version 68
+    tagId = stash.find_tags(q=tagName)
+    if len(tagId) > 0 and 'id' in tagId[0]:
+        if deleteTags:
+            stash.Debug(f"Deleting tag name {tagName} with Tag ID {tagId[0]['id']} from stash.")
+            stash.destroy_tag(int(tagId[0]['id']))
+        else:
+            stash.Debug(f"Removing tag name {tagName} with Tag ID {tagId[0]['id']} from all scenes.")
+            manageTagggedDuplicates(clearTag=True, tagId=int(tagId[0]['id']))
+        return True
+    return False
+
+def removeAllDupTagsFromAllScenes(deleteTags=False):
+    tagsToClear = [duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, graylistMarkForDeletion, longerDurationLowerResolution, duplicateWhitelistTag]
+    for x in range(0, 3):
+        tagsToClear += [base1_duplicateMarkForDeletion + f"_{x}"]
+    for x in range(0, 3):
+        tagsToClear += [base2_duplicateMarkForDeletion + f"_{x}"]
+    tagsToClear = list(set(tagsToClear)) # Remove duplicates
+    validTags = []
+    for tagToClear in tagsToClear:
+        if removeTagFromAllScenes(tagToClear, deleteTags):
+            validTags += [tagToClear]
+    if doJsonReturn:
+        jsonReturn = "{'removeAllDupTagFromAllScenes' : " + f"{duplicateMarkForDeletion}" + ", 'OtherTags': '" + f"{validTags}" + "'}"
+        stash.Log(f"Sending json value {jsonReturn}")
+        sys.stdout.write(jsonReturn)
+    else:
+        stash.Log(f"Clear tags {tagsToClear}")
+
+def insertDisplayNone(htmlReportName, scene):
+    stash.Log(f"Inserting display none for scene {scene} in file {htmlReportName}")
+    import in_place
+    doStyleEndTagCheck = True
+    with in_place.InPlace(htmlReportName) as file:
+        for line in file:
+            if doStyleEndTagCheck and line.startswith("</style>"):
+                file.write(f".ID_{scene}" + "{display:none;}")
+                doStyleEndTagCheck = False
+            file.write(line)
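# insertDisplayNone() above depends on the third-party in_place module
# (pip install in_place). A stdlib-only sketch of the same edit, assuming the
# report file fits in memory (hypothetical helper, not part of this patch):
def insert_display_none_stdlib(report_path, scene_id):
    with open(report_path, "r", encoding="utf-8") as f:
        lines = f.readlines()
    with open(report_path, "w", encoding="utf-8") as f:
        injected = False
        for line in lines:
            # write the CSS hide rule once, just before the closing style tag
            if not injected and line.lstrip().startswith("</style>"):
                f.write(f".ID_{scene_id}" + "{display:none;}\n")
                injected = True
            f.write(line)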
+def hideScene(scene):
+    if os.path.isfile(htmlReportName):
+        insertDisplayNone(htmlReportName, scene)
+        for x in range(2, 9999):
+            fileName = htmlReportName.replace(".html", f"_{x-1}.html")
+            stash.Debug(f"Checking if file '{fileName}' exist.")
+            if not os.path.isfile(fileName):
+                break
+            insertDisplayNone(fileName, scene)
+    else:
+        stash.Log(f"Report file does not exist: {htmlReportName}")
+
+def deleteScene(hideInReport=True, deleteFile=True):
+    if 'Target' not in stash.JSON_INPUT['args']:
+        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+        return
+    scene = stash.JSON_INPUT['args']['Target']
+    stash.Log(f"Processing scene ID# {scene}")
+    result = stash.destroyScene(scene, delete_file=deleteFile)
+    if hideInReport:
+        hideScene(scene)
+    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene} with results = {result}")
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id: '{scene}', result: '{result}'" + "}")
+
+def copyScene(moveScene=False):
+    scene1, scene2 = getParseData()
+    if scene1 == None or scene2 == None:
+        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
+        return
+    if moveScene:
+        stash.mergeMetadata(scene1, scene2)
+    result = shutil.copy(scene1['files'][0]['path'], scene2['files'][0]['path'])
+    if moveScene:
+        result = stash.destroyScene(scene1['id'], delete_file=True)
+        stash.Log(f"destroyScene for scene {scene1['id']} results = {result}")
+    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene1['id']} and {scene2['id']}")
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}', result: '{result}'" + "}")
+
+def renameFile():
+    scene, newName = getParseData(getSceneDetails2=False)
+    if scene == None or newName == None:
+        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', newName: '{newName}'" + "}")
+        return
+    newName = scene['files'][0]['path'].replace(pathlib.Path(scene['files'][0]['path']).stem, newName)
+    result = os.rename(scene['files'][0]['path'], newName)
+    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene['id']}; renamed to {newName}; result={result}")
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene['id']}', newName: '{newName}', result: '{result}'" + "}")
+
+
+# ToDo: Add to UI menu
+#     Remove all Dup tagged files (Just remove from stash, and leave file)
+#     Clear GraylistMarkForDel tag
+#     Delete GraylistMarkForDel tag
+#     Remove from stash all files no longer part of stash library
+#     Remove from stash all files in the Exclusion list (Not supporting regexps)
+# ToDo: Add to advance menu
+#     Remove only graylist dup
+#     Exclude graylist from delete
+#     Include graylist in delete
+
+try:
+    if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+        mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
+        manageTagggedDuplicates(deleteScenes=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
+        mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "clear_duplicate_tags_task":
+        removeAllDupTagsFromAllScenes()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "graylist_tag_task":
+        manageTagggedDuplicates(setGrayListTag=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
+        stash.metadata_generate({"phashes": True})
+
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "deleteScene": + deleteScene() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "removeScene": + deleteScene(deleteFile=False) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "renameFile": + renameFile() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "copyScene": + copyScene() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "moveScene": + copyScene(moveScene=True) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "removeDupTag": + removeDupTag() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "addExcludeTag": + addExcludeTag() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "removeExcludeTag": + removeExcludeTag() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "mergeTags": + mergeTags() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "getLocalDupReportPath": + getLocalDupReportPath() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "deleteLocalDupReportHtmlFiles": + deleteLocalDupReportHtmlFiles() + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "createDuplicateReportWithoutTagging": + mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "deleteAllDupFileManagerTags": + removeAllDupTagsFromAllScenes(deleteTags=True) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesTask": + mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "deleteTaggedDuplicatesLwrResOrLwrDuration": + mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteLowerResAndDuration=True) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration": + mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True, deleteLowerResAndDuration=True) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + elif parse_args.dup_tag: + stash.PLUGIN_TASK_NAME = "dup_tag" + mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) + stash.Debug(f"Tag duplicate EXIT") + elif parse_args.del_tag: + stash.PLUGIN_TASK_NAME = "del_tag" + manageTagggedDuplicates(deleteScenes=True) + stash.Debug(f"Delete Tagged duplicates EXIT") + elif parse_args.clear_tag: + stash.PLUGIN_TASK_NAME = "clear_tag" + removeAllDupTagsFromAllScenes() + stash.Debug(f"Clear duplicate tags EXIT") + elif parse_args.remove: + stash.PLUGIN_TASK_NAME = "remove" + mangeDupFiles(deleteDup=True, merge=mergeDupFilename) + stash.Debug(f"Delete duplicate EXIT") + elif len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in advanceMenuOptions: + manageTagggedDuplicates(deleteScenes=True, advanceMenuOptionSelected=True) + stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") + else: + stash.Log(f"Nothing to do!!! 
(PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})") +except Exception as e: + tb = traceback.format_exc() + + stash.Error(f"Exception while running DupFileManager Task({stash.PLUGIN_TASK_NAME}); Error: {e}\nTraceBack={tb}") + killScanningJobs() + stash.convertToAscii = False + stash.Error(f"Error: {e}\nTraceBack={tb}") + if doJsonReturn: + sys.stdout.write("{" + f"Exception : '{e}; See log file for TraceBack' " + "}") + +stash.Log("\n*********************************\nEXITING ***********************\n*********************************") diff --git a/plugins/DupFileManager/DupFileManager.js b/plugins/DupFileManager/DupFileManager.js new file mode 100644 index 00000000..994a4abd --- /dev/null +++ b/plugins/DupFileManager/DupFileManager.js @@ -0,0 +1,310 @@ +(function () { + /*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */ + !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else 
if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var 
t;r.appendChild(e).innerHTML="",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function 
A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="",le.option=!!xe.lastChild;var ke={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n",""]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="
",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 0 { + var LocalDuplicateReportPath = GetLocalDuplicateReportPath(); + console.log(LocalDupReportExist); + var MyHeader = React.createElement("h1", null, "DupFileManager Report Menu"); + if (LocalDupReportExist) + return (React.createElement("center", null, + MyHeader, + GetShowReportButton(LocalDuplicateReportPath, "Show Duplicate-File Report"), + React.createElement("p", null), + GetAdvanceMenuButton(), + React.createElement("p", null), + GetCreateReportNoTagButton("Create New Report (NO Tagging)"), + React.createElement("p", null), + GetCreateReportButton("Create New Report with Tagging"), + React.createElement("p", null), + ToolsMenuOptionButton + )); + return (React.createElement("center", null, + MyHeader, + GetCreateReportNoTagButton("Create Duplicate-File Report (NO Tagging)"), + React.createElement("p", null), + GetCreateReportButton("Create Duplicate-File Report with Tagging"), + React.createElement("p", null), + ToolsMenuOptionButton + )); + }; + const CreateReport = () => { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to create report. This may take a while. Please standby."})); + RunPluginDupFileManager("tag_duplicates_task"); + return (React.createElement("center", null, + React.createElement("h1", null, "Report complete. Click [Show Report] to view report."), + GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"), + React.createElement("p", null), + GetAdvanceMenuButton(), + React.createElement("p", null), DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + }; + const CreateReportWithNoTagging = () => { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to create report. Please standby."})); + RunPluginDupFileManager("createDuplicateReportWithoutTagging"); + return (React.createElement("center", null, + React.createElement("h1", null, "Created HTML report without tagging. 
Click [Show Report] to view report."), + GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"), + React.createElement("p", null), + GetAdvanceMenuButton(), + React.createElement("p", null), DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + }; + const ToolsAndUtilities = () => { + return (React.createElement("center", null, + React.createElement("h1", null, "DupFileManager Tools and Utilities"), + React.createElement("p", null), + + React.createElement("h3", {class:"submenu"}, "Report Options"), + React.createElement("p", null), + GetCreateReportNoTagButton("Create Report (NO Tagging)"), + React.createElement("p", null), + GetCreateReportButton("Create Report (Tagging)"), + React.createElement("p", null), + DupFileManagerReportMenuButton, + React.createElement("p", null), + GetShowReportButton(GetLocalDuplicateReportPath(), "Show Duplicate-File Report"), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_deleteLocalDupReportHtmlFiles", title: "Delete local HTML duplicate file report." }, React.createElement(Button, null, "Delete Duplicate-File Report HTML Files")), + React.createElement("hr", {class:"dotted"}), + + React.createElement("h3", {class:"submenu"}, "Tagged Duplicates Options"), + React.createElement("p", null), + GetAdvanceMenuButton(), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_deleteTaggedDuplicatesTask", title: "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion)." }, React.createElement(Button, null, "Delete Tagged Duplicates")), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask", title: "Delete scenes only in blacklist which where previously given duplicate tag (_DuplicateMarkForDeletion)." }, React.createElement(Button, null, "Delete Tagged Duplicates in Blacklist Only")), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration", title: "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion) and lower resultion or duration compare to primary (ToKeep) duplicate." }, React.createElement(Button, null, "Delete Low Res/Dur Tagged Duplicates")), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", title: "Delete scenes only in blacklist which where previously given duplicate tag (_DuplicateMarkForDeletion) and lower resultion or duration compare to primary (ToKeep) duplicate." }, React.createElement(Button, null, "Delete Low Res/Dur Tagged Duplicates in Blacklist Only")), + React.createElement("p", null), + React.createElement("hr", {class:"dotted"}), + + React.createElement("h3", {class:"submenu"}, "Tagged Management Options"), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_ClearAllDuplicateTags", title: "Remove duplicate tag from all scenes. This task may take some time to complete." }, React.createElement(Button, null, "Clear All Duplicate Tags")), + React.createElement("p", null), + React.createElement(Link, { to: "/plugin/DupFileManager_deleteAllDupFileManagerTags", title: "Delete all DupFileManager tags from stash." 
+            React.createElement("p", null),
+            React.createElement(Link, { to: "/plugin/DupFileManager_tagGrayList", title: "Set tag _GraylistMarkForDeletion on scenes that have the DuplicateMarkForDeletion tag and are in the Graylist." }, React.createElement(Button, null, "Tag Graylist")),
+            React.createElement("hr", {class:"dotted"}),
+
+            React.createElement("h3", {class:"submenu"}, "Miscellaneous Options"),
+            React.createElement(Link, { to: "/plugin/DupFileManager_generatePHASH_Matching", title: "Generate PHASH (Perceptual hashes) matching. Used for file comparisons." }, React.createElement(Button, null, "Generate PHASH (Perceptual hashes) Matching")),
+            React.createElement("p", null),
+            React.createElement("p", null),
+            React.createElement("p", null),
+            React.createElement("p", null),
+        ));
+    };
+    const ClearAllDuplicateTags = () => {
+        const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
+        if (componentsLoading)
+            return (React.createElement(LoadingIndicator, {message: "Running task to clear duplicate tags in background. This may take a while. Please standby."}));
+        RunPluginDupFileManager("clear_duplicate_tags_task");
+        return (React.createElement("div", null,
+            React.createElement("h1", null, "Removed duplicate tags from all scenes."),
+            DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
+        ));
+    };
+    const deleteLocalDupReportHtmlFiles = () => {
+        const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
+        if (componentsLoading)
+            return (React.createElement(LoadingIndicator, {message: "Running task to delete HTML files. Please standby."}));
+        RunPluginDupFileManager("deleteLocalDupReportHtmlFiles");
+        return (React.createElement("div", null,
+            React.createElement("h2", null, "Deleted the HTML duplicate file report from local files."),
+            DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
+        ));
+    };
+    const deleteAllDupFileManagerTags = () => {
+        const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
+        if (componentsLoading)
+            return (React.createElement(LoadingIndicator, {message: "Running task to delete all DupFileManager tags in background. This may take a while. Please standby."}));
+        RunPluginDupFileManager("deleteAllDupFileManagerTags");
+        return (React.createElement("div", null,
+            React.createElement("h1", null, "Deleted all DupFileManager tags."),
+            DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
+        ));
+    };
+    const generatePHASH_Matching = () => {
+        const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
+        if (componentsLoading)
+            return (React.createElement(LoadingIndicator, {message: "Running task to generate PHASH (Perceptual hashes) matching in background. This may take a while.
Please standby."})); + RunPluginDupFileManager("generate_phash_task"); + return (React.createElement("div", null, + React.createElement("h1", null, "PHASH (Perceptual hashes) complete."), + DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + }; + const tagGrayList = () => { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to tag _GraylistMarkForDeletion to scenes having DuplicateMarkForDeletion tag and that are in the Graylist. This may take a while. Please standby."})); + RunPluginDupFileManager("graylist_tag_task"); + return (React.createElement("div", null, + React.createElement("h1", null, "Gray list tagging complete."), + DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + }; + const deleteTaggedDuplicatesTask = () => { + let result = confirm("Are you sure you want to delete all scenes having _DuplicateMarkForDeletion tags? This will delete the files, and remove them from stash."); + if (result) + { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes with _DuplicateMarkForDeletion tag. This may take a while. Please standby."})); + RunPluginDupFileManager("delete_tagged_duplicates_task"); + return (React.createElement("div", null, + React.createElement("h1", null, "Scenes with dup tag deleted."), + DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + } + return ToolsAndUtilities(); + }; + const deleteBlackListTaggedDuplicatesTask = () => { + let result = confirm("Are you sure you want to delete all scenes in blacklist having _DuplicateMarkForDeletion tags? This will delete the files, and remove tem from stash."); + if (result) + { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag. This may take a while. Please standby."})); + RunPluginDupFileManager("deleteBlackListTaggedDuplicatesTask"); + return (React.createElement("div", null, + React.createElement("h1", null, "Blacklist scenes with dup tag deleted."), + DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + } + return ToolsAndUtilities(); + }; + const deleteTaggedDuplicatesLwrResOrLwrDuration = () => { + let result = confirm("Are you sure you want to delete scenes having _DuplicateMarkForDeletion tags and lower resultion or duration? This will delete the files, and remove them from stash."); + if (result) + { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes with _DuplicateMarkForDeletion tag and lower resultion or duration. This may take a while. 
Please standby."})); + RunPluginDupFileManager("deleteTaggedDuplicatesLwrResOrLwrDuration"); + return (React.createElement("div", null, + React.createElement("h1", null, "Scenes with dup tag and lower resultion or duration deleted."), + DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + } + return ToolsAndUtilities(); + }; + const deleteBlackListTaggedDuplicatesLwrResOrLwrDuration = () => { + let result = confirm("Are you sure you want to delete scenes in blacklist having _DuplicateMarkForDeletion tags and lower resultion or duration? This will delete the files, and remove tem from stash."); + if (result) + { + const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); + if (componentsLoading) + return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag and lower resultion or duration. This may take a while. Please standby."})); + RunPluginDupFileManager("deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"); + return (React.createElement("div", null, + React.createElement("h1", null, "Blacklist scenes with dup tag and lower resultion or duration deleted."), + DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton + )); + } + return ToolsAndUtilities(); + }; + PluginApi.register.route("/plugin/DupFileManager", HomePage); + PluginApi.register.route("/plugin/DupFileManager_CreateReport", CreateReport); + PluginApi.register.route("/plugin/DupFileManager_CreateReportWithNoTagging", CreateReportWithNoTagging); + PluginApi.register.route("/plugin/DupFileManager_ToolsAndUtilities", ToolsAndUtilities); + PluginApi.register.route("/plugin/DupFileManager_ClearAllDuplicateTags", ClearAllDuplicateTags); + PluginApi.register.route("/plugin/DupFileManager_deleteLocalDupReportHtmlFiles", deleteLocalDupReportHtmlFiles); + PluginApi.register.route("/plugin/DupFileManager_deleteAllDupFileManagerTags", deleteAllDupFileManagerTags); + PluginApi.register.route("/plugin/DupFileManager_generatePHASH_Matching", generatePHASH_Matching); + PluginApi.register.route("/plugin/DupFileManager_tagGrayList", tagGrayList); + PluginApi.register.route("/plugin/DupFileManager_deleteTaggedDuplicatesTask", deleteTaggedDuplicatesTask); + PluginApi.register.route("/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask", deleteBlackListTaggedDuplicatesTask); + PluginApi.register.route("/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration", deleteTaggedDuplicatesLwrResOrLwrDuration); + PluginApi.register.route("/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", deleteBlackListTaggedDuplicatesLwrResOrLwrDuration); + PluginApi.patch.before("SettingsToolsSection", function (props) { + const { Setting, } = PluginApi.components; + return [ + { + children: (React.createElement(React.Fragment, null, + props.children, + React.createElement(Setting, { heading: React.createElement(Link, { to: "/plugin/DupFileManager", title: ReportMenuButtonToolTip }, React.createElement(Button, null, "Duplicate File Report (DupFileManager)"))}), + React.createElement(Setting, { heading: React.createElement(Link, { to: "/plugin/DupFileManager_ToolsAndUtilities", title: ToolsMenuToolTip }, React.createElement(Button, null, "DupFileManager Tools and Utilities"))}), + )), + }, + ]; + }); + PluginApi.patch.before("MainNavBar.UtilityItems", function (props) { + const { Icon, } = PluginApi.components; + return [ + { + children: 
(React.createElement(React.Fragment, null, + props.children, + React.createElement(NavLink, { className: "nav-utility", exact: true, to: "/plugin/DupFileManager" }, + React.createElement(Button, { className: "minimal d-flex align-items-center h-100", title: ReportMenuButtonToolTip }, + React.createElement(Icon, { icon: faEthernet }))))) + } + ]; + }); +})(); diff --git a/plugins/DupFileManager/DupFileManager.js.map b/plugins/DupFileManager/DupFileManager.js.map new file mode 100644 index 00000000..5fdfda50 --- /dev/null +++ b/plugins/DupFileManager/DupFileManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"DupFileManager.js","sourceRoot":"","sources":["../src/DupFileManager.tsx"],"names":[],"mappings":";AA0CA,CAAC;IACC,MAAM,SAAS,GAAI,MAAc,CAAC,SAAuB,CAAC;IAC1D,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,CAAC;IAC9B,MAAM,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC;IAE1B,MAAM,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC;IACjD,MAAM,EAAE,UAAU,EAAE,GAAG,SAAS,CAAC,SAAS,CAAC,gBAAgB,CAAC;IAC5D,MAAM,EACJ,IAAI,EACJ,OAAO,GACR,GAAG,SAAS,CAAC,SAAS,CAAC,cAAc,CAAC;IAEvC,MAAM,EACJ,QAAQ,EACT,GAAG,SAAS,CAAC,KAAK,CAAC;IAEpB,SAAS,CAAC,KAAK,CAAC,gBAAgB,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAA;IAEtJ,MAAM,cAAc,GAEf,CAAC,EAAE,SAAS,EAAE,EAAE,EAAE;QACrB,8EAA8E;QAC9E,yDAAyD;QACzD,MAAM,EACJ,YAAY,GACb,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,MAAM,cAAc,GAAG,KAAK,CAAC,OAAO,CAClC,GAAG,EAAE;;YAAC,OAAA,CACJ,6BAAK,SAAS,EAAC,yBAAyB;gBACtC,oBAAC,IAAI,IAAC,EAAE,EAAE,eAAe,SAAS,CAAC,EAAE,EAAE;oBACrC,6BACE,SAAS,EAAC,iBAAiB,EAC3B,GAAG,EAAE,MAAA,SAAS,CAAC,IAAI,mCAAI,EAAE,EACzB,GAAG,EAAE,MAAA,SAAS,CAAC,UAAU,mCAAI,EAAE,GAC/B,CACG,CACH,CACP,CAAA;SAAA,EACD,CAAC,SAAS,CAAC,CACZ,CAAC;QAEF,OAAO,CACL,oBAAC,YAAY,IACX,SAAS,EAAC,uBAAuB,EACjC,SAAS,EAAC,KAAK,EACf,OAAO,EAAE,cAAc,EACvB,UAAU,EAAE,GAAG;YAEf,2BAAG,IAAI,EAAE,QAAQ,CAAC,sBAAsB,CAAC,SAAS,CAAC,IAAG,SAAS,CAAC,IAAI,CAAK,CAC5D,CAChB,CAAC;IACJ,CAAC,CAAC;IAEF,SAAS,YAAY,CAAC,KAAU;QAC9B,MAAM,EACJ,OAAO,GACR,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,SAAS,qBAAqB;YAC5B,IAAI,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,IAAI,CAAC;gBAAE,OAAO;YAE/C,OAAO,CACL,6BAAK,SAAS,EAAC,wBAAwB,IACpC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,SAAc,EAAE,EAAE,CAAC,CAC9C,oBAAC,cAAc,IAAC,SAAS,EAAE,SAAS,EAAE,GAAG,EAAE,SAAS,CAAC,EAAE,GAAI,CAC5D,CAAC,CACE,CACP,CAAC;QACJ,CAAC;QAED,SAAS,eAAe;YACtB,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC;gBAAE,OAAO;YAEzC,OAAO,CACL,6BAAK,SAAS,EAAC,kBAAkB,IAC9B,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAQ,EAAE,EAAE,CAAC,CAClC,oBAAC,OAAO,IAAC,GAAG,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,EAAE,GAAG,GAAI,CACnC,CAAC,CACE,CACP,CAAC;QACJ,CAAC;QAED,OAAO,CACL,6BAAK,SAAS,EAAC,qBAAqB;YAClC,8BAAM,SAAS,EAAC,kBAAkB,IAAE,KAAK,CAAC,KAAK,CAAC,IAAI,CAAQ;YAC3D,qBAAqB,EAAE;YACvB,eAAe,EAAE,CACd,CACP,CAAC;IACJ,CAAC;IAED,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,mBAAmB,EAAE,UAAU,KAAU,EAAE,CAAM,EAAE,QAAa;QACtF,OAAO,oBAAC,YAAY,OAAK,KAAK,GAAI,CAAC;IACrC,CAAC,CAAC,CAAC;IAEH,MAAM,QAAQ,GAAa,GAAG,EAAE;QAC9B,MAAM,iBAAiB,GAAG,SAAS,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC,SAAS,CAAC,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;QAEtG,MAAM,EACJ,SAAS,EACT,gBAAgB,GACjB,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,mDAAmD;QACnD,MAAM,EAAE,IAAI,EAAE,GAAG,GAAG,CAAC,kBAAkB,CAAC;YACtC,SAAS,EAAE;gBACT,MAAM,EAAE;oBACN,QAAQ,EAAE,CAAC;oBACX,IAAI,EAAE,QAAQ;iBACf;aACF;SACF,CAAC,CAAC;QAEH,MAAM,KAAK,GAAG,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QAEzC,IAAI,iBAAiB;YAAE,OAAO,CAC5B,oBAAC,gBAAgB,OAAG,CACrB,CAAC;QAEF,OAAO,CACL;YACE,wDAA+B;YAC9B,CAAC,CAAC,KAAK,IAAI,oBAAC,SAAS,IAAC,KAAK,E
AAE,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,GAAI,CACvD,CACP,CAAC;IACJ,CAAC,CAAC;IAEF,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,oBAAoB,EAAE,QAAQ,CAAC,CAAC;IAEzD,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,sBAAsB,EAAE,UAAU,KAAU;QACjE,MAAM,EACJ,OAAO,GACR,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,OAAO;YACL;gBACE,QAAQ,EAAE,CACR;oBACG,KAAK,CAAC,QAAQ;oBACf,oBAAC,OAAO,IACN,OAAO,EACL,oBAAC,IAAI,IAAC,EAAE,EAAC,oBAAoB;4BAC3B,oBAAC,MAAM,oBAEE,CACJ,GAET,CACD,CACJ;aACF;SACF,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,yBAAyB,EAAE,UAAU,KAAU;QACpE,MAAM,EACJ,IAAI,GACL,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,OAAO;YACL;gBACE,QAAQ,EAAE,CACR;oBACG,KAAK,CAAC,QAAQ;oBACf,oBAAC,OAAO,IACN,SAAS,EAAC,aAAa,EACvB,KAAK,QACL,EAAE,EAAC,oBAAoB;wBAEvB,oBAAC,MAAM,IACL,SAAS,EAAC,yCAAyC,EACnD,KAAK,EAAC,WAAW;4BAEjB,oBAAC,IAAI,IAAC,IAAI,EAAE,UAAU,GAAI,CACnB,CACD,CACT,CACJ;aACF;SACF,CAAA;IACH,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,EAAE,CAAC"} \ No newline at end of file diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py index c9ef4a16..16625534 100644 --- a/plugins/DupFileManager/DupFileManager.py +++ b/plugins/DupFileManager/DupFileManager.py @@ -3,31 +3,60 @@ # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager # Note: To call this script outside of Stash, pass argument --url # Example: python DupFileManager.py --url http://localhost:9999 -a -import os, sys, time, pathlib, argparse, platform, shutil, logging +try: + import ModulesValidate + ModulesValidate.modulesInstalled(["send2trash", "requests"], silent=True) +except Exception as e: + import traceback, sys + tb = traceback.format_exc() + print(f"ModulesValidate Exception. Error: {e}\nTraceBack={tb}", file=sys.stderr) +import os, sys, time, pathlib, argparse, platform, shutil, traceback, logging, requests +from datetime import datetime from StashPluginHelper import StashPluginHelper +from stashapi.stash_types import PhashDistance from DupFileManager_config import config # Import config from DupFileManager_config.py +from DupFileManager_report_config import report_config + +# ToDo: make sure the following line of code works +config |= report_config parser = argparse.ArgumentParser() parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.') +parser.add_argument('--clear_dup_tag', '-c', dest='clear_tag', action='store_true', help='Clear duplicates of duplicate tags.') parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.') parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.') parse_args = parser.parse_args() settings = { + "matchDupDistance": 0, "mergeDupFilename": False, - "permanentlyDelete": False, "whitelistDelDupInSameFolder": False, - "whitelistDoTagLowResDup": False, - "zCleanAfterDel": False, - "zSwapHighRes": False, - "zSwapLongLength": False, + "zvWhitelist": "", + "zwGraylist": "", + "zxBlacklist": "", + "zyMaxDupToProcess": 0, + "zySwapHighRes": False, + "zySwapLongLength": False, + "zySwapBetterBitRate": False, + "zySwapCodec": False, + "zySwapBetterFrameRate": False, + "zzDebug": False, + "zzTracing": False, + + "zzObsoleteSettingsCheckVer2": False, # This is a hidden variable 
that is NOT displayed in the UI
+
+    # Obsolete setting names
     "zWhitelist": "",
     "zxGraylist": "",
     "zyBlacklist": "",
-    "zyMaxDupToProcess": 0,
-    "zzdebugTracing": False,
+    "zyMatchDupDistance": 0,
+    "zSwapHighRes": False,
+    "zSwapLongLength": False,
+    "zSwapBetterBitRate": False,
+    "zSwapCodec": False,
+    "zSwapBetterFrameRate": False,
 }
 stash = StashPluginHelper(
     stash_url=parse_args.stash_url,
@@ -35,64 +64,172 @@
     settings=settings,
     config=config,
     maxbytes=10*1024*1024,
+    DebugTraceFieldName="zzTracing",
+    DebugFieldName="zzDebug",
 )
+stash.convertToAscii = True
+
+advanceMenuOptions = [ "applyCombo", "applyComboBlacklist", "pathToDelete", "pathToDeleteBlacklist", "sizeToDeleteLess", "sizeToDeleteGreater", "sizeToDeleteBlacklistLess", "sizeToDeleteBlacklistGreater", "durationToDeleteLess", "durationToDeleteGreater", "durationToDeleteBlacklistLess", "durationToDeleteBlacklistGreater",
+                      "commonResToDeleteLess", "commonResToDeleteEq", "commonResToDeleteGreater", "commonResToDeleteBlacklistLess", "commonResToDeleteBlacklistEq", "commonResToDeleteBlacklistGreater", "resolutionToDeleteLess", "resolutionToDeleteEq", "resolutionToDeleteGreater",
+                      "resolutionToDeleteBlacklistLess", "resolutionToDeleteBlacklistEq", "resolutionToDeleteBlacklistGreater", "ratingToDeleteLess", "ratingToDeleteEq", "ratingToDeleteGreater", "ratingToDeleteBlacklistLess", "ratingToDeleteBlacklistEq", "ratingToDeleteBlacklistGreater",
+                      "tagToDelete", "tagToDeleteBlacklist", "titleToDelete", "titleToDeleteBlacklist", "pathStrToDelete", "pathStrToDeleteBlacklist"]
+
+doJsonReturnModeTypes = ["tag_duplicates_task", "removeDupTag", "addExcludeTag", "removeExcludeTag", "mergeTags", "getLocalDupReportPath",
+                         "createDuplicateReportWithoutTagging", "deleteLocalDupReportHtmlFiles", "clear_duplicate_tags_task",
+                         "deleteAllDupFileManagerTags", "deleteBlackListTaggedDuplicatesTask", "deleteTaggedDuplicatesLwrResOrLwrDuration",
+                         "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", "create_duplicate_report_task"]
+doJsonReturnModeTypes += advanceMenuOptions
+doJsonReturn = False
+if len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in doJsonReturnModeTypes:
+    doJsonReturn = True
+    stash.log_to_norm = stash.LogTo.FILE
+elif stash.PLUGIN_TASK_NAME == "doEarlyExit":
+    time.sleep(3)
+    stash.Log("Doing early exit because of task name")
+    time.sleep(3)
+    exit(0)
+
+stash.Log("******************* Starting *******************")
 if len(sys.argv) > 1:
     stash.Log(f"argv = {sys.argv}")
 else:
-    stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
-stash.Status(logLevel=logging.DEBUG)
+    stash.Debug(f"No command line arguments.
JSON_INPUT['args'] = {stash.JSON_INPUT['args']}; PLUGIN_TASK_NAME = {stash.PLUGIN_TASK_NAME}; argv = {sys.argv}") +stash.status(logLevel=logging.DEBUG) -# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") -# stash.encodeToUtf8 = True +obsoleteSettingsToConvert = {"zWhitelist" : "zvWhitelist", "zxGraylist" : "zwGraylist", "zyBlacklist" : "zxBlacklist", "zyMatchDupDistance" : "matchDupDistance", "zSwapHighRes" : "zySwapHighRes", "zSwapLongLength" : "zySwapLongLength", "zSwapBetterBitRate" : "zySwapBetterBitRate", "zSwapCodec" : "zySwapCodec", "zSwapBetterFrameRate" : "zySwapBetterFrameRate"} +stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "zzObsoleteSettingsCheckVer2") -LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE +LOG_STASH_N_PLUGIN = stash.LogTo.STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LogTo.CONSOLE + stash.LogTo.FILE listSeparator = stash.Setting('listSeparator', ',', notEmpty=True) addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails') +clearAllDupfileManagerTags = stash.Setting('clearAllDupfileManagerTags') +doGeneratePhash = stash.Setting('doGeneratePhash') mergeDupFilename = stash.Setting('mergeDupFilename') moveToTrashCan = False if stash.Setting('permanentlyDelete') else True alternateTrashCanPath = stash.Setting('dup_path') whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder') -whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup') +graylistTagging = stash.Setting('graylistTagging') maxDupToProcess = int(stash.Setting('zyMaxDupToProcess')) -swapHighRes = stash.Setting('zSwapHighRes') -swapLongLength = stash.Setting('zSwapLongLength') -significantTimeDiff = stash.Setting('significantTimeDiff') +significantTimeDiff = float(stash.Setting('significantTimeDiff')) toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap') -cleanAfterDel = stash.Setting('zCleanAfterDel') -duration_diff = float(stash.Setting('duration_diff')) -if duration_diff > 10: - duration_diff = 10 -elif duration_diff < 1: - duration_diff = 1 +cleanAfterDel = stash.Setting('cleanAfterDel') + +swapHighRes = stash.Setting('zySwapHighRes') +swapLongLength = stash.Setting('zySwapLongLength') +swapBetterBitRate = stash.Setting('zySwapBetterBitRate') +swapCodec = stash.Setting('zySwapCodec') +swapBetterFrameRate = stash.Setting('zySwapBetterFrameRate') +favorLongerFileName = stash.Setting('favorLongerFileName') +favorLargerFileSize = stash.Setting('favorLargerFileSize') +favorBitRateChange = stash.Setting('favorBitRateChange') +favorHighBitRate = stash.Setting('favorHighBitRate') +favorFrameRateChange = stash.Setting('favorFrameRateChange') +favorHigherFrameRate = stash.Setting('favorHigherFrameRate') +favorCodecRanking = stash.Setting('favorCodecRanking') +codecRankingSetToUse = stash.Setting('codecRankingSetToUse') +if codecRankingSetToUse == 4: + codecRanking = stash.Setting('codecRankingSet4') +elif codecRankingSetToUse == 3: + codecRanking = stash.Setting('codecRankingSet3') +elif codecRankingSetToUse == 2: + codecRanking = stash.Setting('codecRankingSet2') +else: + codecRanking = stash.Setting('codecRankingSet1') +skipIfTagged = stash.Setting('skipIfTagged') +killScanningPostProcess = stash.Setting('killScanningPostProcess') +tagLongDurationLowRes = 
stash.Setting('tagLongDurationLowRes')
+bitRateIsImporantComp = stash.Setting('bitRateIsImporantComp')
+codecIsImporantComp = stash.Setting('codecIsImporantComp')
+
+excludeFromReportIfSignificantTimeDiff = False
+
+matchDupDistance = int(stash.Setting('matchDupDistance'))
+matchPhaseDistance = PhashDistance.EXACT
+matchPhaseDistanceText = "Exact Match"
+if (stash.PLUGIN_TASK_NAME == "tag_duplicates_task" or stash.PLUGIN_TASK_NAME == "create_duplicate_report_task") and 'Target' in stash.JSON_INPUT['args']:
+    stash.enableProgressBar(False)
+    if stash.JSON_INPUT['args']['Target'].startswith("0"):
+        matchDupDistance = 0
+    elif stash.JSON_INPUT['args']['Target'].startswith("1"):
+        matchDupDistance = 1
+    elif stash.JSON_INPUT['args']['Target'].startswith("2"):
+        matchDupDistance = 2
+    elif stash.JSON_INPUT['args']['Target'].startswith("3"):
+        matchDupDistance = 3
+
+    if stash.JSON_INPUT['args']['Target'].find(":") == 1:
+        significantTimeDiff = float(stash.JSON_INPUT['args']['Target'][2:])
+        excludeFromReportIfSignificantTimeDiff = True
+
+if matchDupDistance == 1:
+    matchPhaseDistance = PhashDistance.HIGH
+    matchPhaseDistanceText = "High Match"
+elif matchDupDistance == 2:
+    matchPhaseDistance = PhashDistance.MEDIUM
+    matchPhaseDistanceText = "Medium Match"
+elif matchDupDistance == 3:
+    matchPhaseDistance = PhashDistance.LOW
+    matchPhaseDistanceText = "Low Match"

 # significantTimeDiff cannot be higher than 1 and shouldn't be lower than .25
 if significantTimeDiff > 1:
-    significantTimeDiff = 1
-if significantTimeDiff < .5:
-    significantTimeDiff = .5
+    significantTimeDiff = float(1.00)
+if significantTimeDiff < .25:
+    significantTimeDiff = float(0.25)

 duplicateMarkForDeletion = stash.Setting('DupFileTag')
 if duplicateMarkForDeletion == "":
     duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
+base1_duplicateMarkForDeletion = duplicateMarkForDeletion
+
 duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
 if duplicateWhitelistTag == "":
-    duplicateWhitelistTag = 'DuplicateWhitelistFile'
+    duplicateWhitelistTag = '_DuplicateWhitelistFile'
+
+excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag')
+if excludeDupFileDeleteTag == "":
+    excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion'
+
+graylistMarkForDeletion = stash.Setting('graylistMarkForDeletion')
+if graylistMarkForDeletion == "":
+    graylistMarkForDeletion = '_GraylistMarkForDeletion'
+
+longerDurationLowerResolution = stash.Setting('longerDurationLowerResolution')
+if longerDurationLowerResolution == "":
+    longerDurationLowerResolution = '_LongerDurationLowerResolution'
-excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag]
-stash.init_mergeMetadata(excludeMergeTags)
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag]
-graylist = stash.Setting('zxGraylist').split(listSeparator)
+if stash.Setting('underscoreDupFileTag') and not duplicateMarkForDeletion.startswith('_'):
+    duplicateMarkForDeletionWithOutUnderscore = duplicateMarkForDeletion
+    duplicateMarkForDeletion = "_" + duplicateMarkForDeletion
+    if stash.renameTag(duplicateMarkForDeletionWithOutUnderscore, duplicateMarkForDeletion):
+        stash.Log(f"Renamed tag {duplicateMarkForDeletionWithOutUnderscore} to {duplicateMarkForDeletion}")
+    stash.Trace(f"Added underscore to {duplicateMarkForDeletionWithOutUnderscore} = {duplicateMarkForDeletion}")
+    excludeMergeTags += [duplicateMarkForDeletion]
+else:
+    stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
+
+base2_duplicateMarkForDeletion = duplicateMarkForDeletion
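# Worked example of the Target parsing above (hypothetical task argument, not
# part of this patch): Target="3:0.75" selects matchDupDistance=3, which maps
# to PhashDistance.LOW ("Low Match"); because ":" sits at index 1, it also sets
# significantTimeDiff=0.75 and excludeFromReportIfSignificantTimeDiff=True, so
# duplicate pairs whose shorter/longer duration ratio falls below 0.75 are left
# out of the report.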
+
+if stash.Setting('appendMatchDupDistance'):
+    duplicateMarkForDeletion += f"_{matchDupDistance}"
+    excludeMergeTags += [duplicateMarkForDeletion]
+
+stash.initMergeMetadata(excludeMergeTags)
+
+graylist = stash.Setting('zwGraylist').split(listSeparator)
 graylist = [item.lower() for item in graylist]
 if graylist == [""] : graylist = []
 stash.Trace(f"graylist = {graylist}")
-whitelist = stash.Setting('zWhitelist').split(listSeparator)
+whitelist = stash.Setting('zvWhitelist').split(listSeparator)
 whitelist = [item.lower() for item in whitelist]
 if whitelist == [""] : whitelist = []
 stash.Trace(f"whitelist = {whitelist}")
-blacklist = stash.Setting('zyBlacklist').split(listSeparator)
+blacklist = stash.Setting('zxBlacklist').split(listSeparator)
 blacklist = [item.lower() for item in blacklist]
 if blacklist == [""] : blacklist = []
 stash.Trace(f"blacklist = {blacklist}")
@@ -169,51 +306,49 @@ def testReparsePointAndSymLink(merge=False, deleteDup=False):
             stash.Log(f"Not isSymLink '{myTestPath6}'")
     return
+detailPrefix = "BaseDup="
+detailPostfix = "\n"
-def createTagId(tagName, tagName_descp, deleteIfExist = False):
-    tagId = stash.find_tags(q=tagName)
-    if len(tagId):
-        tagId = tagId[0]
-        if deleteIfExist:
-            stash.destroy_tag(int(tagId['id']))
-        else:
-            return tagId['id']
-    tagId = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True})
-    stash.Log(f"Dup-tagId={tagId['id']}")
-    return tagId['id']
-
-def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
+def setTagId(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False):
     details = ""
     ORG_DATA_DICT = {'id' : sceneDetails['id']}
     dataDict = ORG_DATA_DICT.copy()
     doAddTag = True
     if addPrimaryDupPathToDetails:
-        BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n"
+        BaseDupStr = f"{detailPrefix}{DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n{TagReason}(matchDupDistance={matchPhaseDistanceText})\n{detailPostfix}"
         if sceneDetails['details'] == "":
             details = BaseDupStr
-        elif not sceneDetails['details'].startswith(BaseDupStr):
+        elif not sceneDetails['details'].startswith(detailPrefix):
            details = f"{BaseDupStr};\n{sceneDetails['details']}"
     for tag in sceneDetails['tags']:
         if tag['name'] == tagName:
             doAddTag = False
             break
     if doAddTag:
-        dataDict.update({'tag_ids' : tagId})
+        stash.addTag(sceneDetails, tagName, ignoreAutoTag=ignoreAutoTag)
     if details != "":
         dataDict.update({'details' : details})
     if dataDict != ORG_DATA_DICT:
-        stash.update_scene(dataDict)
-        stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True)
+        stash.updateScene(dataDict)
+        stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict} and tag {tagName}", toAscii=True)
     else:
-        stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
-
+        stash.Trace(f"[setTagId] Nothing to update; {sceneDetails['files'][0]['path']} already has tag {tagName}.", toAscii=True)
+    return doAddTag
-def isInList(listToCk, pathToCk):
-    pathToCk = pathToCk.lower()
-    for item in listToCk:
-        if pathToCk.startswith(item):
-            return True
-    return False
+def setTagId_withRetry(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+    errMsg = None
+    for i in range(0, retryCount):
+        try:
+            if errMsg != None:
+                stash.Warn(errMsg)
+            return setTagId(tagName, sceneDetails, DupFileToKeep, TagReason, ignoreAutoTag)
+        except (requests.exceptions.ConnectionError, ConnectionResetError) as e:
+            tb = traceback.format_exc()
+            errMsg = f"[setTagId] Exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+        except Exception as e:
+            tb = traceback.format_exc()
+            errMsg = f"[setTagId] Unknown exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+        time.sleep(sleepSecondsBetweenRetry)

 def hasSameDir(path1, path2):
     if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent:
@@ -237,39 +372,284 @@ def sendToTrash(path):
     except Exception as e:
         stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
     return False
-
-def significantLessTime(durrationToKeep, durrationOther):
-    timeDiff = durrationToKeep / durrationOther
+# If ckTimeDiff=False: Does durration2 have significantly more time than durration1?
+def significantTimeDiffCheck(durration1, durration2, ckTimeDiff = False): # If ckTimeDiff=True: is the time difference significant in either direction?
+    if not isinstance(durration1, int) and 'files' in durration1:
+        durration1 = int(durration1['files'][0]['duration'])
+        durration2 = int(durration2['files'][0]['duration'])
+    timeDiff = getTimeDif(durration1, durration2)
+    if ckTimeDiff and timeDiff > 1:
+        timeDiff = getTimeDif(durration2, durration1)
     if timeDiff < significantTimeDiff:
         return True
     return False

+def getTimeDif(durration1, durration2): # Where durration1 is expected to be smaller than durration2, i.e. (45/60 = .75)
+    return durration1 / durration2
+
+def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better than scene1
+    # Prioritize higher resolution over codec, bit rate, and frame rate
+    if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+        return False
+    if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate):
+        if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])):
+            stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}")
+            return True
+    if (favorCodecRanking and swapCandidateCk == False) or (swapCandidateCk and swapCodec):
+        scene1CodecRank = stash.indexStartsWithInList(codecRanking, scene1['files'][0]['video_codec'])
+        scene2CodecRank = stash.indexStartsWithInList(codecRanking, scene2['files'][0]['video_codec'])
+        if scene2CodecRank < scene1CodecRank:
+            stash.Trace(f"[isBetterVideo] Better codec. {scene1['files'][0]['path']}={scene1['files'][0]['video_codec']}:Rank={scene1CodecRank} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['video_codec']}:Rank={scene2CodecRank}")
+            return True
+    if (favorFrameRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterFrameRate):
+        if (favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) > int(scene1['files'][0]['frame_rate'])) or (not favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) < int(scene1['files'][0]['frame_rate'])):
+            stash.Trace(f"[isBetterVideo]:[favorHigherFrameRate={favorHigherFrameRate}] Better frame rate. {scene1['files'][0]['path']}={scene1['files'][0]['frame_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}")
+            return True
+    return False
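# Illustrative example of the isBetterVideo() codec ranking above (hypothetical
# codec names; the real list comes from the codecRankingSet1..4 settings): with
# codecRanking = ["av1", "hevc", "h264"], an "hevc" scene (rank 1) beats an
# "h264" scene (rank 2) because the lower indexStartsWithInList() rank wins;
# resolution is still checked first, so a higher-resolution h264 file is never
# displaced by a lower-resolution hevc one.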
{scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}") + return True + return False + +def significantMoreTimeCompareToBetterVideo(scene1, scene2): # is scene2 better than scene1 + if isinstance(scene1, int): + scene1 = stash.find_scene(scene1) + scene2 = stash.find_scene(scene2) + if int(scene1['files'][0]['duration']) >= int(scene2['files'][0]['duration']): + return False + if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']): + if significantTimeDiffCheck(scene1, scene2): + if tagLongDurationLowRes: + didAddTag = setTagId_withRetry(longerDurationLowerResolution, scene2, scene1, ignoreAutoTag=True) + stash.Log(f"Tagged sene2 with tag {longerDurationLowerResolution}, because scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']}); didAddTag={didAddTag}") + else: + stash.Warn(f"Scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; Scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); Scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']})") + return False + return True + +def allThingsEqual(scene1, scene2): # If all important things are equal, return true + if int(scene1['files'][0]['duration']) != int(scene2['files'][0]['duration']): + return False + if scene1['files'][0]['width'] != scene2['files'][0]['width']: + return False + if scene1['files'][0]['height'] != scene2['files'][0]['height']: + return False + if bitRateIsImporantComp and scene1['files'][0]['bit_rate'] != scene2['files'][0]['bit_rate']: + return False + if codecIsImporantComp and scene1['files'][0]['video_codec'] != scene2['files'][0]['video_codec']: + return False + return True + def isSwapCandidate(DupFileToKeep, DupFile): # Don't move if both are in whitelist - if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']): + if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']): return False - if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])): - if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])): + if swapHighRes and int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']): + if not significantTimeDiffCheck(DupFileToKeep, DupFile): return True else: stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True) if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']): if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']): return True + if 
+    if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True):
+        if not significantTimeDiffCheck(DupFileToKeep, DupFile):
+            return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter; DupFileToKeep-ID={DupFileToKeep['id']};DupFile-ID={DupFile['id']};BitRate {DupFileToKeep['files'][0]['bit_rate']} vs {DupFile['files'][0]['bit_rate']};Codec {DupFileToKeep['files'][0]['video_codec']} vs {DupFile['files'][0]['video_codec']};FrameRate {DupFileToKeep['files'][0]['frame_rate']} vs {DupFile['files'][0]['frame_rate']};", toAscii=True)
+    return False
+
+dupWhitelistTagId = None
+def addDupWhitelistTag():
+    global dupWhitelistTagId
+    stash.Trace(f"Adding tag duplicateWhitelistTag = {duplicateWhitelistTag}")
+    descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
+    dupWhitelistTagId = stash.createTagId(duplicateWhitelistTag, descp, ignoreAutoTag=True)
+    stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+
+excludeDupFileDeleteTagId = None
+def addExcludeDupTag():
+    global excludeDupFileDeleteTagId
+    stash.Trace(f"Adding tag excludeDupFileDeleteTag = {excludeDupFileDeleteTag}")
+    descp = 'Excludes duplicate scene from DupFileManager tagging and deletion process. A scene having this tag will not get deleted by DupFileManager.'
+    excludeDupFileDeleteTagId = stash.createTagId(excludeDupFileDeleteTag, descp, ignoreAutoTag=True)
+    stash.Trace(f"excludeDupFileDeleteTagId={excludeDupFileDeleteTagId} name={excludeDupFileDeleteTag}")
+
+def isTaggedExcluded(Scene):
+    for tag in Scene['tags']:
+        if tag['name'] == excludeDupFileDeleteTag:
+            return True
+    return False
+
+def isWorseKeepCandidate(DupFileToKeep, Scene):
+    if not stash.startsWithInList(whitelist, Scene['files'][0]['path']) and stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']):
+        return True
+    if not stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']):
+        return True
+    if not stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']):
+        return True
+
+    if stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(graylist, Scene['files'][0]['path']):
+        return True
+    if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']):
+        return True
+    return False
+
+def killScanningJobs():
+    try:
+        if killScanningPostProcess:
+            stash.stopJobs(1, "Scanning...")
+    except Exception as e:
+        tb = traceback.format_exc()
+        stash.Error(f"Exception while trying to kill scan jobs; Error: {e}\nTraceBack={tb}")
+
+def getPath(Scene, getParent = False):
+    path = stash.asc2(Scene['files'][0]['path'])
+    path = path.replace("'", "")
+    path = path.replace("\\\\", "\\")
+    if getParent:
+        return pathlib.Path(path).resolve().parent
+    return path
+
+def getHtmlReportTableRow(qtyResults, tagDuplicates):
+    htmlReportPrefix =
stash.Setting('htmlReportPrefix') + htmlReportPrefix = htmlReportPrefix.replace('http://127.0.0.1:9999/graphql', stash.url) + htmlReportPrefix = htmlReportPrefix.replace('http://localhost:9999/graphql', stash.url) + if tagDuplicates == False: + htmlReportPrefix = htmlReportPrefix.replace('") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + # ToDo: Add following buttons: + # rename file + if dupFileExist and tagDuplicates: + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + if dupFileExist: + fileHtmlReport.write(f"[Folder]") + fileHtmlReport.write(f"[Play]") + else: + fileHtmlReport.write("[File NOT Exist]") + fileHtmlReport.write("

") + + videoPreview = f"" + if htmlIncludeImagePreview: + imagePreview = f"
  • \"\"\"\"
" + fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}
{videoPreview}{imagePreview}
") + else: + fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{videoPreview}") + fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{getPath(DupFileToKeep)}") + fileHtmlReport.write(f"

") + fileHtmlReport.write(f"
ResDurrationBitRateCodecFrameRatesizeID
{DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']}{DupFileToKeep['files'][0]['duration']}{DupFileToKeep['files'][0]['bit_rate']}{DupFileToKeep['files'][0]['video_codec']}{DupFileToKeep['files'][0]['frame_rate']}{DupFileToKeep['files'][0]['size']}{DupFileToKeep['id']}
") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + fileHtmlReport.write(f"") + if isTaggedExcluded(DupFileToKeep): + fileHtmlReport.write(f"") + fileHtmlReport.write(f"[Folder]") + if toKeepFileExist: + fileHtmlReport.write(f"[Play]") + else: + fileHtmlReport.write("[File NOT Exist]") + fileHtmlReport.write(f"") + # ToDo: Add following buttons: + # rename file + fileHtmlReport.write(f"

") + + fileHtmlReport.write("\n") + +def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlacklistOnly=False, deleteLowerResAndDuration=False): + global reasonDict duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.' stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}") - dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp) + dupTagId = stash.createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp, ignoreAutoTag=True) stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}") + createHtmlReport = stash.Setting('createHtmlReport') + htmlReportNameHomePage = htmlReportName + htmlReportPaginate = stash.Setting('htmlReportPaginate') + - dupWhitelistTagId = None - if whitelistDoTagLowResDup: - stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}") - duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.' - dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp) - stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") + addDupWhitelistTag() + addExcludeDupTag() QtyDupSet = 0 QtyDup = 0 @@ -277,187 +657,897 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): QtyAlmostDup = 0 QtyRealTimeDiff = 0 QtyTagForDel = 0 + QtyTagForDelPaginate = 0 + PaginateId = 0 + QtyNewlyTag = 0 QtySkipForDel = 0 + QtyExcludeForDel = 0 QtySwap = 0 QtyMerge = 0 QtyDeleted = 0 stash.Log("#########################################################################") stash.Trace("#########################################################################") - stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN) - DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff) + stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; matchDupDistance={matchPhaseDistanceText}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN) + stash.startSpinningProcessBar() + htmlFileData = " paths {screenshot sprite " + htmlPreviewOrStream + "} " if createHtmlReport else "" + mergeFieldData = " code director title rating100 date studio {id} movies {movie {id} } galleries {id} performers {id} urls " if merge else "" + DupFileSets = stash.find_duplicate_scenes(matchPhaseDistance, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details ' + mergeFieldData + htmlFileData) + stash.stopSpinningProcessBar() qtyResults = len(DupFileSets) + stash.setProgressBarIter(qtyResults) stash.Trace("#########################################################################") + stash.Log(f"Found {qtyResults} duplicate sets...") + fileHtmlReport = None + if createHtmlReport: + if not os.path.isdir(htmlReportNameFolder): + os.mkdir(htmlReportNameFolder) + if not os.path.isdir(htmlReportNameFolder): + stash.Error(f"Failed to create report directory {htmlReportNameFolder}.") + return + deleteLocalDupReportHtmlFiles(False) + fileHtmlReport = open(htmlReportName, "w") + fileHtmlReport.write(f"{getHtmlReportTableRow(qtyResults, tagDuplicates)}\n") + fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n") + htmlReportTableHeader = stash.Setting('htmlReportTableHeader') + 
fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n") + for DupFileSet in DupFileSets: - stash.Trace(f"DupFileSet={DupFileSet}") + # stash.Trace(f"DupFileSet={DupFileSet}", toAscii=True) QtyDupSet+=1 - stash.Progress(QtyDupSet, qtyResults) + stash.progressBar(QtyDupSet, qtyResults) SepLine = "---------------------------" - DupFileToKeep = "" + DupFileToKeep = None DupToCopyFrom = "" DupFileDetailList = [] for DupFile in DupFileSet: QtyDup+=1 - stash.log.sl.progress(f"Scene ID = {DupFile['id']}") - time.sleep(2) - Scene = stash.find_scene(DupFile['id']) - sceneData = f"Scene = {Scene}" - stash.Trace(sceneData, toAscii=True) + Scene = DupFile + if skipIfTagged and createHtmlReport == False and duplicateMarkForDeletion in Scene['tags']: + stash.Trace(f"Skipping scene '{Scene['files'][0]['path']}' because already tagged with {duplicateMarkForDeletion}") + continue + stash.TraceOnce(f"Scene = {Scene}", toAscii=True) DupFileDetailList = DupFileDetailList + [Scene] - if DupFileToKeep != "": - if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference - QtyExactDup+=1 + if os.path.isfile(Scene['files'][0]['path']): + if DupFileToKeep != None: + if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference + QtyExactDup+=1 + else: + QtyAlmostDup+=1 + SepLine = "***************************" + if significantTimeDiffCheck(DupFileToKeep, Scene): + QtyRealTimeDiff += 1 + + if int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['width']) * int(Scene['files'][0]['height']): + logReason(DupFileToKeep, Scene, f"resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}") + DupFileToKeep = Scene + elif significantMoreTimeCompareToBetterVideo(DupFileToKeep, Scene): + if significantTimeDiffCheck(DupFileToKeep, Scene): + theReason = f"significant-duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}" + else: + theReason = f"duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}" + reasonKeyword = "significant-duration" if significantTimeDiffCheck(DupFileToKeep, Scene) else "duration" + logReason(DupFileToKeep, Scene, theReason) + DupFileToKeep = Scene + elif isBetterVideo(DupFileToKeep, Scene): + logReason(DupFileToKeep, Scene, f"codec,bit_rate, or frame_rate: {DupFileToKeep['files'][0]['video_codec']}, {DupFileToKeep['files'][0]['bit_rate']}, {DupFileToKeep['files'][0]['frame_rate']} : {Scene['files'][0]['video_codec']}, {Scene['files'][0]['bit_rate']}, {Scene['files'][0]['frame_rate']}") + DupFileToKeep = Scene + elif stash.startsWithInList(whitelist, Scene['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']): + logReason(DupFileToKeep, Scene, f"not whitelist vs whitelist") + DupFileToKeep = Scene + elif isTaggedExcluded(Scene) and not isTaggedExcluded(DupFileToKeep): + logReason(DupFileToKeep, Scene, f"not ExcludeTag vs ExcludeTag") + DupFileToKeep = Scene + elif allThingsEqual(DupFileToKeep, Scene): + # Only do below checks if all imporant things are equal. 
+ if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and not stash.startsWithInList(blacklist, Scene['files'][0]['path']): + logReason(DupFileToKeep, Scene, f"blacklist vs not blacklist") + DupFileToKeep = Scene + elif stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']): + logReason(DupFileToKeep, Scene, f"blacklist-index {stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(blacklist, Scene['files'][0]['path'])}") + DupFileToKeep = Scene + elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and not stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']): + logReason(DupFileToKeep, Scene, f"not graylist vs graylist") + DupFileToKeep = Scene + elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(graylist, Scene['files'][0]['path']): + logReason(DupFileToKeep, Scene, f"graylist-index {stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(graylist, Scene['files'][0]['path'])}") + DupFileToKeep = Scene + elif favorLongerFileName and len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene): + logReason(DupFileToKeep, Scene, f"path-len {len(DupFileToKeep['files'][0]['path'])} < {len(Scene['files'][0]['path'])}") + DupFileToKeep = Scene + elif favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene): + logReason(DupFileToKeep, Scene, f"size {DupFileToKeep['files'][0]['size']} < {Scene['files'][0]['size']}") + DupFileToKeep = Scene + elif not favorLongerFileName and len(DupFileToKeep['files'][0]['path']) > len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene): + logReason(DupFileToKeep, Scene, f"path-len {len(DupFileToKeep['files'][0]['path'])} > {len(Scene['files'][0]['path'])}") + DupFileToKeep = Scene + elif not favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) > int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene): + logReason(DupFileToKeep, Scene, f"size {DupFileToKeep['files'][0]['size']} > {Scene['files'][0]['size']}") + DupFileToKeep = Scene else: - QtyAlmostDup+=1 - SepLine = "***************************" - if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])): - QtyRealTimeDiff += 1 - if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']): - DupFileToKeep = Scene - elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']): - DupFileToKeep = Scene - elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']): - DupFileToKeep = Scene - elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']): - DupFileToKeep = Scene - elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, 
DupFileToKeep['files'][0]['path']): - DupFileToKeep = Scene - elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']): - DupFileToKeep = Scene - elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']): DupFileToKeep = Scene + # stash.Trace(f"DupFileToKeep = {DupFileToKeep}") + stash.Debug(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True) else: - DupFileToKeep = Scene - # stash.Trace(f"DupFileToKeep = {DupFileToKeep}") - stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True) + stash.Error(f"Scene does NOT exist; path={Scene['files'][0]['path']}; ID={Scene['id']}") for DupFile in DupFileDetailList: - if DupFile['id'] != DupFileToKeep['id']: + if DupFileToKeep != None and DupFile['id'] != DupFileToKeep['id']: if merge: - result = stash.merge_metadata(DupFile, DupFileToKeep) + result = stash.mergeMetadata(DupFile, DupFileToKeep) if result != "Nothing To Merge": QtyMerge += 1 - - if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])): + didAddTag = False + if stash.startsWithInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])): + QtySkipForDel+=1 if isSwapCandidate(DupFileToKeep, DupFile): if merge: - stash.merge_metadata(DupFileToKeep, DupFile) + stash.mergeMetadata(DupFileToKeep, DupFile) if toRecycleBeforeSwap: sendToTrash(DupFile['files'][0]['path']) - shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path']) - stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + stash.Log(f"Moving better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'; SrcID={DupFileToKeep['id']};DescID={DupFile['id']};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySwap={QtySwap};QtySkipForDel={QtySkipForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + try: + shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path']) + QtySwap+=1 + except Exception as e: + tb = traceback.format_exc() + stash.Error(f"Exception while moving file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}; SrcID={DupFileToKeep['id']};DescID={DupFile['id']}'; Error: {e}\nTraceBack={tb}") DupFileToKeep = DupFile - QtySwap+=1 else: - stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True) if dupWhitelistTagId and tagDuplicates: - setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep) - QtySkipForDel+=1 + didAddTag = setTagId_withRetry(duplicateWhitelistTag, DupFile, DupFileToKeep, ignoreAutoTag=True) + stash.Log(f"NOT processing duplicate, because it's in whitelist. 
'{DupFile['files'][0]['path']}';AddTagW={didAddTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySkipForDel={QtySkipForDel}", toAscii=True) else: - if deleteDup: - DupFileName = DupFile['files'][0]['path'] - DupFileNameOnly = pathlib.Path(DupFileName).stem - stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - if alternateTrashCanPath != "": - destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" - if os.path.isfile(destPath): - destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" - shutil.move(DupFileName, destPath) - elif moveToTrashCan: - sendToTrash(DupFileName) - stash.destroy_scene(DupFile['id'], delete_file=True) - QtyDeleted += 1 - elif tagDuplicates: - if QtyTagForDel == 0: - stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - else: - stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep) - QtyTagForDel+=1 + if isTaggedExcluded(DupFile): + QtyExcludeForDel+=1 + stash.Log(f"Excluding file {DupFile['files'][0]['path']} because tagged for exclusion via tag {excludeDupFileDeleteTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults}") + else: + # ToDo: Add merge logic here + if deleteDup: + DupFileName = DupFile['files'][0]['path'] + if not deleteBlacklistOnly or stash.startsWithInList(blacklist, DupFile['files'][0]['path']): + if not deleteLowerResAndDuration or (isBetterVideo(DupFile, DupFileToKeep) and not significantMoreTimeCompareToBetterVideo(DupFileToKeep, DupFile)) or (significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep) and not isBetterVideo(DupFileToKeep, DupFile)): + QtyDeleted += 1 + DupFileNameOnly = pathlib.Path(DupFileName).stem + stash.Warn(f"Deleting duplicate '{DupFileName}';QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtyDeleted={QtyDeleted}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + if alternateTrashCanPath != "": + destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" + if os.path.isfile(destPath): + destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" + shutil.move(DupFileName, destPath) + elif moveToTrashCan: + sendToTrash(DupFileName) + stash.destroyScene(DupFile['id'], delete_file=True) + elif tagDuplicates or fileHtmlReport != None: + if excludeFromReportIfSignificantTimeDiff and significantTimeDiffCheck(DupFile, DupFileToKeep, True): + stash.Log(f"Skipping duplicate {DupFile['files'][0]['path']} (ID={DupFile['id']}), because of time difference greater than {significantTimeDiff} for file {DupFileToKeep['files'][0]['path']}.") + continue + QtyTagForDel+=1 + QtyTagForDelPaginate+=1 + didAddTag = False + if tagDuplicates: + didAddTag = setTagId_withRetry(duplicateMarkForDeletion, DupFile, DupFileToKeep, ignoreAutoTag=True) + if fileHtmlReport != None: + # ToDo: Add icons using github path + # add copy button with copy icon + # add move button with r-sqr icon + # repace delete button with trashcan icon + # add rename file code and button + # add delete only from stash db code and button using DB delete icon + stash.Debug(f"Adding scene {DupFile['id']} to HTML report.") + writeRowToHtmlReport(fileHtmlReport, DupFile, DupFileToKeep, QtyTagForDel, tagDuplicates) + if QtyTagForDelPaginate >= htmlReportPaginate: + QtyTagForDelPaginate = 0 + fileHtmlReport.write("\n") + homeHtmReportLink = f"[Home]" + 
prevHtmReportLink = "" + if PaginateId > 0: + if PaginateId > 1: + prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") + else: + prevHtmReport = htmlReportNameHomePage + prevHtmReportLink = f"[Prev]" + nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html") + nextHtmReportLink = f"[Next]" + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}{nextHtmReportLink}
") + fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}") + fileHtmlReport.close() + PaginateId+=1 + fileHtmlReport = open(nextHtmReport, "w") + fileHtmlReport.write(f"{getHtmlReportTableRow(qtyResults, tagDuplicates)}\n") + if PaginateId > 1: + prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") + else: + prevHtmReport = htmlReportNameHomePage + prevHtmReportLink = f"[Prev]" + if len(DupFileSets) > (QtyTagForDel + htmlReportPaginate): + nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html") + nextHtmReportLink = f"[Next]" + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}{nextHtmReportLink}
") + else: + stash.Debug(f"DupFileSets Qty = {len(DupFileSets)}; DupFileDetailList Qty = {len(DupFileDetailList)}; QtyTagForDel = {QtyTagForDel}; htmlReportPaginate = {htmlReportPaginate}; QtyTagForDel + htmlReportPaginate = {QtyTagForDel+htmlReportPaginate}") + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}
") + fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n") + fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n") + + if tagDuplicates and graylistTagging and stash.startsWithInList(graylist, DupFile['files'][0]['path']): + stash.addTag(DupFile, graylistMarkForDeletion, ignoreAutoTag=True) + if didAddTag: + QtyNewlyTag+=1 + if QtyTagForDel == 1: + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + else: + didAddTag = 1 if didAddTag else 0 + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion;AddTag={didAddTag};Qty={QtyDup};Set={QtyDupSet} of {qtyResults};NewlyTag={QtyNewlyTag};isTag={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) stash.Trace(SepLine) - if maxDupToProcess > 0 and QtyDup > maxDupToProcess: + if maxDupToProcess > 0 and ((QtyTagForDel > maxDupToProcess) or (QtyTagForDel == 0 and QtyDup > maxDupToProcess)): break - stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN) - if cleanAfterDel: + if fileHtmlReport != None: + fileHtmlReport.write("\n") + if PaginateId > 0: + homeHtmReportLink = f"[Home]" + if PaginateId > 1: + prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") + else: + prevHtmReport = htmlReportNameHomePage + prevHtmReportLink = f"[Prev]" + fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}
") + fileHtmlReport.write(f"

Total Tagged for Deletion {QtyTagForDel}

\n") + fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}") + fileHtmlReport.close() + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + stash.Log(f"View Stash duplicate report using Stash->Settings->Tools->[Duplicate File Report]", printTo = stash.LogTo.STASH) + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) + + + stash.Debug("#####################################################") + stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN) + killScanningJobs() + if cleanAfterDel and deleteDup: stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) - stash.metadata_clean(paths=stash.STASH_PATHS) + stash.metadata_clean() stash.metadata_clean_generated() stash.optimise_database() + if doGeneratePhash: + stash.metadata_generate({"phashes": True}) + sys.stdout.write("Report complete") -def deleteTagggedDuplicates(): - tagId = stash.find_tags(q=duplicateMarkForDeletion) - if len(tagId) > 0 and 'id' in tagId[0]: - tagId = tagId[0]['id'] - else: +def findCurrentTagId(tagNames): + # tagNames = [i for n, i in enumerate(tagNames) if i not in tagNames[:n]] + for tagName in tagNames: + tagId = stash.find_tags(q=tagName) + if len(tagId) > 0 and 'id' in tagId[0]: + stash.Debug(f"Using tag name {tagName} with Tag ID {tagId[0]['id']}") + return tagId[0]['id'] + return "-1" + +def toJson(data): + import json + # data = data.replace("'", '"') + data = data.replace("\\", "\\\\") + data = data.replace("\\\\\\\\", "\\\\") + return json.loads(data) + +def getAnAdvanceMenuOptionSelected(taskName, target, isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater): + stash.Log(f"Processing taskName = {taskName}, target = {target}") + if "Blacklist" in taskName: + isBlackList = True + if "Less" in taskName: + compareToLess = True + if "Greater" in taskName: + compareToGreater = True + + if "pathToDelete" in taskName: + pathToDelete = target.lower() + elif "sizeToDelete" in taskName: + sizeToDelete = int(target) + elif "durationToDelete" in taskName: + durationToDelete = int(target) + elif "commonResToDelete" in taskName: + resolutionToDelete = int(target) + elif "resolutionToDelete" in taskName: + resolutionToDelete = int(target) + elif "ratingToDelete" in taskName: + ratingToDelete = int(target) * 20 + elif "tagToDelete" in taskName: + tagToDelete = target.lower() + elif "titleToDelete" in taskName: + titleToDelete = target.lower() + elif "pathStrToDelete" in taskName: + pathStrToDelete = target.lower() + elif "fileNotExistToDelete" in taskName: + fileNotExistToDelete = True + return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater + +def getAdvanceMenuOptionSelected(advanceMenuOptionSelected): + isBlackList = False + 
pathToDelete = "" + sizeToDelete = -1 + durationToDelete = -1 + resolutionToDelete = -1 + ratingToDelete = -1 + tagToDelete = "" + titleToDelete = "" + pathStrToDelete = "" + fileNotExistToDelete = False + compareToLess = False + compareToGreater = False + if advanceMenuOptionSelected: + stash.enableProgressBar(False) + if 'Target' in stash.JSON_INPUT['args']: + if "applyCombo" in stash.PLUGIN_TASK_NAME: + jsonObject = toJson(stash.JSON_INPUT['args']['Target']) + for taskName in jsonObject: + isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAnAdvanceMenuOptionSelected(taskName, jsonObject[taskName], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, compareToLess, compareToGreater) + else: + return getAnAdvanceMenuOptionSelected(stash.PLUGIN_TASK_NAME, stash.JSON_INPUT['args']['Target'], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, compareToLess, compareToGreater) + return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater + +# ////////////////////////////////////////////////////////////////////////////// +# ////////////////////////////////////////////////////////////////////////////// +def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False, tagId=-1, advanceMenuOptionSelected=False): + if tagId == -1: + tagId = findCurrentTagId([duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, 'DuplicateMarkForDeletion', '_DuplicateMarkForDeletion']) + if int(tagId) < 0: stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.") return + + excludedTags = [duplicateMarkForDeletion] + if clearAllDupfileManagerTags: + excludedTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag, graylistMarkForDeletion, longerDurationLowerResolution] + + isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAdvanceMenuOptionSelected(advanceMenuOptionSelected) + if advanceMenuOptionSelected and deleteScenes and pathToDelete == "" and tagToDelete == "" and titleToDelete == "" and pathStrToDelete == "" and sizeToDelete == -1 and durationToDelete == -1 and resolutionToDelete == -1 and ratingToDelete == -1 and fileNotExistToDelete == False: + stash.Error("Running advance menu option with no options enabled.") + return + QtyDup = 0 QtyDeleted = 0 + QtyClearedTags = 0 + QtySetGraylistTag = 0 QtyFailedQuery = 0 - stash.Trace("#########################################################################") - sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id') - qtyResults = len(sceneIDs) - stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}") - for sceneID in sceneIDs: - # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.") + stash.Debug("#########################################################################") + stash.startSpinningProcessBar() + scenes = stash.find_scenes(f={"tags": {"value":tagId, 
"modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details title rating100') + stash.stopSpinningProcessBar() + qtyResults = len(scenes) + stash.Log(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion})") + stash.setProgressBarIter(qtyResults) + for scene in scenes: QtyDup += 1 - prgs = QtyDup / qtyResults - stash.Progress(QtyDup, qtyResults) - scene = stash.find_scene(sceneID['id']) - if scene == None or len(scene) == 0: - stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.") - QtyFailedQuery += 1 - continue - # stash.Log(f"scene={scene}") - DupFileName = scene['files'][0]['path'] - DupFileNameOnly = pathlib.Path(DupFileName).stem - stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - if alternateTrashCanPath != "": - destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" - if os.path.isfile(destPath): - destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" - shutil.move(DupFileName, destPath) - elif moveToTrashCan: - sendToTrash(DupFileName) - result = stash.destroy_scene(scene['id'], delete_file=True) - stash.Trace(f"destroy_scene result={result} for file {DupFileName}", toAscii=True) - QtyDeleted += 1 - stash.Log(f"QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN) - return + stash.progressBar(QtyDup, qtyResults) + # scene = stash.find_scene(sceneID['id']) + # if scene == None or len(scene) == 0: + # stash.Warn(f"Could not get scene data for scene ID {scene['id']}.") + # QtyFailedQuery += 1 + # continue + # stash.Trace(f"scene={scene}") + if clearTag: + QtyClearedTags += 1 + # ToDo: Add logic to exclude graylistMarkForDeletion + tags = [int(item['id']) for item in scene["tags"] if item['name'] not in excludedTags] + # if clearAllDupfileManagerTags: + # tags = [] + # for tag in scene["tags"]: + # if tag['name'] in excludedTags: + # continue + # tags += [int(tag['id'])] + stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}") + dataDict = {'id' : scene['id']} + if addPrimaryDupPathToDetails: + sceneDetails = scene['details'] + if sceneDetails.find(detailPrefix) == 0 and sceneDetails.find(detailPostfix) > 1: + Pos1 = sceneDetails.find(detailPrefix) + Pos2 = sceneDetails.find(detailPostfix) + sceneDetails = sceneDetails[0:Pos1] + sceneDetails[Pos2 + len(detailPostfix):] + dataDict.update({'details' : sceneDetails}) + dataDict.update({'tag_ids' : tags}) + stash.Log(f"Updating scene with {dataDict};QtyClearedTags={QtyClearedTags};Count={QtyDup} of {qtyResults}") + stash.updateScene(dataDict) + # stash.removeTag(scene, duplicateMarkForDeletion) + elif setGrayListTag: + if stash.startsWithInList(graylist, scene['files'][0]['path']): + QtySetGraylistTag+=1 + if stash.addTag(scene, graylistMarkForDeletion, ignoreAutoTag=True): + stash.Log(f"Added tag {graylistMarkForDeletion} to scene {scene['files'][0]['path']};QtySetGraylistTag={QtySetGraylistTag};Count={QtyDup} of {qtyResults}") + else: + stash.Trace(f"Scene already had tag {graylistMarkForDeletion}; {scene['files'][0]['path']}") + elif deleteScenes: + DupFileName = scene['files'][0]['path'] + DupFileNameOnly = pathlib.Path(DupFileName).stem + if advanceMenuOptionSelected: + if isBlackList: + if not stash.startsWithInList(blacklist, scene['files'][0]['path']): + continue + if pathToDelete != "": + if not DupFileName.lower().startswith(pathToDelete): + stash.Debug(f"Skipping file {DupFileName} because it 
does not start with {pathToDelete}.")
+                        continue
+                if pathStrToDelete != "":
+                    if not pathStrToDelete in DupFileName.lower():
+                        stash.Debug(f"Skipping file {DupFileName} because it does not contain value {pathStrToDelete}.")
+                        continue
+                if sizeToDelete != -1:
+                    compareTo = int(scene['files'][0]['size'])
+                    if compareToLess:
+                        if not (compareTo < sizeToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > sizeToDelete):
+                            continue
+                    else:
+                        if not compareTo == sizeToDelete:
+                            continue
+                if durationToDelete != -1:
+                    compareTo = int(scene['files'][0]['duration'])
+                    if compareToLess:
+                        if not (compareTo < durationToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > durationToDelete):
+                            continue
+                    else:
+                        if not compareTo == durationToDelete:
+                            continue
+                if resolutionToDelete != -1:
+                    compareTo = int(scene['files'][0]['width']) * int(scene['files'][0]['height'])
+                    if compareToLess:
+                        if not (compareTo < resolutionToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > resolutionToDelete):
+                            continue
+                    else:
+                        if not compareTo == resolutionToDelete:
+                            continue
+                if ratingToDelete != -1:
+                    if scene['rating100'] == "None":
+                        compareTo = 0
+                    else:
+                        compareTo = int(scene['rating100'])
+                    if compareToLess:
+                        if not (compareTo < ratingToDelete):
+                            continue
+                    elif compareToGreater:
+                        if not (compareTo > ratingToDelete):
+                            continue
+                    else:
+                        if not compareTo == ratingToDelete:
+                            continue
+                if titleToDelete != "":
+                    if not titleToDelete in scene['title'].lower():
+                        stash.Debug(f"Skipping file {DupFileName} because it does not contain value {titleToDelete} in title ({scene['title']}).")
+                        continue
+                if tagToDelete != "":
+                    doProcessThis = False
+                    for tag in scene['tags']:
+                        if tag['name'].lower() == tagToDelete:
+                            doProcessThis = True
+                            break
+                    if doProcessThis == False:
+                        continue
+                if fileNotExistToDelete:
+                    if os.path.isfile(scene['files'][0]['path']):
+                        continue
+            stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+            if alternateTrashCanPath != "":
+                destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+                if os.path.isfile(destPath):
+                    destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+                shutil.move(DupFileName, destPath)
+            elif moveToTrashCan:
+                sendToTrash(DupFileName)
+            result = stash.destroyScene(scene['id'], delete_file=True)
+            QtyDeleted += 1
+            stash.Debug(f"destroyScene result={result} for file {DupFileName};QtyDeleted={QtyDeleted};Count={QtyDup} of {qtyResults}", toAscii=True)
+        else:
+            stash.Error("manageTagggedDuplicates called with invalid input arguments.
Doing early exit.") + return + stash.Debug("#####################################################") + stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtySetGraylistTag={QtySetGraylistTag}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN) + killScanningJobs() + if deleteScenes and not advanceMenuOptionSelected: + if cleanAfterDel: + stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) + stash.metadata_clean() + stash.metadata_clean_generated() + stash.optimise_database() -def testSetDupTagOnScene(sceneId): - scene = stash.find_scene(sceneId) - stash.Log(f"scene={scene}") - stash.Log(f"scene tags={scene['tags']}") - tag_ids = [dupTagId] - for tag in scene['tags']: - tag_ids = tag_ids + [tag['id']] - stash.Log(f"tag_ids={tag_ids}") - stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids}) - -if stash.PLUGIN_TASK_NAME == "tag_duplicates_task": - mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) - stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") -elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task": - deleteTagggedDuplicates() - stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") -elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task": - mangeDupFiles(deleteDup=True, merge=mergeDupFilename) - stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") -elif parse_args.dup_tag: - mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) - stash.Trace(f"Tag duplicate EXIT") -elif parse_args.del_tag: - deleteTagggedDuplicates() - stash.Trace(f"Delete Tagged duplicates EXIT") -elif parse_args.remove: - mangeDupFiles(deleteDup=True, merge=mergeDupFilename) - stash.Trace(f"Delete duplicate EXIT") -else: - stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})") +def removeDupTag(): + if 'Target' not in stash.JSON_INPUT['args']: + stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") + return + scene = stash.JSON_INPUT['args']['Target'] + stash.Log(f"Processing scene ID# {scene}") + stash.removeTag(scene, duplicateMarkForDeletion) + stash.Log(f"Done removing tag from scene {scene}.") + jsonReturn = "{'removeDupTag' : 'complete', 'id': '" + f"{scene}" + "'}" + stash.Log(f"Sending json value {jsonReturn}") + sys.stdout.write(jsonReturn) + +def addExcludeTag(): + if 'Target' not in stash.JSON_INPUT['args']: + stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") + return + scene = stash.JSON_INPUT['args']['Target'] + stash.Log(f"Processing scene ID# {scene}") + stash.addTag(scene, excludeDupFileDeleteTag) + stash.Log(f"Done adding exclude tag to scene {scene}.") + sys.stdout.write("{" + f"addExcludeTag : 'complete', id: '{scene}'" + "}") + +def removeExcludeTag(): + if 'Target' not in stash.JSON_INPUT['args']: + stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") + return + scene = stash.JSON_INPUT['args']['Target'] + stash.Log(f"Processing scene ID# {scene}") + stash.removeTag(scene, excludeDupFileDeleteTag) + stash.Log(f"Done removing exclude tag from scene {scene}.") + sys.stdout.write("{" + f"removeExcludeTag : 'complete', id: '{scene}'" + "}") + +def getParseData(getSceneDetails1=True, getSceneDetails2=True): + if 'Target' not in stash.JSON_INPUT['args']: + stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") + return None, None + targetsSrc = stash.JSON_INPUT['args']['Target'] + targets = targetsSrc.split(":") + if len(targets) < 2: + stash.Error(f"Could not get both targets from string 
{targetsSrc}") + return None, None + stash.Log(f"Parsed targets {targets[0]} and {targets[1]}") + target1 = targets[0] + target2 = targets[1] + if getSceneDetails1: + target1 = stash.find_scene(int(target1)) + if getSceneDetails2: + target2 = stash.find_scene(int(target2)) + elif len(targets) > 2: + target2 = target2 + targets[2] + return target1, target2 + + +def mergeTags(): + scene1, scene2 = getParseData() + if scene1 == None or scene2 == None: + sys.stdout.write("{" + f"mergeTags : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}") + return + stash.mergeMetadata(scene1, scene2) + stash.Log(f"Done merging scenes for scene {scene1['id']} and scene {scene2['id']}") + sys.stdout.write("{" + f"mergeTags : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}'" + "}") + +def getLocalDupReportPath(): + htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false" + localPath = htmlReportName.replace("\\", "\\\\") + jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{localPath}" + "'}" + stash.Log(f"Sending json value {jsonReturn}") + sys.stdout.write(jsonReturn) + +def deleteLocalDupReportHtmlFiles(doJsonOutput = True): + htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false" + if os.path.isfile(htmlReportName): + stash.Log(f"Deleting file {htmlReportName}") + os.remove(htmlReportName) + for x in range(2, 9999): + fileName = htmlReportName.replace(".html", f"_{x-1}.html") + stash.Debug(f"Checking if file '{fileName}' exist.") + if not os.path.isfile(fileName): + break + stash.Log(f"Deleting file {fileName}") + os.remove(fileName) + else: + stash.Log(f"Report file does not exist: {htmlReportName}") + if doJsonOutput: + jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{htmlReportName}" + "', 'qty': '" + f"{x}" + "'}" + stash.Log(f"Sending json value {jsonReturn}") + sys.stdout.write(jsonReturn) + +def removeTagFromAllScenes(tagName, deleteTags): + # ToDo: Replace code with SQL code if DB version 68 + tagId = stash.find_tags(q=tagName) + if len(tagId) > 0 and 'id' in tagId[0]: + if deleteTags: + stash.Debug(f"Deleting tag name {tagName} with Tag ID {tagId[0]['id']} from stash.") + stash.destroy_tag(int(tagId[0]['id'])) + else: + stash.Debug(f"Removing tag name {tagName} with Tag ID {tagId[0]['id']} from all scenes.") + manageTagggedDuplicates(clearTag=True, tagId=int(tagId[0]['id'])) + return True + return False + +def removeAllDupTagsFromAllScenes(deleteTags=False): + tagsToClear = [duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, graylistMarkForDeletion, longerDurationLowerResolution, duplicateWhitelistTag] + for x in range(0, 3): + tagsToClear += [base1_duplicateMarkForDeletion + f"_{x}"] + for x in range(0, 3): + tagsToClear += [base2_duplicateMarkForDeletion + f"_{x}"] + tagsToClear = list(set(tagsToClear)) # Remove duplicates + validTags = [] + for tagToClear in tagsToClear: + if removeTagFromAllScenes(tagToClear, deleteTags): + validTags +=[tagToClear] + if doJsonReturn: + jsonReturn = "{'removeAllDupTagFromAllScenes' : " + f"{duplicateMarkForDeletion}" + ", 'OtherTags': '" + f"{validTags}" + "'}" + stash.Log(f"Sending json value {jsonReturn}") + sys.stdout.write(jsonReturn) + else: + stash.Log(f"Clear tags {tagsToClear}") + +def updateScenesInReport(fileName, scene): + stash.Log(f"Updating table rows with scene {scene} in file {fileName}") + scene1 = -1 + scene2 = -1 + strToFind = "class=\"ID_" + lines = None + with open(fileName, 'r') as file: + 
lines = file.readlines()
+    stash.Log(f"line count = {len(lines)}")
+    with open(fileName, 'w') as file:
+        for line in lines:
+            # stash.Debug(f"line = {line}")
+            if f"class=\"ID_{scene}\"" in line:
+                idx = 0
+                while line.find(strToFind, idx) > -1:
+                    idx = line.find(strToFind, idx) + len(strToFind)
+                    id = line[idx:]
+                    stash.Debug(f"id = {id}, idx = {idx}")
+                    id = id[:id.find('"')]
+                    stash.Debug(f"id = {id}")
+                    if scene1 == -1:
+                        scene1 = int(id)
+                    elif scene1 != int(id) and scene2 == -1:
+                        scene2 = int(id)
+                    elif scene1 != -1 and scene2 != -1:
+                        break
+                if scene1 != -1 and scene2 != -1:
+                    sceneDetail1 = stash.find_scene(scene1)
+                    sceneDetail2 = stash.find_scene(scene2)
+                    if sceneDetail1 == None or sceneDetail2 == None:
+                        stash.Error(f"Could not get scene details for both scene1 ({scene1}) and scene2 ({scene2}); sceneDetail1={sceneDetail1}; sceneDetail2={sceneDetail2};")
+                    else:
+                        writeRowToHtmlReport(file, sceneDetail1, sceneDetail2)
+                else:
+                    stash.Error(f"Could not get both scene ID associated with scene {scene}; scene1 = {scene1}; scene2 = {scene2}")
+                    file.write(line)
+            else:
+                file.write(line)
+def updateScenesInReports(scene, ReportName = htmlReportName):
+    if os.path.isfile(ReportName):
+        updateScenesInReport(ReportName, scene)
+        for x in range(2, 9999):
+            fileName = ReportName.replace(".html", f"_{x-1}.html")
+            stash.Debug(f"Checking if file '{fileName}' exist.")
+            if not os.path.isfile(fileName):
+                break
+            updateScenesInReport(fileName, scene)
+    else:
+        stash.Log(f"Report file does not exist: {ReportName}")
+def addPropertyToSceneClass(fileName, scene, property):
+    stash.Log(f"Inserting property {property} for scene {scene} in file {fileName}")
+    doStyleEndTagCheck = True
+    lines = None
+    with open(fileName, 'r') as file:
+        lines = file.readlines()
+    stash.Log(f"line count = {len(lines)}")
+    with open(fileName, 'w') as file:
+        for line in lines:
+            # stash.Debug(f"line = {line}")
+            if doStyleEndTagCheck:
+                if property == "" and line.startswith(f".ID_{scene}" + "{"):
+                    continue
+                if line.startswith("</style>"):
+                    if property != "":
+                        styleSetting = f".ID_{scene}{property}\n"
+                        stash.Log(f"styleSetting = {styleSetting}")
+                        file.write(styleSetting)
+                    doStyleEndTagCheck = False
+            file.write(line)
+def addPropertyToSceneClassToAllFiles(scene, property, ReportName = htmlReportName):
+    if os.path.isfile(ReportName):
+        addPropertyToSceneClass(ReportName, scene, property)
+        for x in range(2, 9999):
+            fileName = ReportName.replace(".html", f"_{x-1}.html")
+            stash.Debug(f"Checking if file '{fileName}' exist.")
+            if not os.path.isfile(fileName):
+                break
+            addPropertyToSceneClass(fileName, scene, property)
+    else:
+        stash.Log(f"Report file does not exist: {ReportName}")
+
+def deleteScene(disableInReport=True, deleteFile=True):
+    if 'Target' not in stash.JSON_INPUT['args']:
+        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+        return
+    scene = stash.JSON_INPUT['args']['Target']
+    stash.Log(f"Processing scene ID# {scene}")
+    result = None
+    result = stash.destroyScene(scene, delete_file=deleteFile)
+    if disableInReport:
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:gray;pointer-events:none;}")
+    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene} with results = {result}")
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id: '{scene}', result: '{result}'" + "}")
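# Note on the JSON these handlers write to stdout: hand-built strings such as
# "{'removeDupTag' : 'complete', ...}" use single quotes and unquoted keys,
# which is not strictly valid JSON. A minimal sketch of a safer alternative
# using the standard library (the helper name write_plugin_result is
# illustrative, not part of the plugin):
import json, sys

def write_plugin_result(**fields):
    # Serialize plugin output as strictly valid JSON,
    # e.g. {"removeDupTag": "complete", "id": "42"}.
    sys.stdout.write(json.dumps(fields))

write_plugin_result(removeDupTag="complete", id="42")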
+def copyScene(moveScene=False):
+    scene1, scene2 = getParseData()
+    if scene1 == None or scene2 == None:
+        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
+        return
+    if moveScene:
+        stash.mergeMetadata(scene1, scene2)
+    result = shutil.copy(scene1['files'][0]['path'], scene2['files'][0]['path'])
+    if moveScene:
+        result = stash.destroyScene(scene1['id'], delete_file=True)
+        stash.Log(f"destroyScene for scene {scene1['id']} results = {result}")
+    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene1['id']} and {scene2['id']}")
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}', result: '{result}'" + "}")
+
+def renameFile():
+    scene, newName = getParseData(getSceneDetails2=False)
+    if scene == None or newName == None:
+        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', newName: '{newName}'" + "}")
+        return
+    newName = newName.strip("'")
+    ext = pathlib.Path(scene['files'][0]['path']).suffix
+    newNameFull = f"{pathlib.Path(scene['files'][0]['path']).resolve().parent}{os.sep}{newName}{ext}"
+    newNameFull = newNameFull.strip("'")
+    newNameFull = newNameFull.replace("\\\\", "\\")
+    oldNameFull = scene['files'][0]['path']
+    oldNameFull = oldNameFull.strip("'")
+    oldNameFull = oldNameFull.replace("\\\\", "\\")
+    stash.Log(f"renaming file '{stash.asc2(oldNameFull)}' to '{stash.asc2(newNameFull)}'")
+    result = os.rename(oldNameFull, newNameFull)
+    stash.renameFileNameInDB(scene['files'][0]['id'], pathlib.Path(oldNameFull).stem, f"{newName}{ext}", UpdateUsingIdOnly = True)
+    updateScenesInReports(scene['id'])
+    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene['id']}; renamed to {newName}; result={result}")
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene['id']}', newName: '{newName}', result: '{result}'" + "}")
+
+def flagScene():
+    scene, flagType = getParseData(False, False)
+    if scene == None or flagType == None:
+        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', flagType: '{flagType}'" + "}")
+        return
+    if flagType == "disable-scene":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:gray;pointer-events:none;}")
+    elif flagType == "strike-through":
+        addPropertyToSceneClassToAllFiles(scene, "{text-decoration: line-through;}")
+    elif flagType == "yellow highlight":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:yellow;}")
+    elif flagType == "green highlight":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:#00FF00;}")
+    elif flagType == "orange highlight":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:orange;}")
+    elif flagType == "cyan highlight":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:cyan;}")
+    elif flagType == "pink highlight":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:pink;}")
+    elif flagType == "red highlight":
+        addPropertyToSceneClassToAllFiles(scene, "{background-color:red;}")
+    elif flagType == "remove all flags":
+        addPropertyToSceneClassToAllFiles(scene, "")
+    else:
+        stash.Log(f"Invalid flagType ({flagType})")
+        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', flagType: '{flagType}'" + "}")
+        return
+    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene}', flagType: '{flagType}'" + "}")
+
+# ToDo: Add to UI menu
+# Remove all Dup tagged files (Just remove from stash, and leave file)
+# Clear GraylistMarkForDel tag
+# Delete GraylistMarkForDel tag
+# Remove from stash all files no longer part of stash library
+# Remove from stash all files in the Exclusion list (Not supporting regexps)
+# ToDo: Add to advance menu
+# Remove only graylist dup
+# Exclude graylist from delete
+# Include graylist in delete
+
+try:
+    if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+        mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "create_duplicate_report_task":
+        mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
+        manageTagggedDuplicates(deleteScenes=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
+        mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "clear_duplicate_tags_task":
+        removeAllDupTagsFromAllScenes()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "graylist_tag_task":
+        manageTagggedDuplicates(setGrayListTag=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
+        stash.metadata_generate({"phashes": True})
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "deleteScene":
+        deleteScene()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "removeScene":
+        deleteScene(deleteFile=False)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "renameFile":
+        renameFile()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "flagScene":
+        flagScene()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "copyScene":
+        copyScene()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "moveScene":
+        copyScene(moveScene=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "removeDupTag":
+        removeDupTag()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "addExcludeTag":
+        addExcludeTag()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "removeExcludeTag":
+        removeExcludeTag()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "mergeTags":
+        mergeTags()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "getLocalDupReportPath":
+        getLocalDupReportPath()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "deleteLocalDupReportHtmlFiles":
+        deleteLocalDupReportHtmlFiles()
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "createDuplicateReportWithoutTagging":
+        mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "deleteAllDupFileManagerTags":
+        removeAllDupTagsFromAllScenes(deleteTags=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesTask":
+        mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "deleteTaggedDuplicatesLwrResOrLwrDuration":
+        mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteLowerResAndDuration=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration":
+        mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True, deleteLowerResAndDuration=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif parse_args.dup_tag:
+        stash.PLUGIN_TASK_NAME = "dup_tag"
+        mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+        stash.Debug(f"Tag duplicate EXIT")
+    elif parse_args.del_tag:
+        stash.PLUGIN_TASK_NAME = "del_tag"
+        manageTagggedDuplicates(deleteScenes=True)
+        stash.Debug(f"Delete Tagged duplicates EXIT")
+    elif parse_args.clear_tag:
+        stash.PLUGIN_TASK_NAME = "clear_tag"
+        removeAllDupTagsFromAllScenes()
+        stash.Debug(f"Clear duplicate tags EXIT")
+    elif parse_args.remove:
+        stash.PLUGIN_TASK_NAME = "remove"
+        mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+        stash.Debug(f"Delete duplicate EXIT")
+    elif len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in advanceMenuOptions:
+        manageTagggedDuplicates(deleteScenes=True, advanceMenuOptionSelected=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    else:
+        stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})")
+except Exception as e:
+    tb = traceback.format_exc()
+
+    stash.Error(f"Exception while running DupFileManager Task({stash.PLUGIN_TASK_NAME}); Error: {e}\nTraceBack={tb}")
+    killScanningJobs()
+    stash.convertToAscii = False
+    stash.Error(f"Error: {e}\nTraceBack={tb}")
+    if doJsonReturn:
+        sys.stdout.write("{" + f"Exception : '{e}; See log file for TraceBack' " + "}")
 
-stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
+stash.Log("\n*********************************\nEXITING ***********************\n*********************************")
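The try/except block above routes every menu task through stash.PLUGIN_TASK_NAME, which ultimately comes from the "mode" value Stash passes in the plugin's JSON input (the defaultArgs entries in the YAML below supply it). A minimal sketch of the same dispatch pattern using a lookup table instead of a long if/elif chain; the task names and bodies here are illustrative, not the plugin's code:

import json, sys

def tag_duplicates():
    print("tagging duplicates")

def delete_duplicates():
    print("deleting duplicates")

TASKS = {
    "tag_duplicates_task": tag_duplicates,
    "delete_duplicates_task": delete_duplicates,
}

json_input = json.loads(sys.stdin.read())
mode = json_input.get("args", {}).get("mode", "")
# Unknown modes fall through to a no-op, mirroring the "Nothing to do!!!" branch.
TASKS.get(mode, lambda: print(f"Nothing to do!!! (mode={mode})"))()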
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index c75f561f..f2f3ef6d 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -1,55 +1,70 @@
 name: DupFileManager
 description: Manages duplicate files.
-version: 0.1.2
+version: 0.1.9
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+ui:
+  javascript:
+    - DupFileManager.js
+  css:
+    - DupFileManager.css
+    - DupFileManager.css.map
+    - DupFileManager.js.map
 settings:
+  matchDupDistance:
+    displayName: Match Duplicate Distance
+    description: (Default=0) Where 0 = Exact Match, 1 = High Match, 2 = Medium Match, and 3 = Low Match.
+    type: NUMBER
   mergeDupFilename:
     displayName: Merge Duplicate Tags
     description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
     type: BOOLEAN
-  permanentlyDelete:
-    displayName: Permanent Delete
-    description: Enable to permanently delete files, instead of moving files to trash can.
-    type: BOOLEAN
   whitelistDelDupInSameFolder:
     displayName: Whitelist Delete In Same Folder
     description: Allow whitelist deletion of duplicates within the same whitelist folder.
     type: BOOLEAN
-  whitelistDoTagLowResDup:
-    displayName: Whitelist Duplicate Tagging
-    description: Enable to tag whitelist duplicates of lower resolution or duration or same folder.
-    type: BOOLEAN
-  zCleanAfterDel:
-    displayName: Run Clean After Delete
-    description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
-    type: BOOLEAN
-  zSwapHighRes:
-    displayName: Swap High Resolution
-    description: If enabled, swap higher resolution duplicate files to preferred path.
-    type: BOOLEAN
-  zSwapLongLength:
-    displayName: Swap Longer Duration
-    description: If enabled, swap longer duration media files to preferred path. Longer is determine by significantLongerTime field.
-    type: BOOLEAN
-  zWhitelist:
+  zvWhitelist:
     displayName: White List
     description: A comma seperated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
     type: STRING
-  zxGraylist:
+  zwGraylist:
     displayName: Gray List
-    description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+    description: Preferential paths to determine which duplicate should be kept. E.g. C:\2nd_Fav,C:\3rd_Fav,C:\4th_Fav,H:\ShouldKeep
     type: STRING
-  zyBlacklist:
+  zxBlacklist:
     displayName: Black List
-    description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
+    description: Least preferential paths; determines primary deletion candidates. E.g. C:\Downloads,C:\DelMe-3rd,C:\DelMe-2nd,C:\DeleteMeFirst
     type: STRING
   zyMaxDupToProcess:
     displayName: Max Dup Process
-    description: Maximum number of duplicates to process. If 0, infinity
+    description: (Default=0) Maximum number of duplicates to process. If 0, infinity.
     type: NUMBER
-  zzdebugTracing:
-    displayName: Debug Tracing
-    description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
+  zySwapBetterBitRate:
+    displayName: Swap Better Bit Rate
+    description: Swap better bit rate for duplicate files. Use with DupFileManager_config.py file option favorHighBitRate
+    type: BOOLEAN
+  zySwapBetterFrameRate:
+    displayName: Swap Better Frame Rate
+    description: Swap better frame rate for duplicates. Use with DupFileManager_config.py file option favorHigherFrameRate
+    type: BOOLEAN
+  zySwapCodec:
+    displayName: Swap Better Codec
+    description: If enabled, swap better codec duplicate files to preferred path.
+    type: BOOLEAN
+  zySwapHighRes:
+    displayName: Swap Higher Resolution
+    description: If enabled, swap higher resolution duplicate files to preferred path.
+    type: BOOLEAN
+  zySwapLongLength:
+    displayName: Swap Longer Duration
+    description: If enabled, swap longer duration media files to preferred path. Longer is determined by the significantLongerTime field.
+    type: BOOLEAN
+  zzDebug:
+    displayName: Debug
+    description: Enable debug so as to add additional debug logging in Stash\plugins\DupFileManager\DupFileManager.log
+    type: BOOLEAN
+  zzTracing:
+    displayName: Tracing
+    description: Enable tracing and debug so as to add additional tracing and debug logging in Stash\plugins\DupFileManager\DupFileManager.log
     type: BOOLEAN
 exec:
   - python
@@ -60,7 +75,11 @@ tasks:
     description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, or black list path.
     defaultArgs:
       mode: tag_duplicates_task
-  - name: Delete Tagged Duplicates
+  - name: Clear Tags
+    description: Clear tag DuplicateMarkForDeletion. Remove the tag from all files.
+    defaultArgs:
+      mode: clear_duplicate_tags_task
+  - name: Delete Tagged Scenes
     description: Only delete scenes having DuplicateMarkForDeletion tag.
     defaultArgs:
       mode: delete_tagged_duplicates_task
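The Gray List and Black List descriptions above treat the comma-separated lists as ordered by preference, which is what the indexStartsWithInList comparisons earlier in mangeDupFiles rely on. A small illustrative sketch of how an ordered prefix list can rank two candidate paths (the helper and sample data below are not the plugin's code):

graylist = ["C:\\2nd_Fav", "C:\\3rd_Fav", "C:\\4th_Fav"]

def index_starts_with(prefix_list, path):
    # Return the index of the first prefix the path starts with, else -1.
    for i, prefix in enumerate(prefix_list):
        if path.lower().startswith(prefix.lower()):
            return i
    return -1

keep = "C:\\3rd_Fav\\scene.mp4"
candidate = "C:\\2nd_Fav\\scene.mp4"
# A lower index means a more preferred folder, so the candidate wins here.
if index_starts_with(graylist, keep) > index_starts_with(graylist, candidate) >= 0:
    keep = candidate
print(keep)  # C:\2nd_Fav\scene.mp4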
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
index ab5b8178..65ee067c 100644
--- a/plugins/DupFileManager/DupFileManager_config.py
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -8,19 +8,85 @@
     "dup_path": "", #Example: "C:\\TempDeleteFolder"
     # The threshold as to what percentage is consider a significant shorter time.
     "significantTimeDiff" : .90, # 90% threshold
-    # Valued passed to stash API function FindDuplicateScenes.
-    "duration_diff" : 10, # (default=10) A value from 1 to 10.
     # If enabled, moves destination file to recycle bin before swapping Hi-Res file.
     "toRecycleBeforeSwap" : True,
     # Character used to seperate items on the whitelist, blacklist, and graylist
     "listSeparator" : ",",
+    # Enable to permanently delete files, instead of moving files to trash can.
+    "permanentlyDelete" : False,
+    # After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
+    "cleanAfterDel" : True,
+    # Generate PHASH after tag or delete task.
+    "doGeneratePhash" : False,
+    # If enabled, skip processing tagged scenes. This option is ignored if createHtmlReport is True
+    "skipIfTagged" : False,
+    # If enabled, stop multiple scanning jobs after processing duplicates
+    "killScanningPostProcess" : True,
+    # If enabled, tag scenes which have longer duration, but lower resolution
+    "tagLongDurationLowRes" : True,
+    # If enabled, bit-rate is used in important comparisons for function allThingsEqual
+    "bitRateIsImporantComp" : True,
+    # If enabled, codec is used in important comparisons for function allThingsEqual
+    "codecIsImporantComp" : True,
+
+    # Tag names **************************************************
     # Tag used to tag duplicates with lower resolution, duration, and file name length.
     "DupFileTag" : "DuplicateMarkForDeletion",
-    # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
-    "DupWhiteListTag" : "DuplicateWhitelistFile",
+    # Tag name used to tag duplicates in the whitelist. E.g. _DuplicateWhitelistFile
+    "DupWhiteListTag" : "_DuplicateWhitelistFile",
+    # Tag name used to exclude duplicate from deletion
+    "excludeDupFileDeleteTag" : "_ExcludeDuplicateMarkForDeletion",
+    # Tag name used to tag scenes with existing tag DuplicateMarkForDeletion, and that are in the graylist
+    "graylistMarkForDeletion" : "_GraylistMarkForDeletion",
+    # Tag name for scenes with significant longer duration but lower resolution
+    "longerDurationLowerResolution" : "_LongerDurationLowerResolution",
+
+    # Other tag related options **************************************************
+    # If enabled, when adding tag DuplicateMarkForDeletion to graylist scene, also add tag _GraylistMarkForDeletion.
+    "graylistTagging" : True,
+    # If enabled, the Clear Tags task clears scenes of all tags (DuplicateMarkForDeletion, _DuplicateWhite..., _ExcludeDup..., _Graylist..., _LongerDur...)
+    "clearAllDupfileManagerTags" : True,
+    # If enabled, append dup tag name with match duplicate distance number. I.E. (DuplicateMarkForDeletion_0) or (DuplicateMarkForDeletion_1)
+    "appendMatchDupDistance" : True,
+    # If enabled, start dup tag name with an underscore. I.E. (_DuplicateMarkForDeletion). Places tag at the end of tag list.
+    "underscoreDupFileTag" : True,
+
+    # Favor settings *********************************************
+    # If enabled, favor longer file name over shorter. If disabled, favor shorter file name.
+ "favorLongerFileName" : True, + # If enabled, favor larger file size over smaller. If disabled, favor smaller file size. + "favorLargerFileSize" : True, + # If enabled, favor videos with a different bit rate value. If favorHighBitRate is true, favor higher rate. If favorHighBitRate is false, favor lower rate + "favorBitRateChange" : True, + # If enabled, favor videos with higher bit rate. Used with either favorBitRateChange option or UI [Swap Bit Rate Change] option. + "favorHighBitRate" : True, + # If enabled, favor videos with a different frame rate value. If favorHigherFrameRate is true, favor higher rate. If favorHigherFrameRate is false, favor lower rate + "favorFrameRateChange" : True, + # If enabled, favor videos with higher frame rate. Used with either favorFrameRateChange option or UI [Swap Better Frame Rate] option. + "favorHigherFrameRate" : True, + # If enabled, favor videos with better codec according to codecRanking + "favorCodecRanking" : True, + # Codec Ranking in order of preference (default (codecRankingSet1) is order of ranking based on maximum potential efficiency) + "codecRankingSet1" : ["h266", "vvc", "av1", "vvdec", "shvc", "h265", "hevc", "xvc", "vp9", "h264", "avc", "mvc", "msmpeg4v10", "vp8", "vcb", "msmpeg4v3", "h263", "h263i", "msmpeg4v2", "msmpeg4v1", "mpeg4", "mpeg-4", "mpeg4video", "theora", "vc3", "vc-3", "vp7", "vp6f", "vp6", "vc1", "vc-1", "mpeg2", "mpeg-2", "mpeg2video", "h262", "h222", "h261", "vp5", "vp4", "vp3", "wmv3", "mpeg1", "mpeg-1", "mpeg1video", "vp3", "wmv2", "wmv1", "wmv", "flv1", "png", "gif", "jpeg", "m-jpeg", "mjpeg"], + # codecRankingSet2 is in order of least potential efficiency + "codecRankingSet2" : ["gif", "png", "flv1", "mpeg1video", "mpeg1", "wmv1", "wmv2", "wmv3", "mpeg2video", "mpeg2", "AVC", "vc1", "vc-1", "msmpeg4v1", "msmpeg4v2", "msmpeg4v3", "mpeg4", "vp6f", "vp8", "h263i", "h263", "h264", "h265", "av1", "vp9", "h266"], + # codecRankingSet3 is in order of quality + "codecRankingSet3" : ["h266", "vp9", "av1", "h265", "h264", "h263", "h263i", "vp8", "vp6f", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"], + # codecRankingSet4 is in order of compatibility + "codecRankingSet4" : ["h264", "vp8", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "h266", "vp9", "av1", "h265", "h263", "h263i", "vp6f", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"], + # Determines which codecRankingSet to use when ranking codec. 
Default is 1 for codecRankingSet1 + "codecRankingSetToUse" : 1, # The following fields are ONLY used when running DupFileManager in script mode "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server } + +# Codec ranking research source: + # https://imagekit.io/blog/video-encoding/ + # https://support.spinetix.com/wiki/Video_decoding + # https://en.wikipedia.org/wiki/Comparison_of_video_codecs + # https://en.wikipedia.org/wiki/List_of_open-source_codecs + # https://en.wikipedia.org/wiki/List_of_codecs + # https://en.wikipedia.org/wiki/Comparison_of_video_container_formats diff --git a/plugins/DupFileManager/DupFileManager_config_dev.py b/plugins/DupFileManager/DupFileManager_config_dev.py new file mode 100644 index 00000000..e4829bed --- /dev/null +++ b/plugins/DupFileManager/DupFileManager_config_dev.py @@ -0,0 +1,24 @@ +# Below fields are in the development stage, and should not be used. +config_dev = { + # If enabled, ignore reparsepoints. For Windows NT drives only. + "ignoreReparsepoints" : True, + # If enabled, ignore symbolic links. + "ignoreSymbolicLinks" : True, + + # If enabled, swap longer file name to preferred path. + "swapLongFileName" : False, + + # If enabled, when finding exact duplicate files, keep file with the shorter name. The default is to keep file name with the longer name. + "keepShorterFileName" : False, + # If enabled, when finding duplicate files, keep media with the shorter time length. The default is to keep media with longer time length. + "keepShorterLength" : False, + # If enabled, when finding duplicate files, keep media with the lower resolution. The default is to keep media with higher resolution. + "keepLowerResolution" : False, + # If enabled, keep duplicate media with high resolution over media with significant longer time. + "keepHighResOverLen" : False, # Requires keepBothHighResAndLongerLen = False + # If enabled, keep both duplicate files if the LOWER resolution file is significantly longer. + "keepBothHighResAndLongerLen" : True, + + # Keep empty to check all paths, or populate it with the only paths to check for duplicates + "onlyCheck_paths": [], #Example: "onlyCheck_paths": ['C:\\SomeMediaPath\\subpath', "E:\\YetAnotherPath\\subpath', "E:\\YetAnotherPath\\secondSubPath'] +} diff --git a/plugins/DupFileManager/DupFileManager_report_config.py b/plugins/DupFileManager/DupFileManager_report_config.py new file mode 100644 index 00000000..81151229 --- /dev/null +++ b/plugins/DupFileManager/DupFileManager_report_config.py @@ -0,0 +1,212 @@ +# Description: This is a Stash plugin which manages duplicate files. +# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) +# Get the latest developers version from following link: +# https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager + +# HTML Report Options ************************************************** +report_config = { + # Paginate HTML report. Maximum number of results to display on one page, before adding (paginating) an additional page. 
+ "htmlReportPaginate" : 100, + # Name of the HTML file to create + "htmlReportName" : "DuplicateTagScenes.html", + # If enabled, report displays an image preview similar to sceneDuplicateChecker + "htmlIncludeImagePreview" : False, + "htmlImagePreviewPopupSize" : 600, + # HTML report prefix, before table listing + "htmlReportPrefix" : """ + + +Stash Duplicate Report + + + + + + + +
+ + + + +
Report InfoReport Options
+ + +
Found (QtyPlaceHolder) duplicate setsDate Created: (DateCreatedPlaceHolder)
+ + + +


+

Stash Duplicate Scenes Report (MatchTypePlaceHolder)

\n""", + # HTML report postfiox, after table listing + "htmlReportPostfix" : "\n", + # HTML report table + "htmlReportTable" : "", + # HTML report table row + "htmlReportTableRow" : "", + # HTML report table header + "htmlReportTableHeader" : "\n") - - for DupFileSet in DupFileSets: - # stash.Trace(f"DupFileSet={DupFileSet}", toAscii=True) - QtyDupSet+=1 - stash.progressBar(QtyDupSet, qtyResults) - SepLine = "---------------------------" - DupFileToKeep = None - DupToCopyFrom = "" - DupFileDetailList = [] - for DupFile in DupFileSet: - QtyDup+=1 - Scene = DupFile - if skipIfTagged and createHtmlReport == False and duplicateMarkForDeletion in Scene['tags']: - stash.Trace(f"Skipping scene '{Scene['files'][0]['path']}' because already tagged with {duplicateMarkForDeletion}") - continue - stash.TraceOnce(f"Scene = {Scene}", toAscii=True) - DupFileDetailList = DupFileDetailList + [Scene] - if os.path.isfile(Scene['files'][0]['path']): - if DupFileToKeep != None: - if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference - QtyExactDup+=1 - else: - QtyAlmostDup+=1 - SepLine = "***************************" - if significantTimeDiffCheck(DupFileToKeep, Scene): - QtyRealTimeDiff += 1 - - if int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['width']) * int(Scene['files'][0]['height']): - logReason(DupFileToKeep, Scene, f"resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}") - DupFileToKeep = Scene - elif significantMoreTimeCompareToBetterVideo(DupFileToKeep, Scene): - if significantTimeDiffCheck(DupFileToKeep, Scene): - theReason = f"significant-duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}" - else: - theReason = f"duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}" - reasonKeyword = "significant-duration" if significantTimeDiffCheck(DupFileToKeep, Scene) else "duration" - logReason(DupFileToKeep, Scene, theReason) - DupFileToKeep = Scene - elif isBetterVideo(DupFileToKeep, Scene): - logReason(DupFileToKeep, Scene, f"codec,bit_rate, or frame_rate: {DupFileToKeep['files'][0]['video_codec']}, {DupFileToKeep['files'][0]['bit_rate']}, {DupFileToKeep['files'][0]['frame_rate']} : {Scene['files'][0]['video_codec']}, {Scene['files'][0]['bit_rate']}, {Scene['files'][0]['frame_rate']}") - DupFileToKeep = Scene - elif stash.startsWithInList(whitelist, Scene['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']): - logReason(DupFileToKeep, Scene, f"not whitelist vs whitelist") - DupFileToKeep = Scene - elif isTaggedExcluded(Scene) and not isTaggedExcluded(DupFileToKeep): - logReason(DupFileToKeep, Scene, f"not ExcludeTag vs ExcludeTag") - DupFileToKeep = Scene - elif allThingsEqual(DupFileToKeep, Scene): - # Only do below checks if all imporant things are equal. 
- if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and not stash.startsWithInList(blacklist, Scene['files'][0]['path']): - logReason(DupFileToKeep, Scene, f"blacklist vs not blacklist") - DupFileToKeep = Scene - elif stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']): - logReason(DupFileToKeep, Scene, f"blacklist-index {stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(blacklist, Scene['files'][0]['path'])}") - DupFileToKeep = Scene - elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and not stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']): - logReason(DupFileToKeep, Scene, f"not graylist vs graylist") - DupFileToKeep = Scene - elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(graylist, Scene['files'][0]['path']): - logReason(DupFileToKeep, Scene, f"graylist-index {stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(graylist, Scene['files'][0]['path'])}") - DupFileToKeep = Scene - elif favorLongerFileName and len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene): - logReason(DupFileToKeep, Scene, f"path-len {len(DupFileToKeep['files'][0]['path'])} < {len(Scene['files'][0]['path'])}") - DupFileToKeep = Scene - elif favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene): - logReason(DupFileToKeep, Scene, f"size {DupFileToKeep['files'][0]['size']} < {Scene['files'][0]['size']}") - DupFileToKeep = Scene - elif not favorLongerFileName and len(DupFileToKeep['files'][0]['path']) > len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene): - logReason(DupFileToKeep, Scene, f"path-len {len(DupFileToKeep['files'][0]['path'])} > {len(Scene['files'][0]['path'])}") - DupFileToKeep = Scene - elif not favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) > int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene): - logReason(DupFileToKeep, Scene, f"size {DupFileToKeep['files'][0]['size']} > {Scene['files'][0]['size']}") - DupFileToKeep = Scene - else: - DupFileToKeep = Scene - # stash.Trace(f"DupFileToKeep = {DupFileToKeep}") - stash.Debug(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True) - else: - stash.Error(f"Scene does NOT exist; path={Scene['files'][0]['path']}; ID={Scene['id']}") - - for DupFile in DupFileDetailList: - if DupFileToKeep != None and DupFile['id'] != DupFileToKeep['id']: - if merge: - result = stash.mergeMetadata(DupFile, DupFileToKeep) - if result != "Nothing To Merge": - QtyMerge += 1 - didAddTag = False - if stash.startsWithInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not 
hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])): - QtySkipForDel+=1 - if isSwapCandidate(DupFileToKeep, DupFile): - if merge: - stash.mergeMetadata(DupFileToKeep, DupFile) - if toRecycleBeforeSwap: - sendToTrash(DupFile['files'][0]['path']) - stash.Log(f"Moving better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'; SrcID={DupFileToKeep['id']};DescID={DupFile['id']};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySwap={QtySwap};QtySkipForDel={QtySkipForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - try: - shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path']) - QtySwap+=1 - except Exception as e: - tb = traceback.format_exc() - stash.Error(f"Exception while moving file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}; SrcID={DupFileToKeep['id']};DescID={DupFile['id']}'; Error: {e}\nTraceBack={tb}") - DupFileToKeep = DupFile - else: - if dupWhitelistTagId and tagDuplicates: - didAddTag = setTagId_withRetry(duplicateWhitelistTag, DupFile, DupFileToKeep, ignoreAutoTag=True) - stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}';AddTagW={didAddTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySkipForDel={QtySkipForDel}", toAscii=True) - else: - if isTaggedExcluded(DupFile): - QtyExcludeForDel+=1 - stash.Log(f"Excluding file {DupFile['files'][0]['path']} because tagged for exclusion via tag {excludeDupFileDeleteTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults}") - else: - # ToDo: Add merge logic here - if deleteDup: - DupFileName = DupFile['files'][0]['path'] - if not deleteBlacklistOnly or stash.startsWithInList(blacklist, DupFile['files'][0]['path']): - if not deleteLowerResAndDuration or (isBetterVideo(DupFile, DupFileToKeep) and not significantMoreTimeCompareToBetterVideo(DupFileToKeep, DupFile)) or (significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep) and not isBetterVideo(DupFileToKeep, DupFile)): - QtyDeleted += 1 - DupFileNameOnly = pathlib.Path(DupFileName).stem - stash.Warn(f"Deleting duplicate '{DupFileName}';QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtyDeleted={QtyDeleted}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - if alternateTrashCanPath != "": - destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" - if os.path.isfile(destPath): - destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" - shutil.move(DupFileName, destPath) - elif moveToTrashCan: - sendToTrash(DupFileName) - stash.destroyScene(DupFile['id'], delete_file=True) - elif tagDuplicates or fileHtmlReport != None: - if excludeFromReportIfSignificantTimeDiff and significantTimeDiffCheck(DupFile, DupFileToKeep, True): - stash.Log(f"Skipping duplicate {DupFile['files'][0]['path']} (ID={DupFile['id']}), because of time difference greater than {significantTimeDiff} for file {DupFileToKeep['files'][0]['path']}.") - continue - QtyTagForDel+=1 - QtyTagForDelPaginate+=1 - didAddTag = False - if tagDuplicates: - didAddTag = setTagId_withRetry(duplicateMarkForDeletion, DupFile, DupFileToKeep, ignoreAutoTag=True) - if fileHtmlReport != None: - # ToDo: Add icons using github path - # add copy button with copy icon - # add move button with r-sqr icon - # repace delete button with trashcan icon - # add rename file code and button - # add delete only from stash db code and button using DB delete icon - stash.Debug(f"Adding scene {DupFile['id']} to HTML report.") - dupFileExist = True if 
os.path.isfile(DupFile['files'][0]['path']) else False - toKeepFileExist = True if os.path.isfile(DupFileToKeep['files'][0]['path']) else False - - fileHtmlReport.write(f"{htmlReportTableRow}") - videoPreview = f"" - if htmlIncludeImagePreview: - imagePreview = f"
  • \"\"\"\"
" - fileHtmlReport.write(f"{getSceneID(DupFile['id'])}
", + # HTML report table data + "htmlReportTableData" : "", + # HTML report video preview + "htmlReportVideoPreview" : "width='160' height='120' controls", # Alternative option "autoplay loop controls" or "autoplay controls" + # The number off seconds in time difference for supper highlight on htmlReport + "htmlHighlightTimeDiff" : 3, + # Supper highlight for details with higher resolution or duration + "htmlSupperHighlight" : "yellow", + # Lower highlight for details with slightly higher duration + "htmlLowerHighlight" : "nyanza", + # Text color for details with different resolution, duration, size, bitrate,codec, or framerate + "htmlDetailDiffTextColor" : "red", + # If enabled, create an HTML report when tagging duplicate files + "createHtmlReport" : True, + # If enabled, report displays stream instead of preview for video + "streamOverPreview" : False, # This option works in Chrome, but does not work very well on firefox. +} diff --git a/plugins/DupFileManager/ModulesValidate.py b/plugins/DupFileManager/ModulesValidate.py new file mode 100644 index 00000000..4de2f3a4 --- /dev/null +++ b/plugins/DupFileManager/ModulesValidate.py @@ -0,0 +1,126 @@ +# ModulesValidate (By David Maisonave aka Axter) +# Description: +# Checks if packages are installed, and optionally install packages if missing. +# The below example usage code should be plave at the very top of the scource code before any other imports. +# Example Usage: +# import ModulesValidate +# ModulesValidate.modulesInstalled(["watchdog", "schedule", "requests"]) +# Testing: +# To test, uninstall packages via command line: pip uninstall -y watchdog schedule requests +import sys, os, pathlib, platform, traceback +# ToDo: Add logic to optionally pull package requirements from requirements.txt file. + +def modulesInstalled(moduleNames, install=True, silent=False): + retrnValue = True + for moduleName in moduleNames: + try: # Try Python 3.3 > way + import importlib + import importlib.util + if moduleName in sys.modules: + if not silent: print(f"{moduleName!r} already in sys.modules") + elif isModuleInstalled(moduleName): + if not silent: print(f"Module {moduleName!r} is available.") + else: + if install and (results:=installModule(moduleName)) > 0: + if results == 1: + print(f"Module {moduleName!r} has been installed") + else: + if not silent: print(f"Module {moduleName!r} is already installed") + continue + else: + if install: + print(f"Can't find the {moduleName!r} module") + retrnValue = False + except Exception as e: + try: + i = importlib.import_module(moduleName) + except ImportError as e: + if install and (results:=installModule(moduleName)) > 0: + if results == 1: + print(f"Module {moduleName!r} has been installed") + else: + if not silent: print(f"Module {moduleName!r} is already installed") + continue + else: + if install: + tb = traceback.format_exc() + print(f"Can't find the {moduleName!r} module! 
Error: {e}\nTraceBack={tb}") + retrnValue = False + return retrnValue + +def isModuleInstalled(moduleName): + try: + __import__(moduleName) + return True + except Exception as e: + pass + return False + +def installModule(moduleName): + try: + if isLinux(): + # Note: Linux may first need : sudo apt install python3-pip + # if error starts with "Command 'pip' not found" + # or includes "No module named pip" + results = os.popen(f"pip --disable-pip-version-check --version").read() + if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1: + results = os.popen(f"sudo apt install python3-pip").read() + results = os.popen(f"pip --disable-pip-version-check --version").read() + if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1: + return -1 + if isFreeBSD(): + print("Warning: installModule may NOT work on freebsd") + pipArg = " --disable-pip-version-check" + if isDocker(): + pipArg += " --break-system-packages" + results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}" + results = results.strip("\n") + if results.find("Requirement already satisfied:") > -1: + return 2 + elif results.find("Successfully installed") > -1: + return 1 + elif modulesInstalled(moduleNames=[moduleName], install=False): + return 1 + except Exception as e: + pass + return 0 + +def installPackage(package): # Should delete this. It doesn't work consistently + try: + import pip + if hasattr(pip, 'main'): + pip.main(['install', package]) + else: + pip._internal.main(['install', package]) + except Exception as e: + return False + return True + +def isDocker(): + cgroup = pathlib.Path('/proc/self/cgroup') + return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text() + +def isWindows(): + if any(platform.win32_ver()): + return True + return False + +def isLinux(): + if platform.system().lower().startswith("linux"): + return True + return False + +def isFreeBSD(): + if platform.system().lower().startswith("freebsd"): + return True + return False + +def isMacOS(): + if sys.platform == "darwin": + return True + return False + +def isWindows(): + if any(platform.win32_ver()): + return True + return False diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md index 7d0cf052..4e76a7f0 100644 --- a/plugins/DupFileManager/README.md +++ b/plugins/DupFileManager/README.md @@ -1,11 +1,40 @@ -# DupFileManager: Ver 0.1.2 (By David Maisonave) +# DupFileManager: Ver 0.1.9 (By David Maisonave) -DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate file in the Stash system. +DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate files in the Stash system. +It has both **task** and **tools-UI** components. ### Features +- Creates a duplicate file report which can be accessed from the settings->tools menu options.The report is created as an HTML file and stored in local path under plugins\DupFileManager\report\DuplicateTagScenes.html. + - See screenshot at the bottom of this page for example report. + - Items on the left side of the report are the primary duplicates designated for deletion. By default, these duplicates are given a special _duplicate tag. + - Items on the right side of the report are designated as primary duplicates to keep. They usually have higher resolution, duration and/or preferred paths. 
+ - The report has the following options: + - Delete: Delete file and remove from Stash library. + - Remove: Remove from Stash library. + - Rename: Rename file. + - Copy: Copy file from left (source) to right (to-keep). + - Move: Copy file and metadata left to right. + - Cpy-Name: Copy file name left to right. + - Add-Exclude: Add exclude tag to scene, so that the scene is excluded from deletion. + - Remove-Tag: Remove duplicate tag from scene. + - Flag-Scene: Flag (mark) scene in report as reviewed (or as requiring further review). Optional flags (yellow, green, orange, cyan, pink, red, strike-through, & disable-scene) + - Merge: Copy Metadata (tags, performers, & studios) from left to right. - Can merge potential source in the duplicate file names for tag names, performers, and studios. - Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file. +- Advanced menu (for specially tagged duplicates) + ![Screenshot 2024-11-22 145139](https://github.com/user-attachments/assets/d76646f0-c5a8-4069-ad0f-a6e5e96e7ed0) + - Delete only specially tagged duplicates in blacklist path. + - Delete duplicates with specified file path. + - Delete duplicates with specific string in the file name. + - Delete duplicates with specified file size range. + - Delete duplicates with specified duration range. + - Delete duplicates with specified resolution range. + - Delete duplicates having specified tags. + - Delete duplicates with specified rating. + - Delete duplicates with any of the above combinations. +- Bottom extended portion of the Advanced Menu screen. + - ![Screenshot 2024-11-22 232005](https://github.com/user-attachments/assets/9a0d2e9d-783b-4ea2-8fa5-3805b40af4eb) - Delete duplicate file task with the following options: - Tasks (Settings->Task->[Plugin Tasks]->DupFileManager) - **Tag Duplicates** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path. @@ -28,12 +57,14 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana - **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder" - **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files. - **addPrimaryDupPathToDetails** - If enabled, adds the primary duplicate path to the scene detail. - +- Tools UI Menu +![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323) + - Can access either **Duplicate File Report (DupFileManager)** or **DupFileManager Tools and Utilities** menu options. ### Requirements -`pip install --upgrade stashapp-tools` -`pip install pyYAML` -`pip install Send2Trash` +- `pip install --upgrade stashapp-tools` +- `pip install requests` +- `pip install Send2Trash` ### Installation @@ -48,3 +79,33 @@ That's it!!! - Options are accessible in the GUI via Settings->Plugins->Plugins->[DupFileManager]. - More options available in DupFileManager_config.py. + +### Screenshots + +- Example DupFileManager duplicate report. (file names have been edited to PG). + - The report displays preview videos that are playable. It plays a few-second sample of the video. This requires scan setting **[Generate animated image previews]** to be enabled when scanning all files. + - ![Screenshot 2024-11-22 225359](https://github.com/user-attachments/assets/dc705b24-e2d7-4663-92fd-1516aa7aacf5) - If there's a scene on the left side that has a higher resolution or duration, it gets a yellow highlight on the report.
+ - There's an optional setting that allows both preview videos and preview images to be displayed on the report. See setting **htmlIncludeImagePreview** in the **DupFileManager_report_config.py** file. + - There are many more options available for how the report is created. These options are targeted at more advanced users. The options are all available in the **DupFileManager_report_config.py** file, and the settings have commented descriptions preceding them. See the **DupFileManager_report_config.py** file in the DupFileManager plugin folder for more details. +- Tools UI Menu +![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323) + - Can access either **Duplicate File Report (DupFileManager)** or **DupFileManager Tools and Utilities** menu options. +- DupFileManager Report Menu + - ![Screenshot 2024-11-22 151630](https://github.com/user-attachments/assets/834ee60f-1a4a-4a3e-bbf7-23aeca2bda1f) +- DupFileManager Tools and Utilities + - ![Screenshot 2024-11-22 152023](https://github.com/user-attachments/assets/4daaea9e-f603-4619-b536-e6609135bab1) +- Full bottom extended portion of the Advanced Menu screen. + - ![Screenshot 2024-11-22 232208](https://github.com/user-attachments/assets/bf1f3021-3a8c-4875-9737-60ee3d7fe675) + +### Future Planned Features +- Currently, the report and advanced menu do not work with Stash settings requiring a password. Additional logic will be added to have them use the API Key. Planned for 1.0.0 Version. +- Add an advanced menu that will work with non-tagged reports. It will iterate through the existing report file(s) to apply deletions, instead of searching the Stash DB for tagged files. Planned for 1.1.0 Version. +- Greylist deletion option will be added to the advanced menu. Planned for 1.0.5 Version. +- Add advanced menu directly to the Settings->Tools menu. Planned for 1.5.0 Version. +- Add report directly to the Settings->Tools menu. Planned for 1.5.0 Version. +- Remove all flags from all scenes option. Planned for 1.0.5 Version. +- Transfer option settings **[Disable Complete Confirmation]** and **[Disable Delete Confirmation]** when paginating. Planned for 1.0.5 Version.
+ + + diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py index 6f0d3d15..a9be414e 100644 --- a/plugins/DupFileManager/StashPluginHelper.py +++ b/plugins/DupFileManager/StashPluginHelper.py @@ -1,12 +1,3 @@ -from stashapi.stashapp import StashInterface -from logging.handlers import RotatingFileHandler -import re, inspect, sys, os, pathlib, logging, json -import concurrent.futures -from stashapi.stash_types import PhashDistance -import __main__ - -_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_" - # StashPluginHelper (By David Maisonave aka Axter) # See end of this file for example usage # Log Features: @@ -24,6 +15,14 @@ # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ +from stashapi.stashapp import StashInterface +from logging.handlers import RotatingFileHandler +import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time +import concurrent.futures +from stashapi.stash_types import PhashDistance +from enum import Enum, IntEnum +import __main__ + class StashPluginHelper(StashInterface): # Primary Members for external reference PLUGIN_TASK_NAME = None @@ -45,15 +44,44 @@ class StashPluginHelper(StashInterface): API_KEY = None excludeMergeTags = None + # class EnumInt(IntEnum): + # def __repr__(self) -> str: + # return f"{self.__class__.__name__}.{self.name}" + # def __str__(self) -> str: + # return str(self.value) + # def serialize(self): + # return self.value + + class EnumValue(Enum): + def __repr__(self) -> str: + return f"{self.__class__.__name__}.{self.name}" + def __str__(self) -> str: + return str(self.value) + def __add__(self, other): + return self.value + other.value + def serialize(self): + return self.value + # printTo argument - LOG_TO_FILE = 1 - LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost. - LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error. - LOG_TO_STASH = 8 - LOG_TO_WARN = 16 - LOG_TO_ERROR = 32 - LOG_TO_CRITICAL = 64 - LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH + class LogTo(IntEnum): + FILE = 1 + CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost. + STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error. 
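+        # Values are powers of two, so printTo targets can be combined by simple addition (e.g., FILE + STASH writes to both the plugin log file and the Stash log); Log() then tests membership with a bitwise AND.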
+ STASH = 8 + WARN = 16 + ERROR = 32 + CRITICAL = 64 + ALL = FILE + CONSOLE + STDERR + STASH + + class DbgLevel(IntEnum): + TRACE = 1 + DBG = 2 + INF = 3 + WRN = 4 + ERR = 5 + CRITICAL = 6 + + DBG_LEVEL = DbgLevel.INF # Misc class variables MAIN_SCRIPT_NAME = None @@ -61,6 +89,25 @@ class StashPluginHelper(StashInterface): LOG_FILE_DIR = None LOG_FILE_NAME = None STDIN_READ = None + stopProcessBarSpin = True + updateProgressbarOnIter = 0 + currentProgressbarIteration = 0 + + class OS_Type(IntEnum): + WINDOWS = 1 + LINUX = 2 + MAC_OS = 3 + FREEBSD = 4 + UNKNOWN_OS = 5 + + OS_TYPE = OS_Type.UNKNOWN_OS + + IS_DOCKER = False + IS_WINDOWS = False + IS_LINUX = False + IS_FREEBSD = False + IS_MAC_OS = False + pluginLog = None logLinePreviousHits = [] thredPool = None @@ -68,45 +115,76 @@ class StashPluginHelper(StashInterface): _mergeMetadata = None encodeToUtf8 = False convertToAscii = False # If set True, it takes precedence over encodeToUtf8 + progressBarIsEnabled = True # Prefix message value - LEV_TRACE = "TRACE: " - LEV_DBG = "DBG: " - LEV_INF = "INF: " - LEV_WRN = "WRN: " - LEV_ERR = "ERR: " - LEV_CRITICAL = "CRITICAL: " - - # Default format - LOG_FORMAT = "[%(asctime)s] %(message)s" + class Level(EnumValue): + TRACE = "TRACE: " + DBG = "DBG: " + INF = "INF: " + WRN = "WRN: " + ERR = "ERR: " + CRITICAL = "CRITICAL: " + class Constant(EnumValue): + # Default format + LOG_FORMAT = "[%(asctime)s] %(message)s" + ARGUMENT_UNSPECIFIED = "_ARGUMENT_UNSPECIFIED_" + NOT_IN_LIST = 2147483646 + # Externally modifiable variables - log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages - log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging + log_to_err_set = LogTo.FILE + LogTo.STDERR # This can be changed by the calling source in order to customize what targets get error messages + log_to_norm = LogTo.FILE + LogTo.CONSOLE # Can be changed so as to set the target output for normal logging # Warn message goes to both plugin log file and stash when sent to Stash log file. - log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages + log_to_wrn_set = LogTo.STASH # This can be changed by the calling source in order to customize what targets get warning messages def __init__(self, - debugTracing = None, # Set debugTracing to True so as to output debug and trace logging - logFormat = LOG_FORMAT, # Plugin log line format - dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file - maxbytes = 8*1024*1024, # Max size of plugin log file - backupcount = 2, # Backup counts when log file size reaches max size - logToWrnSet = 0, # Customize the target output set which will get warning logging - logToErrSet = 0, # Customize the target output set which will get error logging - logToNormSet = 0, # Customize the target output set which will get normal logging - logFilePath = "", # Plugin log file.
If empty, the log file name will be set based on current python file name and path - mainScriptName = "", # The main plugin script file name (full path) - pluginID = "", - settings = None, # Default settings for UI fields - config = None, # From pluginName_config.py or pluginName_setting.py - fragmentServer = None, - stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999 - apiKey = None, # API Key only needed when username and password set while running script via command line + debugTracing = None, # Set debugTracing to True so as to output debug and trace logging + logFormat = Constant.LOG_FORMAT.value, # Plugin log line format + dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file + maxbytes = 8*1024*1024, # Max size of plugin log file + backupcount = 2, # Backup counts when log file size reaches max size + logToWrnSet = 0, # Customize the target output set which will get warning logging + logToErrSet = 0, # Customize the target output set which will get error logging + logToNormSet = 0, # Customize the target output set which will get normal logging + logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path + mainScriptName = "", # The main plugin script file name (full path) + pluginID = "", + settings = None, # Default settings for UI fields + config = None, # From pluginName_config.py or pluginName_setting.py + fragmentServer = None, + stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999 + apiKey = None, # API Key only needed when username and password set while running script via command line DebugTraceFieldName = "zzdebugTracing", + DebugFieldName = "zzDebug", DryRunFieldName = "zzdryRun", - setStashLoggerAsPluginLogger = False): + setStashLoggerAsPluginLogger = False, + DBG_LEVEL = DbgLevel.INF): + if DBG_LEVEL in list(self.DbgLevel): + self.DBG_LEVEL = DBG_LEVEL + if debugTracing: + self.DEBUG_TRACING = debugTracing + if self.DBG_LEVEL > self.DbgLevel.DBG: + self.DBG_LEVEL = self.DbgLevel.TRACE + elif self.DBG_LEVEL < self.DbgLevel.INF: + self.DEBUG_TRACING = True self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2) + if self.isWindows(): + self.IS_WINDOWS = True + self.OS_TYPE = self.OS_Type.WINDOWS + elif self.isLinux(): + self.IS_LINUX = True + self.OS_TYPE = self.OS_Type.LINUX + if self.isDocker(): + self.IS_DOCKER = True + elif self.isFreeBSD(): + self.IS_FREEBSD = True + self.OS_TYPE = self.OS_Type.FREEBSD + if self.isDocker(): + self.IS_DOCKER = True + elif self.isMacOS(): + self.IS_MAC_OS = True + self.OS_TYPE = self.OS_Type.MAC_OS if logToWrnSet: self.log_to_wrn_set = logToWrnSet if logToErrSet: self.log_to_err_set = logToErrSet if logToNormSet: self.log_to_norm = logToNormSet @@ -129,7 +207,6 @@ def __init__(self, else: self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent} - if debugTracing: self.DEBUG_TRACING = debugTracing if config: self.pluginConfig = config if self.Setting('apiKey', "") != "": @@ -191,8 +268,14 @@ def __init__(self, self.API_KEY = self.STASH_CONFIGURATION['apiKey'] self.DRY_RUN = self.Setting(DryRunFieldName, 
self.DRY_RUN) - self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) - if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG + if self.Setting(DebugTraceFieldName, self.DEBUG_TRACING): + self.DEBUG_TRACING = True + self.LOG_LEVEL = logging.TRACE # Note: stdlib logging defines no TRACE level; it must be registered (e.g., via logging.addLevelName) before it is used here. + self.DBG_LEVEL = self.DbgLevel.TRACE + elif self.Setting(DebugFieldName, self.DEBUG_TRACING): + self.DEBUG_TRACING = True + self.LOG_LEVEL = logging.DEBUG + self.DBG_LEVEL = self.DbgLevel.DBG logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem) @@ -202,74 +285,104 @@ def __init__(self, def __del__(self): self.thredPool.shutdown(wait=False) - def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + def Setting(self, name, default=Constant.ARGUMENT_UNSPECIFIED.value, raiseEx=True, notEmpty=False): if self.pluginSettings != None and name in self.pluginSettings: if notEmpty == False or self.pluginSettings[name] != "": return self.pluginSettings[name] if self.pluginConfig != None and name in self.pluginConfig: if notEmpty == False or self.pluginConfig[name] != "": return self.pluginConfig[name] - if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + if default == self.Constant.ARGUMENT_UNSPECIFIED.value and raiseEx: raise Exception(f"Missing {name} from both UI settings and config file settings.") return default - def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None): - if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)): - logMsg = self.asc2(logMsg) - else: - logMsg = logMsg - if printTo == 0: - printTo = self.log_to_norm - elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: - logLevel = logging.ERROR - printTo = self.log_to_err_set - elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO: - logLevel = logging.CRITICAL - printTo = self.log_to_err_set - elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO: - logLevel = logging.WARN - printTo = self.log_to_wrn_set + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None, printLogException = False): + try: + if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg + if printTo == 0: + printTo = self.log_to_norm + elif printTo == self.LogTo.ERROR and logLevel == logging.INFO: + logLevel = logging.ERROR + printTo = self.log_to_err_set + elif printTo == self.LogTo.CRITICAL and logLevel == logging.INFO: + logLevel = logging.CRITICAL + printTo = self.log_to_err_set + elif printTo == self.LogTo.WARN and logLevel == logging.INFO: + logLevel = logging.WARN + printTo = self.log_to_wrn_set + if lineNo == -1: + lineNo = inspect.currentframe().f_back.f_lineno + LN_Str = f"[LN:{lineNo}]" + # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}") + if logLevel == logging.TRACE and (logAlways == False or self.LOG_LEVEL == logging.TRACE): + if levelStr == "": levelStr = self.Level.TRACE + if printTo & self.LogTo.FILE: self.pluginLog.trace(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LogTo.STASH: self.log.trace(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG or self.LOG_LEVEL == logging.TRACE): + if levelStr == "": levelStr = self.Level.DBG + if printTo & self.LogTo.FILE:
self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LogTo.STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.INFO or logLevel == logging.DEBUG: + if levelStr == "": levelStr = self.Level.INF if logLevel == logging.INFO else self.Level.DBG + if printTo & self.LogTo.FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LogTo.STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.WARN: + if levelStr == "": levelStr = self.Level.WRN + if printTo & self.LogTo.FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LogTo.STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.ERROR: + if levelStr == "": levelStr = self.Level.ERR + if printTo & self.LogTo.FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + elif logLevel == logging.CRITICAL: + if levelStr == "": levelStr = self.Level.CRITICAL + if printTo & self.LogTo.FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}") + if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LogTo.CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"{LN_Str} {levelStr}{logMsg}") + if (printTo & self.LogTo.STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): + print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) + except Exception as e: + if printLogException: + tb = traceback.format_exc() + print(f"Exception calling [Log]; Error: {e}\nTraceBack={tb}") + pass + + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): + if printTo == 0: printTo = self.LogTo.FILE if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno - LN_Str = f"[LN:{lineNo}]" - # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}") - if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG): - if levelStr == "": levelStr = self.LEV_DBG - if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}") - if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}") - elif logLevel == logging.INFO or logLevel == logging.DEBUG: - if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG - if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}") - if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}") - elif logLevel == logging.WARN: - if levelStr == "": levelStr = self.LEV_WRN - if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}") - if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}") - elif logLevel == logging.ERROR: - if levelStr == "": levelStr = self.LEV_ERR - if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}") - if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") - elif logLevel == logging.CRITICAL: - if levelStr == "": levelStr = self.LEV_CRITICAL - if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}") - if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}") - if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): - print(f"{LN_Str} {levelStr}{logMsg}") - if (printTo & self.LOG_TO_STDERR) and 
(logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): - print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) + logLev = logging.INFO if logAlways else logging.TRACE + if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways: + if logMsg == "": + logMsg = f"Line number {lineNo}..." + self.Log(logMsg, printTo, logLev, lineNo, self.Level.TRACE, logAlways, toAscii=toAscii) - def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): - if printTo == 0: printTo = self.LOG_TO_FILE + # Log once per session. Only logs the first time called from a particular line number in the code. + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): + lineNo = inspect.currentframe().f_back.f_lineno + if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways: + FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" + if FuncAndLineNo in self.logLinePreviousHits: + return + self.logLinePreviousHits.append(FuncAndLineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) + + def Debug(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): + if printTo == 0: printTo = self.LogTo.FILE if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno logLev = logging.INFO if logAlways else logging.DEBUG if self.DEBUG_TRACING or logAlways: if logMsg == "": logMsg = f"Line number {lineNo}..." - self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) + self.Log(logMsg, printTo, logLev, lineNo, self.Level.DBG, logAlways, toAscii=toAscii) # Log once per session. Only logs the first time called from a particular line number in the code. - def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): + def DebugOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): lineNo = inspect.currentframe().f_back.f_lineno if self.DEBUG_TRACING or logAlways: FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" @@ -279,8 +392,8 @@ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None) self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) # Log INFO on first call, then do Trace on remaining calls. 
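+    # Example: the first LogOnce("starting up") call from a given source line logs at INFO; later calls from that same line are demoted to Trace (when traceOnRemainingCalls is True).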
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): - if printTo == 0: printTo = self.LOG_TO_FILE + def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None, printLogException = False): + if printTo == 0: printTo = self.LogTo.FILE lineNo = inspect.currentframe().f_back.f_lineno FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: @@ -288,49 +401,97 @@ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingC self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) else: self.logLinePreviousHits.append(FuncAndLineNo) - self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii, printLogException=printLogException) - def Warn(self, logMsg, printTo = 0, toAscii = None): + def Warn(self, logMsg, printTo = 0, toAscii = None, printLogException = False): if printTo == 0: printTo = self.log_to_wrn_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii, printLogException=printLogException) - def Error(self, logMsg, printTo = 0, toAscii = None): + def Error(self, logMsg, printTo = 0, toAscii = None, printLogException = False): if printTo == 0: printTo = self.log_to_err_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii, printLogException=printLogException) - def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): + # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names. + # The below non-logging functions use (lower) camelCase naming convention. + def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): if printTo == 0: printTo = self.log_to_norm if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", printTo, logLevel, lineNo) - def ExecuteProcess(self, args, ExecDetach=False): - import platform, subprocess - is_windows = any(platform.win32_ver()) + # Replaces an obsolete UI settings variable with a new name. Only use this with strings and numbers. + # Example usage: + # obsoleteSettingsToConvert = {"OldVariableName" : "NewVariableName", "AnotherOldVarName" : "NewName2"} + # stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "ObsoleteSettingsCheckVer2") + def replaceObsoleteSettings(self, settingSet:dict, SettingToCheckFirst="", init_defaults=False): + if SettingToCheckFirst == "" or self.Setting(SettingToCheckFirst) == False: + for key in settingSet: + obsoleteVar = self.Setting(key) + if isinstance(obsoleteVar, bool): + if obsoleteVar: + if self.Setting(settingSet[key]) == False: + self.Log(f"Detected obsolete (bool) setting ({key}). 
Moving obsolete setting to new setting name {settingSet[key]}.") + results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : False}, init_defaults) + self.Debug(f"configure_plugin = {results}") + else: + self.Log(f"Detected obsolete (bool) setting ({key}), and deleting its content because new setting name ({settingSet[key]}) is already populated.") + results = self.configure_plugin(self.PLUGIN_ID, {key : False}, init_defaults) + self.Debug(f"configure_plugin = {results}") + elif isinstance(obsoleteVar, int): # isinstance returns True here for both int and bool + if obsoleteVar > 0: + if self.Setting(settingSet[key]) > 0: + self.Log(f"Detected obsolete (int) setting ({key}), and deleting its content because new setting name ({settingSet[key]}) is already populated.") + results = self.configure_plugin(self.PLUGIN_ID, {key : 0}, init_defaults) + self.Debug(f"configure_plugin = {results}") + else: + self.Log(f"Detected obsolete (int) setting ({key}). Moving obsolete setting to new setting name {settingSet[key]}.") + results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : 0}, init_defaults) + self.Debug(f"configure_plugin = {results}") + elif obsoleteVar != "": + if self.Setting(settingSet[key]) == "": + self.Log(f"Detected obsolete (str) setting ({key}). Moving obsolete setting to new setting name {settingSet[key]}.") + results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : ""}, init_defaults) + self.Debug(f"configure_plugin = {results}") + else: + self.Log(f"Detected obsolete (str) setting ({key}), and deleting its content because new setting name ({settingSet[key]}) is already populated.") + results = self.configure_plugin(self.PLUGIN_ID, {key : ""}, init_defaults) + self.Debug(f"configure_plugin = {results}") + if SettingToCheckFirst != "": + results = self.configure_plugin(self.PLUGIN_ID, {SettingToCheckFirst : True}, init_defaults) + self.Debug(f"configure_plugin = {results}") + + + def executeProcess(self, args, ExecDetach=False): pid = None - self.Trace(f"is_windows={is_windows} args={args}") - if is_windows: + self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}") + if self.IS_WINDOWS: if ExecDetach: - self.Trace("Executing process using Windows DETACHED_PROCESS") + self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})") DETACHED_PROCESS = 0x00000008 pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid else: pid = subprocess.Popen(args, shell=True).pid else: - self.Trace("Executing process using normal Popen") - pid = subprocess.Popen(args).pid + if ExecDetach: + # For linux detached, use nohup. I.E. 
subprocess.Popen(["nohup", "python", "test.py"]) + if self.IS_LINUX: + args = ["nohup"] + args + self.Trace(f"Executing detached process using Popen({args})") + else: + self.Trace(f"Executing process using normal Popen({args})") + pid = subprocess.Popen(args).pid # On detach, may need the following for MAC OS subprocess.Popen(args, shell=True, start_new_session=True) self.Trace(f"pid={pid}") return pid - def ExecutePythonScript(self, args, ExecDetach=True): + def executePythonScript(self, args, ExecDetach=True): PythonExe = f"{sys.executable}" argsWithPython = [f"{PythonExe}"] + args - return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) + return self.executeProcess(argsWithPython,ExecDetach=ExecDetach) - def Submit(self, *args, **kwargs): + def submit(self, *args, **kwargs): return self.thredPool.submit(*args, **kwargs) def asc2(self, data, convertToAscii=None): @@ -340,24 +501,282 @@ def asc2(self, data, convertToAscii=None): # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function # return str(data)[2:-1] # strip out b'str' - def init_mergeMetadata(self, excludeMergeTags=None): + def initMergeMetadata(self, excludeMergeTags=None): self.excludeMergeTags = excludeMergeTags self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) - # Must call init_mergeMetadata, before calling merge_metadata - def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata - if type(SrcData) is int: - SrcData = self.find_scene(SrcData) - DestData = self.find_scene(DestData) - return self._mergeMetadata.merge(SrcData, DestData) + def mergeMetadata(self, SrcData, DestData, retryCount = 12, sleepSecondsBetweenRetry = 5, excludeMergeTags=None): # Input arguments can be scene ID or scene metadata + import requests + if self._mergeMetadata == None: + self.initMergeMetadata(excludeMergeTags) + errMsg = None + for i in range(0, retryCount): + try: + if errMsg != None: + self.Warn(errMsg) + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) + except (requests.exceptions.ConnectionError, ConnectionResetError): + tb = traceback.format_exc() + errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + except Exception as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + time.sleep(sleepSecondsBetweenRetry) + + def getUpdateProgressBarIter(self, qtyResults): + if qtyResults > 40000: + return 100 + if qtyResults > 20000: + return 80 + if qtyResults > 10000: + return 40 + if qtyResults > 5000: + return 20 + if qtyResults > 2000: + return 10 + if qtyResults > 1000: + return 5 + if qtyResults > 500: + return 3 + if qtyResults > 200: + return 2 + return 1 + + def enableProgressBar(self, enable=True): + self.progressBarIsEnabled = enable + + # Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration. 
+ def setProgressBarIter(self, qtyResults): + if self.progressBarIsEnabled: + self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults) + self.currentProgressbarIteration = 0 + + def progressBar(self, currentIndex, maxCount): + if self.progressBarIsEnabled: + if self.updateProgressbarOnIter > 0: + self.currentProgressbarIteration+=1 + if self.currentProgressbarIteration > self.updateProgressbarOnIter: + self.currentProgressbarIteration = 0 + else: + return + progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) + try: + self.log.progress(progress) + except Exception as e: + pass + + def isDocker(self): + cgroup = pathlib.Path('/proc/self/cgroup') + return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text() + + def isWindows(self): + if any(platform.win32_ver()): + return True + return False + + def isLinux(self): + if platform.system().lower().startswith("linux"): + return True + return False + + def isFreeBSD(self): + if platform.system().lower().startswith("freebsd"): + return True + return False + + def isMacOS(self): + if sys.platform == "darwin": + return True + return False - def Progress(self, currentIndex, maxCount): - progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) - self.log.progress(progress) + + def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False): + if trace: + self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}") + pos = 1 + while self.stopProcessBarSpin == False: + if trace: + self.Trace(f"progressBar({pos}, {maxPos})") + self.progressBar(pos, maxPos) + pos +=1 + if pos > maxPos: + pos = 1 + time.sleep(sleepSeconds) + + def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False): + self.stopProcessBarSpin = False + if trace: + self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}") + self.submit(self.spinProcessBar, sleepSeconds, maxPos, trace) + + def stopSpinningProcessBar(self, sleepSeconds = 1): + self.stopProcessBarSpin = True + time.sleep(sleepSeconds) + + def startsWithInList(self, listToCk, itemToCk): + itemToCk = itemToCk.lower() + for listItem in listToCk: + if itemToCk.startswith(listItem.lower()): + return True + return False + + def indexStartsWithInList(self, listToCk, itemToCk): + itemToCk = itemToCk.lower() + index = -1 + lenItemMatch = 0 + returnValue = self.Constant.NOT_IN_LIST.value + for listItem in listToCk: + index += 1 + if itemToCk.startswith(listItem.lower()): + if len(listItem) > lenItemMatch: # Make sure the best match is selected by getting match with longest string.
+ lenItemMatch = len(listItem) + returnValue = index + return returnValue + + def checkIfTagInlist(self, somelist, tagName, trace=False): + tagId = self.find_tags(q=tagName) + if len(tagId) > 0 and 'id' in tagId[0]: + tagId = tagId[0]['id'] + else: + self.Warn(f"Could not find tag ID for tag '{tagName}'.") + return + somelist = somelist.split(",") + if trace: + self.Trace("#########################################################################") + scenes = self.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details') + qtyResults = len(scenes) + self.Log(f"Found {qtyResults} scenes with tag ({tagName})") + Qty = 0 + for scene in scenes: + Qty+=1 + if self.startsWithInList(somelist, scene['files'][0]['path']): + self.Log(f"Found scene part of list; {scene['files'][0]['path']}") + elif trace: + self.Trace(f"Not part of list; {scene['files'][0]['path']}") + def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False): + tagId = self.find_tags(q=tagName) + if len(tagId): + tagId = tagId[0] + if deleteIfExist: + self.destroy_tag(int(tagId['id'])) + else: + return tagId['id'] + tagId = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag}) + self.Log(f"Dup-tagId={tagId['id']}") + return tagId['id'] + + def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata + scene_details = scene + if isinstance(scene, int) or 'id' not in scene: + scene_details = self.find_scene(scene) + tagIds = [] + doesHaveTagName = False + for tag in scene_details['tags']: + if tag['name'] != tagName: + tagIds += [tag['id']] + else: + doesHaveTagName = True + if doesHaveTagName: + dataDict = {'id' : scene_details['id']} + dataDict.update({'tag_ids' : tagIds}) + self.update_scene(dataDict) + return doesHaveTagName + + def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): # scene can be scene ID or scene metadata + errMsg = None + for i in range(0, retryCount): + try: + if errMsg != None: + self.Warn(errMsg) + scene_details = scene + if isinstance(scene, int) or 'id' not in scene: + scene_details = self.find_scene(scene) + tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)] + for tag in scene_details['tags']: + if tag['name'] == tagName: + return False + else: + tagIds += [tag['id']] + dataDict = {'id' : scene_details['id']} + dataDict.update({'tag_ids' : tagIds}) + self.update_scene(dataDict) + return True + except ConnectionResetError as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [addTag]. 
Will retry; count({i}); Error: {e}\nTraceBack={tb}" + except Exception as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + time.sleep(sleepSecondsBetweenRetry) + + def copyFields(self, srcData, fieldsToCpy): + destData = {} + for key in srcData: + if key in fieldsToCpy: + destData.update({key : srcData[key]}) + return destData + + def renameTag(self,oldTagName, newTagName): + tagMetadata = self.find_tags(q=oldTagName) + if len(tagMetadata) > 0 and 'id' in tagMetadata[0]: + if tagMetadata[0]['name'] == newTagName: + return False + tagMetadata[0]['name'] = newTagName + fieldsToCpy = ["id", "name", "description", "aliases", "ignore_auto_tag", "favorite", "image", "parent_ids", "child_ids"] + tagUpdateInput = self.copyFields(tagMetadata[0], fieldsToCpy) + self.Trace(f"Renaming tag using tagUpdateInput = {tagUpdateInput}") + self.update_tag(tagUpdateInput) + return True + return False + + def updateScene(self, update_input, create=False, retryCount = 24, sleepSecondsBetweenRetry = 5): + errMsg = None + for i in range(0, retryCount): + try: + if errMsg != None: + self.Warn(errMsg) + return self.update_scene(update_input, create) + except ConnectionResetError as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + except Exception as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + time.sleep(sleepSecondsBetweenRetry) + + def destroyScene(self, scene_id, delete_file=False, retryCount = 12, sleepSecondsBetweenRetry = 5): + errMsg = None + for i in range(0, retryCount): + try: + if errMsg != None: + self.Warn(errMsg) + if i > 0: + # Check if the scene still exists + scene = self.find_scene(scene_id) + if scene == None or len(scene) == 0: + self.Warn(f"Scene {scene_id} not found in Stash.") + return False + return self.destroy_scene(scene_id, delete_file) + except ConnectionResetError as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [destroyScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + except Exception as e: + tb = traceback.format_exc() + errMsg = f"Exception calling [destroyScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" + time.sleep(sleepSecondsBetweenRetry) + + def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False): """Runs a plugin operation. The operation is run immediately and does not use the job queue. + This is a blocking call, and does not return until the plugin completes. 
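+        Example (hypothetical task name): runPlugin("DupFileManager", "tag_duplicates", asyn=True)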
Args: plugin_id (ID): plugin_id task_mode (str, optional): Plugin task to perform @@ -375,30 +794,73 @@ def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False): "args": args, } if asyn: - self.Submit(self.call_GQL, query, variables) + self.submit(self.call_GQL, query, variables) return f"Made asynchronous call for plugin {plugin_id}" else: return self.call_GQL(query, variables) - - def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): - query = """ - query FindDuplicateScenes($distance: Int, $duration_diff: Float) { - findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { - ...SceneSlim - } - } - """ - if fragment: - query = re.sub(r'\.\.\.SceneSlim', fragment, query) - else: - query += "fragment SceneSlim on Scene { id }" - - variables = { "distance": distance, "duration_diff": duration_diff } - result = self.call_GQL(query, variables) - return result['findDuplicateScenes'] - # ################################################################################################# - # The below functions extends class StashInterface with functions which are not yet in the class + def stopJobs(self, startPos = 0, startsWith = ""): + taskQue = self.job_queue() + if taskQue != None: + count = 0 + for jobDetails in taskQue: + count+=1 + if count > startPos: + if startsWith == "" or jobDetails['description'].startswith(startsWith): + self.Log(f"Killing Job ID({jobDetails['id']}); description={jobDetails['description']}") + self.stop_job(jobDetails['id']) + else: + self.Log(f"Excluding Job ID({jobDetails['id']}); description={jobDetails['description']}; {jobDetails})") + else: + self.Log(f"Skipping Job ID({jobDetails['id']}); description={jobDetails['description']}; {jobDetails})") + + def toJson(self, data, replaceSingleQuote=False): + if replaceSingleQuote: + data = data.replace("'", '"') + data = data.replace("\\", "\\\\") + data = data.replace("\\\\\\\\", "\\\\") + return json.loads(data) + + def isCorrectDbVersion(self, verNumber = 68): + results = self.sql_query("select version from schema_migrations") + # self.Log(results) + if len(results['rows']) == 0 or len(results['rows'][0]) == 0: + return False + return int(results['rows'][0][0]) == verNumber + + def renameFileNameInDB(self, fileId, oldName, newName, UpdateUsingIdOnly = False): + if self.isCorrectDbVersion(): + query = f'update files set basename = "{newName}" where basename = "{oldName}" and id = {fileId};' + if UpdateUsingIdOnly: + query = f'update files set basename = "{newName}" where id = {fileId};' + self.Trace(f"Executing query ({query})") + results = self.sql_commit(query) + if 'rows_affected' in results and results['rows_affected'] == 1: + return True + return False + + def getFileNameFromDB(self, id): + results = self.sql_query(f'select basename from files where id = {id};') + self.Trace(f"results = ({results})") + if len(results['rows']) == 0 or len(results['rows'][0]) == 0: + return None + return results['rows'][0][0] + + # ############################################################################################################ + # Functions which are candidates to be added to parent class use snake_case naming convention. + # ############################################################################################################ + # The below functions extend class StashInterface with functions which are not yet in the class, or + # include fixes for functions which have not yet made it into the official class.
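+    # Example (hypothetical library path): stash.metadata_scan(paths=["/media/videos"]) starts a scan of the given paths;
+    # when no flags are supplied, the call falls back to the server's default scan options.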
+ def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan + query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }" + scan_metadata_input = {"paths": paths} + if flags: + scan_metadata_input.update(flags) + elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"): + scan_metadata_input.update(scan_config) + result = self.call_GQL(query, {"input": scan_metadata_input}) + return result["metadataScan"] + def get_all_scenes(self): query_all_scenes = """ query AllScenes { @@ -451,6 +913,43 @@ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails def rename_generated_files(self): return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") + + def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): + query = """ + query FindDuplicateScenes($distance: Int, $duration_diff: Float) { + findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { + ...SceneSlim + } + } + """ + if fragment: + query = re.sub(r'\.\.\.SceneSlim', fragment, query) + else: + query += "fragment SceneSlim on Scene { id }" + + variables = { "distance": distance, "duration_diff": duration_diff } + result = self.call_GQL(query, variables) + return result['findDuplicateScenes'] + + # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + # Direct SQL associated functions + def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata + results = None + if data == None: + return results + if 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]: + results = self.sql_query(f"select * from files where id = {data['files'][0]['id']}") + else: + results = self.sql_query(f"select * from files where id = {data}") + if raw_data: + return results + if 'rows' in results: + return results['rows'][0] + self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.") + return None + + def set_file_basename(self, id, basename): + return self.sql_commit(f"update files set basename = '{basename}' where id = {id}") class mergeMetadata: # A class to merge scene metadata from source scene to destination scene srcData = None @@ -471,7 +970,8 @@ def merge(self, SrcData, DestData): self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) self.mergeItems('performers', 'performer_ids', []) self.mergeItems('galleries', 'gallery_ids', []) - self.mergeItems('movies', 'movies', []) + # Looks like movies has been removed from new Stash version + # self.mergeItems('movies', 'movies', []) self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) self.mergeItem('studio', 'studio_id', 'id') self.mergeItem('title') @@ -524,3 +1024,54 @@ def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith listToAdd += [item['id']] self.dataDict.update({ updateFieldName : listToAdd}) # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) + +class taskQueue: + taskqueue = None + def __init__(self, taskqueue): + self.taskqueue = taskqueue + + def tooManyScanOnTaskQueue(self, tooManyQty = 5): + count = 0 + if self.taskqueue == None: + return False + for jobDetails in self.taskqueue: + if jobDetails['description'] == "Scanning...": + count += 1 + if count < tooManyQty: + return 
False
+        return True
+
+    def cleanJobOnTaskQueue(self):
+        for jobDetails in self.taskqueue:
+            if jobDetails['description'] == "Cleaning...":
+                return True
+        return False
+
+    def cleanGeneratedJobOnTaskQueue(self):
+        for jobDetails in self.taskqueue:
+            if jobDetails['description'] == "Cleaning generated files...":
+                return True
+        return False
+
+    def isRunningPluginTaskJobOnTaskQueue(self, taskName):
+        for jobDetails in self.taskqueue:
+            if jobDetails['description'] == f"Running plugin task: {taskName}":
+                return True
+        return False
+
+    def tagDuplicatesJobOnTaskQueue(self):
+        return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")
+
+    def clearDupTagsJobOnTaskQueue(self):
+        return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")
+
+    def generatePhashMatchingJobOnTaskQueue(self):
+        return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")
+
+    def deleteDuplicatesJobOnTaskQueue(self):
+        return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")
+
+    def deleteTaggedScenesJobOnTaskQueue(self):
+        return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")
+
diff --git a/plugins/DupFileManager/advance_options.html b/plugins/DupFileManager/advance_options.html
new file mode 100644
index 00000000..262c078d
--- /dev/null
+++ b/plugins/DupFileManager/advance_options.html
@@ -0,0 +1,1902 @@
+
+
+
+DupFileManager Advance Menus
+
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DupFileManager Advance _DuplicateMarkForDeletion_? Tagged Files Menu | Apply Multiple Options
+ + +
+ + +
+ + + + +
+
+ + + + + + + + +
+
+ +
+ + + + + + + + +
+
+ +
+ + + + + + + + + + +
+
+ +
+ + + + + + +
+
+ + + + + + +
+
+ + + + + + +
+
+ + + + + + + + + + +
+
+ +
+ + + + +
+ + + + + + + + + + +
+
+ +
+
+
+ +
+ + + + + + +
Create report with different [Match Duplicate Distance] options +
Overrides user [Match Duplicate Distance] and [significantTimeDiff] settings
+
+ + +
+
+ + + + + +
Create Report with Tagging
+ +
+ +
+ +
+ +
+ + + + + +
Create Report without Tagging
+ +
+ +
+ +
+ +
+ Details: +
    +
  1. Match Duplicate Distance Number Details (see the sketch after this list)
  2. +
      +
    1. Exact Match
    2. +
        +
      1. Safest and most reliable option
      2. +
      3. Uses tag name _DuplicateMarkForDeletion_0
      4. +
      5. Has the fewest results, and it's very rare to have false matches.
      6. +
      +
    3. High Match
    4. +
        +
      1. Recommended Setting
      2. +
      3. Safe and usually reliable
      4. +
      5. Uses tag name _DuplicateMarkForDeletion_1
      6. +
      7. Scenes tagged by Exact Match will have both tags (_DuplicateMarkForDeletion_0 and _DuplicateMarkForDeletion_1)
      8. +
      +
    5. Medium Match
    6. +
        +
      1. Not so safe. Some false matches
      2. +
      3. To reduce false matches, use a time difference of .96 or higher.
      4. +
      5. Uses tag name _DuplicateMarkForDeletion_2
      6. +
      7. Scenes tagged by 0 and 1 will have three tags.
      8. +
      +
    7. Low Match
    8. +
        +
      1. Unsafe, and many false matches
      2. +
      3. To reduce false matches, use a time difference of .98 or higher.
      4. +
      5. Uses tag name _DuplicateMarkForDeletion_3
      6. +
      7. Scenes tagged by 0, 1, and 2 will have four tags.
      8. +
      9. Has the most results, but with many false matches.
      10. +
      +
    +
  3. Time Difference
  4. +
      +
    1. Significant time difference setting, where 1 equals 100% and (.9) equals 90%.
    2. +
    3. This setting overrides the setting in DupFileManager_config.py.
    4. +
        +
      1. See setting significantTimeDiff in DupFileManager_config.py
      2. +
      +
    5. This setting is generally not useful for [Exact Match] reports.
    6. +
    7. This is an important setting when creating Low or Medium match reports. It will reduce false matches.
    8. +
    +
  5. Report with tagging
  6. +
      +
    1. Reports with tagging will work with the above DupFileManager Advance Menu.
    2. +
    3. The report can take several minutes to complete.
    4. +
    5. It takes much more time to produce a report with tagging compared to creating a report without tagging.
    6. +
    +
  7. Report WITHOUT tagging
  8. +
      +
    1. Reports without tagging can NOT be used with the above DupFileManager Advance Menu.
    2. +
    3. The report is created much faster. It usually takes a few seconds to complete.
    4. +
    5. This is the recommended report type to create if the DupFileManager Advance Menu is not needed or desired.
    6. +
    +
+
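To make the Details above concrete, here is a minimal sketch of how a menu selection such as "2:.96" maps to the matching parameters described in this list. It mirrors how DupFileManager parses its Target argument; parse_target itself is a hypothetical helper for illustration, not plugin code:

    # Sketch: mapping a report-menu Target string to matching parameters.
    from stashapi.stash_types import PhashDistance

    def parse_target(target):
        # The first character selects the match level described above:
        # 0=Exact, 1=High, 2=Medium, 3=Low.
        levels = {"0": PhashDistance.EXACT, "1": PhashDistance.HIGH,
                  "2": PhashDistance.MEDIUM, "3": PhashDistance.LOW}
        match_distance = levels.get(target[0], PhashDistance.EXACT)
        # An optional ":n" suffix overrides significantTimeDiff (1 = 100%, .9 = 90%).
        time_diff = None  # None means: keep the DupFileManager_config.py value
        if target.find(":") == 1:
            time_diff = float(target[2:])
            # The plugin clamps this value to the range .25 to 1.0.
            time_diff = max(0.25, min(1.0, time_diff))
        return match_distance, time_diff

    # Example: Medium match with a 96% time-difference threshold.
    print(parse_target("2:.96"))  # (PhashDistance.MEDIUM, 0.96)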
+ + + + diff --git a/plugins/DupFileManager/requirements.txt b/plugins/DupFileManager/requirements.txt index d503550d..19069845 100644 --- a/plugins/DupFileManager/requirements.txt +++ b/plugins/DupFileManager/requirements.txt @@ -1,4 +1,3 @@ stashapp-tools >= 0.2.50 -pyYAML -watchdog +requests Send2Trash \ No newline at end of file From 18057216adb1c4f94516ca40b88e8b8a9339db21 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sat, 23 Nov 2024 00:40:14 -0500 Subject: [PATCH 36/39] Delete DupFileManager_config_dev.py --- .../DupFileManager_config_dev.py | 24 ------------------- 1 file changed, 24 deletions(-) delete mode 100644 plugins/DupFileManager/DupFileManager_config_dev.py diff --git a/plugins/DupFileManager/DupFileManager_config_dev.py b/plugins/DupFileManager/DupFileManager_config_dev.py deleted file mode 100644 index e4829bed..00000000 --- a/plugins/DupFileManager/DupFileManager_config_dev.py +++ /dev/null @@ -1,24 +0,0 @@ -# Below fields are in the development stage, and should not be used. -config_dev = { - # If enabled, ignore reparsepoints. For Windows NT drives only. - "ignoreReparsepoints" : True, - # If enabled, ignore symbolic links. - "ignoreSymbolicLinks" : True, - - # If enabled, swap longer file name to preferred path. - "swapLongFileName" : False, - - # If enabled, when finding exact duplicate files, keep file with the shorter name. The default is to keep file name with the longer name. - "keepShorterFileName" : False, - # If enabled, when finding duplicate files, keep media with the shorter time length. The default is to keep media with longer time length. - "keepShorterLength" : False, - # If enabled, when finding duplicate files, keep media with the lower resolution. The default is to keep media with higher resolution. - "keepLowerResolution" : False, - # If enabled, keep duplicate media with high resolution over media with significant longer time. - "keepHighResOverLen" : False, # Requires keepBothHighResAndLongerLen = False - # If enabled, keep both duplicate files if the LOWER resolution file is significantly longer. - "keepBothHighResAndLongerLen" : True, - - # Keep empty to check all paths, or populate it with the only paths to check for duplicates - "onlyCheck_paths": [], #Example: "onlyCheck_paths": ['C:\\SomeMediaPath\\subpath', "E:\\YetAnotherPath\\subpath', "E:\\YetAnotherPath\\secondSubPath'] -} From 78dfa06499bebc5877a165bccac56416df98a71e Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sat, 23 Nov 2024 00:42:57 -0500 Subject: [PATCH 37/39] Delete DupFileManager.dev.py --- plugins/DupFileManager/DupFileManager.dev.py | 1440 ------------------ 1 file changed, 1440 deletions(-) delete mode 100644 plugins/DupFileManager/DupFileManager.dev.py diff --git a/plugins/DupFileManager/DupFileManager.dev.py b/plugins/DupFileManager/DupFileManager.dev.py deleted file mode 100644 index 630e16e2..00000000 --- a/plugins/DupFileManager/DupFileManager.dev.py +++ /dev/null @@ -1,1440 +0,0 @@ -# Description: This is a Stash plugin which manages duplicate files. 
-# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) -# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager -# Note: To call this script outside of Stash, pass argument --url -# Example: python DupFileManager.py --url http://localhost:9999 -a -try: - import ModulesValidate - ModulesValidate.modulesInstalled(["send2trash", "requests"], silent=True) -except Exception as e: - import traceback, sys - tb = traceback.format_exc() - print(f"ModulesValidate Exception. Error: {e}\nTraceBack={tb}", file=sys.stderr) -import os, sys, time, pathlib, argparse, platform, shutil, traceback, logging, requests -from datetime import datetime -from StashPluginHelper import StashPluginHelper -from stashapi.stash_types import PhashDistance -from DupFileManager_config import config # Import config from DupFileManager_config.py -from DupFileManager_report_config import report_config - -# ToDo: make sure the following line of code works -config += report_config - -parser = argparse.ArgumentParser() -parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') -parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') -parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.') -parser.add_argument('--clear_dup_tag', '-c', dest='clear_tag', action='store_true', help='Clear duplicates of duplicate tags.') -parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.') -parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.') -parse_args = parser.parse_args() - -settings = { - "matchDupDistance": 0, - "mergeDupFilename": False, - "whitelistDelDupInSameFolder": False, - "zvWhitelist": "", - "zwGraylist": "", - "zxBlacklist": "", - "zyMaxDupToProcess": 0, - "zySwapHighRes": False, - "zySwapLongLength": False, - "zySwapBetterBitRate": False, - "zySwapCodec": False, - "zySwapBetterFrameRate": False, - "zzDebug": False, - "zzTracing": False, - - "zzObsoleteSettingsCheckVer2": False, # This is a hidden variable that is NOT displayed in the UI - - # Obsolete setting names - "zWhitelist": "", - "zxGraylist": "", - "zyBlacklist": "", - "zyMatchDupDistance": 0, - "zSwapHighRes": False, - "zSwapLongLength": False, - "zSwapBetterBitRate": False, - "zSwapCodec": False, - "zSwapBetterFrameRate": False, -} -stash = StashPluginHelper( - stash_url=parse_args.stash_url, - debugTracing=parse_args.trace, - settings=settings, - config=config, - maxbytes=10*1024*1024, - DebugTraceFieldName="zzTracing", - DebugFieldName="zzDebug", - ) -stash.convertToAscii = True - -advanceMenuOptions = [ "applyCombo", "applyComboBlacklist", "pathToDelete", "pathToDeleteBlacklist", "sizeToDeleteLess", "sizeToDeleteGreater", "sizeToDeleteBlacklistLess", "sizeToDeleteBlacklistGreater", "durationToDeleteLess", "durationToDeleteGreater", "durationToDeleteBlacklistLess", "durationToDeleteBlacklistGreater", - "commonResToDeleteLess", "commonResToDeleteEq", "commonResToDeleteGreater", "commonResToDeleteBlacklistLess", "commonResToDeleteBlacklistEq", "commonResToDeleteBlacklistGreater", "resolutionToDeleteLess", "resolutionToDeleteEq", "resolutionToDeleteGreater", - "resolutionToDeleteBlacklistLess", "resolutionToDeleteBlacklistEq", "resolutionToDeleteBlacklistGreater", "ratingToDeleteLess", 
"ratingToDeleteEq", "ratingToDeleteGreater", "ratingToDeleteBlacklistLess", "ratingToDeleteBlacklistEq", "ratingToDeleteBlacklistGreater", - "tagToDelete", "tagToDeleteBlacklist", "titleToDelete", "titleToDeleteBlacklist", "pathStrToDelete", "pathStrToDeleteBlacklist"] - -doJsonReturnModeTypes = ["tag_duplicates_task", "removeDupTag", "addExcludeTag", "removeExcludeTag", "mergeTags", "getLocalDupReportPath", - "createDuplicateReportWithoutTagging", "deleteLocalDupReportHtmlFiles", "clear_duplicate_tags_task", - "deleteAllDupFileManagerTags", "deleteBlackListTaggedDuplicatesTask", "deleteTaggedDuplicatesLwrResOrLwrDuration", - "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"] -doJsonReturnModeTypes += [advanceMenuOptions] -doJsonReturn = False -if len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in doJsonReturnModeTypes: - doJsonReturn = True - stash.log_to_norm = stash.LogTo.FILE -elif stash.PLUGIN_TASK_NAME == "doEarlyExit": - time.sleep(3) - stash.Log("Doing early exit because of task name") - time.sleep(3) - exit(0) - -stash.Log("******************* Starting *******************") -if len(sys.argv) > 1: - stash.Log(f"argv = {sys.argv}") -else: - stash.Debug(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}; PLUGIN_TASK_NAME = {stash.PLUGIN_TASK_NAME}; argv = {sys.argv}") -stash.status(logLevel=logging.DEBUG) - -obsoleteSettingsToConvert = {"zWhitelist" : "zvWhitelist", "zxGraylist" : "zwGraylist", "zyBlacklist" : "zxBlacklist", "zyMatchDupDistance" : "matchDupDistance", "zSwapHighRes" : "zySwapHighRes", "zSwapLongLength" : "zySwapLongLength", "zSwapBetterBitRate" : "zySwapBetterBitRate", "zSwapCodec" : "zySwapCodec", "zSwapBetterFrameRate" : "zySwapBetterFrameRate"} -stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "zzObsoleteSettingsCheckVer2") - - -LOG_STASH_N_PLUGIN = stash.LogTo.STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LogTo.CONSOLE + stash.LogTo.FILE -listSeparator = stash.Setting('listSeparator', ',', notEmpty=True) -addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails') -clearAllDupfileManagerTags = stash.Setting('clearAllDupfileManagerTags') -doGeneratePhash = stash.Setting('doGeneratePhash') -mergeDupFilename = stash.Setting('mergeDupFilename') -moveToTrashCan = False if stash.Setting('permanentlyDelete') else True -alternateTrashCanPath = stash.Setting('dup_path') -whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder') -graylistTagging = stash.Setting('graylistTagging') -maxDupToProcess = int(stash.Setting('zyMaxDupToProcess')) -significantTimeDiff = float(stash.Setting('significantTimeDiff')) -toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap') -cleanAfterDel = stash.Setting('cleanAfterDel') - -swapHighRes = stash.Setting('zySwapHighRes') -swapLongLength = stash.Setting('zySwapLongLength') -swapBetterBitRate = stash.Setting('zySwapBetterBitRate') -swapCodec = stash.Setting('zySwapCodec') -swapBetterFrameRate = stash.Setting('zySwapBetterFrameRate') -favorLongerFileName = stash.Setting('favorLongerFileName') -favorLargerFileSize = stash.Setting('favorLargerFileSize') -favorBitRateChange = stash.Setting('favorBitRateChange') -favorHighBitRate = stash.Setting('favorHighBitRate') -favorFrameRateChange = stash.Setting('favorFrameRateChange') -favorHigherFrameRate = stash.Setting('favorHigherFrameRate') -favorCodecRanking = stash.Setting('favorCodecRanking') -codecRankingSetToUse = stash.Setting('codecRankingSetToUse') -if codecRankingSetToUse == 4: - codecRanking = 
stash.Setting('codecRankingSet4') -elif codecRankingSetToUse == 3: - codecRanking = stash.Setting('codecRankingSet3') -elif codecRankingSetToUse == 2: - codecRanking = stash.Setting('codecRankingSet2') -else: - codecRanking = stash.Setting('codecRankingSet1') -skipIfTagged = stash.Setting('skipIfTagged') -killScanningPostProcess = stash.Setting('killScanningPostProcess') -tagLongDurationLowRes = stash.Setting('tagLongDurationLowRes') -bitRateIsImporantComp = stash.Setting('bitRateIsImporantComp') -codecIsImporantComp = stash.Setting('codecIsImporantComp') - -excludeFromReportIfSignificantTimeDiff = False - -matchDupDistance = int(stash.Setting('matchDupDistance')) -matchPhaseDistance = PhashDistance.EXACT -matchPhaseDistanceText = "Exact Match" -if stash.PLUGIN_TASK_NAME == "tag_duplicates_task" and 'Target' in stash.JSON_INPUT['args']: - if stash.JSON_INPUT['args']['Target'].startswith("0"): - matchDupDistance = 0 - elif stash.JSON_INPUT['args']['Target'].startswith("1"): - matchDupDistance = 1 - elif stash.JSON_INPUT['args']['Target'].startswith("2"): - matchDupDistance = 2 - elif stash.JSON_INPUT['args']['Target'].startswith("3"): - matchDupDistance = 3 - - if stash.JSON_INPUT['args']['Target'].find(":") == 1: - significantTimeDiff = float(stash.JSON_INPUT['args']['Target'][2:]) - excludeFromReportIfSignificantTimeDiff = True - -if matchDupDistance == 1: - matchPhaseDistance = PhashDistance.HIGH - matchPhaseDistanceText = "High Match" -elif matchDupDistance == 2: - matchPhaseDistance = PhashDistance.MEDIUM - matchPhaseDistanceText = "Medium Match" -elif matchDupDistance == 3: - matchPhaseDistance = PhashDistance.LOW - matchPhaseDistanceText = "Low Match" - -# significantTimeDiff can not be higher than 1 and shouldn't be lower than .5 -if significantTimeDiff > 1: - significantTimeDiff = float(1.00) -if significantTimeDiff < .25: - significantTimeDiff = float(0.25) - - -duplicateMarkForDeletion = stash.Setting('DupFileTag') -if duplicateMarkForDeletion == "": - duplicateMarkForDeletion = 'DuplicateMarkForDeletion' - -base1_duplicateMarkForDeletion = duplicateMarkForDeletion - -duplicateWhitelistTag = stash.Setting('DupWhiteListTag') -if duplicateWhitelistTag == "": - duplicateWhitelistTag = '_DuplicateWhitelistFile' - -excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag') -if excludeDupFileDeleteTag == "": - excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion' - -graylistMarkForDeletion = stash.Setting('graylistMarkForDeletion') -if graylistMarkForDeletion == "": - graylistMarkForDeletion = '_GraylistMarkForDeletion' - -longerDurationLowerResolution = stash.Setting('longerDurationLowerResolution') -if longerDurationLowerResolution == "": - longerDurationLowerResolution = '_LongerDurationLowerResolution' - -excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag] - -if stash.Setting('underscoreDupFileTag') and not duplicateMarkForDeletion.startswith('_'): - duplicateMarkForDeletionWithOutUnderscore = duplicateMarkForDeletion - duplicateMarkForDeletion = "_" + duplicateMarkForDeletion - if stash.renameTag(duplicateMarkForDeletionWithOutUnderscore, duplicateMarkForDeletion): - stash.Log(f"Renamed tag {duplicateMarkForDeletionWithOutUnderscore} to {duplicateMarkForDeletion}") - stash.Trace(f"Added underscore to {duplicateMarkForDeletionWithOutUnderscore} = {duplicateMarkForDeletion}") - excludeMergeTags += [duplicateMarkForDeletion] -else: - stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}") - 
-base2_duplicateMarkForDeletion = duplicateMarkForDeletion - -if stash.Setting('appendMatchDupDistance'): - duplicateMarkForDeletion += f"_{matchDupDistance}" - excludeMergeTags += [duplicateMarkForDeletion] - -stash.initMergeMetadata(excludeMergeTags) - -graylist = stash.Setting('zwGraylist').split(listSeparator) -graylist = [item.lower() for item in graylist] -if graylist == [""] : graylist = [] -stash.Trace(f"graylist = {graylist}") -whitelist = stash.Setting('zvWhitelist').split(listSeparator) -whitelist = [item.lower() for item in whitelist] -if whitelist == [""] : whitelist = [] -stash.Trace(f"whitelist = {whitelist}") -blacklist = stash.Setting('zxBlacklist').split(listSeparator) -blacklist = [item.lower() for item in blacklist] -if blacklist == [""] : blacklist = [] -stash.Trace(f"blacklist = {blacklist}") - -def realpath(path): - """ - get_symbolic_target for win - """ - try: - import win32file - f = win32file.CreateFile(path, win32file.GENERIC_READ, - win32file.FILE_SHARE_READ, None, - win32file.OPEN_EXISTING, - win32file.FILE_FLAG_BACKUP_SEMANTICS, None) - target = win32file.GetFinalPathNameByHandle(f, 0) - # an above gives us something like u'\\\\?\\C:\\tmp\\scalarizr\\3.3.0.7978' - return target.strip('\\\\?\\') - except ImportError: - handle = open_dir(path) - target = get_symbolic_target(handle) - check_closed(handle) - return target - -def isReparsePoint(path): - import win32api - import win32con - from parse_reparsepoint import Navigator - FinalPathname = realpath(path) - stash.Log(f"(path='{path}') (FinalPathname='{FinalPathname}')") - if FinalPathname != path: - stash.Log(f"Symbolic link '{path}'") - return True - if not os.path.isdir(path): - path = os.path.dirname(path) - return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT - -def testReparsePointAndSymLink(merge=False, deleteDup=False): - stash.Trace(f"Debug Tracing (platform.system()={platform.system()})") - myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link - myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point - myTestPath3 = r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4" #symbolic link - myTestPath4 = r"E:\Stash\plugins\RenameFile\README.md" #symbolic link - myTestPath5 = r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md" #symbolic link - myTestPath6 = r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md" # not reparse point - stash.Log(f"Testing '{myTestPath1}'") - if isReparsePoint(myTestPath1): - stash.Log(f"isSymLink '{myTestPath1}'") - else: - stash.Log(f"Not isSymLink '{myTestPath1}'") - - if isReparsePoint(myTestPath2): - stash.Log(f"isSymLink '{myTestPath2}'") - else: - stash.Log(f"Not isSymLink '{myTestPath2}'") - - if isReparsePoint(myTestPath3): - stash.Log(f"isSymLink '{myTestPath3}'") - else: - stash.Log(f"Not isSymLink '{myTestPath3}'") - - if isReparsePoint(myTestPath4): - stash.Log(f"isSymLink '{myTestPath4}'") - else: - stash.Log(f"Not isSymLink '{myTestPath4}'") - - if isReparsePoint(myTestPath5): - stash.Log(f"isSymLink '{myTestPath5}'") - else: - stash.Log(f"Not isSymLink '{myTestPath5}'") - - if isReparsePoint(myTestPath6): - stash.Log(f"isSymLink '{myTestPath6}'") - else: - stash.Log(f"Not isSymLink '{myTestPath6}'") - return - -detailPrefix = "BaseDup=" -detailPostfix = "\n" - -def setTagId(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False): - details = "" - ORG_DATA_DICT = 
{'id' : sceneDetails['id']} - dataDict = ORG_DATA_DICT.copy() - doAddTag = True - if addPrimaryDupPathToDetails: - BaseDupStr = f"{detailPrefix}{DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n{TagReason}(matchDupDistance={matchPhaseDistanceText})\n{detailPostfix}" - if sceneDetails['details'] == "": - details = BaseDupStr - elif not sceneDetails['details'].startswith(detailPrefix): - details = f"{BaseDupStr};\n{sceneDetails['details']}" - for tag in sceneDetails['tags']: - if tag['name'] == tagName: - doAddTag = False - break - if doAddTag: - stash.addTag(sceneDetails, tagName, ignoreAutoTag=ignoreAutoTag) - if details != "": - dataDict.update({'details' : details}) - if dataDict != ORG_DATA_DICT: - stash.updateScene(dataDict) - stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict} and tag {tagName}", toAscii=True) - else: - stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']} already has tag {tagName}.", toAscii=True) - return doAddTag - -def setTagId_withRetry(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): - errMsg = None - for i in range(0, retryCount): - try: - if errMsg != None: - stash.Warn(errMsg) - return setTagId(tagName, sceneDetails, DupFileToKeep, TagReason, ignoreAutoTag) - except (requests.exceptions.ConnectionError, ConnectionResetError): - tb = traceback.format_exc() - errMsg = f"[setTagId] Exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}" - except Exception as e: - tb = traceback.format_exc() - errMsg = f"[setTagId] Unknown exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}" - time.sleep(sleepSecondsBetweenRetry) - -def hasSameDir(path1, path2): - if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent: - return True - return False - -def sendToTrash(path): - if not os.path.isfile(path): - stash.Warn(f"File does not exist: {path}.", toAscii=True) - return False - try: - from send2trash import send2trash # Requirement: pip install Send2Trash - send2trash(path) - return True - except Exception as e: - stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True) - try: - if os.path.isfile(path): - os.remove(path) - return True - except Exception as e: - stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True) - return False -# If ckTimeDiff=False: Does durration2 have significant more time than durration1 -def significantTimeDiffCheck(durration1, durration2, ckTimeDiff = False): # If ckTimeDiff=True: is time different significant in either direction. 
- if not isinstance(durration1, int) and 'files' in durration1: - durration1 = int(durration1['files'][0]['duration']) - durration2 = int(durration2['files'][0]['duration']) - timeDiff = getTimeDif(durration1, durration2) - if ckTimeDiff and timeDiff > 1: - timeDiff = getTimeDif(durration2, durration1) - if timeDiff < significantTimeDiff: - return True - return False - -def getTimeDif(durration1, durration2): # Where durration1 is ecpected to be smaller than durration2 IE(45/60=.75) - return durration1 / durration2 - -def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better than scene1 - # Prioritize higher reslution over codec, bit rate, and frame rate - if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']): - return False - if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate): - if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])): - stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}") - return True - if (favorCodecRanking and swapCandidateCk == False) or (swapCandidateCk and swapCodec): - scene1CodecRank = stash.indexStartsWithInList(codecRanking, scene1['files'][0]['video_codec']) - scene2CodecRank = stash.indexStartsWithInList(codecRanking, scene2['files'][0]['video_codec']) - if scene2CodecRank < scene1CodecRank: - stash.Trace(f"[isBetterVideo] Better codec. {scene1['files'][0]['path']}={scene1['files'][0]['video_codec']}:Rank={scene1CodecRank} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['video_codec']}:Rank={scene2CodecRank}") - return True - if (favorFrameRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterFrameRate): - if (favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) > int(scene1['files'][0]['frame_rate'])) or (not favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) < int(scene1['files'][0]['frame_rate'])): - stash.Trace(f"[isBetterVideo]:[favorHigherFrameRate={favorHigherFrameRate}] Better frame rate. {scene1['files'][0]['path']}={scene1['files'][0]['frame_rate']} v.s. 
{scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}") - return True - return False - -def significantMoreTimeCompareToBetterVideo(scene1, scene2): # is scene2 better than scene1 - if isinstance(scene1, int): - scene1 = stash.find_scene(scene1) - scene2 = stash.find_scene(scene2) - if int(scene1['files'][0]['duration']) >= int(scene2['files'][0]['duration']): - return False - if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']): - if significantTimeDiffCheck(scene1, scene2): - if tagLongDurationLowRes: - didAddTag = setTagId_withRetry(longerDurationLowerResolution, scene2, scene1, ignoreAutoTag=True) - stash.Log(f"Tagged sene2 with tag {longerDurationLowerResolution}, because scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']}); didAddTag={didAddTag}") - else: - stash.Warn(f"Scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; Scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); Scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']})") - return False - return True - -def allThingsEqual(scene1, scene2): # If all important things are equal, return true - if int(scene1['files'][0]['duration']) != int(scene2['files'][0]['duration']): - return False - if scene1['files'][0]['width'] != scene2['files'][0]['width']: - return False - if scene1['files'][0]['height'] != scene2['files'][0]['height']: - return False - if bitRateIsImporantComp and scene1['files'][0]['bit_rate'] != scene2['files'][0]['bit_rate']: - return False - if codecIsImporantComp and scene1['files'][0]['video_codec'] != scene2['files'][0]['video_codec']: - return False - return True - -def isSwapCandidate(DupFileToKeep, DupFile): - # Don't move if both are in whitelist - if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']): - return False - if swapHighRes and int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']): - if not significantTimeDiffCheck(DupFileToKeep, DupFile): - return True - else: - stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True) - if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']): - if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']): - return True - if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True): - if not significantTimeDiffCheck(DupFileToKeep, DupFile): - return True - else: - stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter; DupFileToKeep-ID={DupFileToKeep['id']};DupFile-ID={DupFile['id']};BitRate 
{DupFileToKeep['files'][0]['bit_rate']} vs {DupFile['files'][0]['bit_rate']};Codec {DupFileToKeep['files'][0]['video_codec']} vs {DupFile['files'][0]['video_codec']};FrameRate {DupFileToKeep['files'][0]['frame_rate']} vs {DupFile['files'][0]['frame_rate']};", toAscii=True) - return False - -dupWhitelistTagId = None -def addDupWhitelistTag(): - global dupWhitelistTagId - stash.Trace(f"Adding tag duplicateWhitelistTag = {duplicateWhitelistTag}") - descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.' - dupWhitelistTagId = stash.createTagId(duplicateWhitelistTag, descp, ignoreAutoTag=True) - stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") - -excludeDupFileDeleteTagId = None -def addExcludeDupTag(): - global excludeDupFileDeleteTagId - stash.Trace(f"Adding tag excludeDupFileDeleteTag = {excludeDupFileDeleteTag}") - descp = 'Excludes duplicate scene from DupFileManager tagging and deletion process. A scene having this tag will not get deleted by DupFileManager' - excludeDupFileDeleteTagId = stash.createTagId(excludeDupFileDeleteTag, descp, ignoreAutoTag=True) - stash.Trace(f"dupWhitelistTagId={excludeDupFileDeleteTagId} name={excludeDupFileDeleteTag}") - -def isTaggedExcluded(Scene): - for tag in Scene['tags']: - if tag['name'] == excludeDupFileDeleteTag: - return True - return False - -def isWorseKeepCandidate(DupFileToKeep, Scene): - if not stash.startsWithInList(whitelist, Scene['files'][0]['path']) and stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']): - return True - if not stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']): - return True - if not stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']): - return True - - if stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(graylist, Scene['files'][0]['path']): - return True - if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']): - return True - return False - -def killScanningJobs(): - try: - if killScanningPostProcess: - stash.stopJobs(1, "Scanning...") - except Exception as e: - tb = traceback.format_exc() - stash.Error(f"Exception while trying to kill scan jobs; Error: {e}\nTraceBack={tb}") - -def getPath(Scene, getParent = False): - path = stash.asc2(Scene['files'][0]['path']) - path = path.replace("'", "") - path = path.replace("\\\\", "\\") - if getParent: - return pathlib.Path(path).resolve().parent - return path - -def getHtmlReportTableRow(qtyResults, tagDuplicates): - htmlReportPrefix = stash.Setting('htmlReportPrefix') - htmlReportPrefix = htmlReportPrefix.replace('http://127.0.0.1:9999/graphql', stash.url) - htmlReportPrefix = htmlReportPrefix.replace('http://localhost:9999/graphql', stash.url) - if tagDuplicates == False: - htmlReportPrefix = htmlReportPrefix.replace('
{videoPreview}{imagePreview}
") - else: - fileHtmlReport.write(f"{getSceneID(DupFile['id'])}{videoPreview}") - fileHtmlReport.write(f"{getSceneID(DupFile['id'])}{getPath(DupFile)}") - fileHtmlReport.write(f"

") - fileHtmlReport.write(f"") - - if DupFile['id'] in reasonDict: - fileHtmlReport.write(f"") - # elif DupFileToKeep['id'] in reasonDict: - # fileHtmlReport.write(f"") - elif int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']): - fileHtmlReport.write(f"") - elif significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep): - if significantTimeDiffCheck(DupFile, DupFileToKeep): - theReason = f"Significant-Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']}" - else: - theReason = f"Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']}" - fileHtmlReport.write(f"") - elif isBetterVideo(DupFile, DupFileToKeep): - fileHtmlReport.write(f"") - elif stash.startsWithInList(DupFileToKeep, DupFile['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFile['files'][0]['path']): - fileHtmlReport.write(f"") - elif isTaggedExcluded(DupFileToKeep) and not isTaggedExcluded(DupFile): - fileHtmlReport.write(f"") - - fileHtmlReport.write("
ResDurrationBitRateCodecFrameRatesizeIDindex
{DupFile['files'][0]['width']}x{DupFile['files'][0]['height']}{DupFile['files'][0]['duration']}{DupFile['files'][0]['bit_rate']}{DupFile['files'][0]['video_codec']}{DupFile['files'][0]['frame_rate']}{DupFile['files'][0]['size']}{DupFile['id']}{QtyTagForDel}
Reason: {reasonDict[DupFile['id']]}
Reason: {reasonDict[DupFileToKeep['id']]}
Reason: Resolution {DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} < {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']}
Reason: {theReason}
Reason: Better Video
Reason: not whitelist vs whitelist
Reason: not ExcludeTag vs ExcludeTag
") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - # ToDo: Add following buttons: - # rename file - if dupFileExist and tagDuplicates: - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - if dupFileExist: - fileHtmlReport.write(f"[Folder]") - fileHtmlReport.write(f"[Play]") - else: - fileHtmlReport.write("[File NOT Exist]") - fileHtmlReport.write("

") - - videoPreview = f"" - if htmlIncludeImagePreview: - imagePreview = f"
  • \"\"\"\"
" - fileHtmlReport.write(f"{getSceneID(DupFileToKeep['id'])}
{videoPreview}{imagePreview}
") - else: - fileHtmlReport.write(f"{getSceneID(DupFileToKeep['id'])}{videoPreview}") - fileHtmlReport.write(f"{getSceneID(DupFileToKeep['id'])}{getPath(DupFileToKeep)}") - fileHtmlReport.write(f"

") - fileHtmlReport.write(f"
ResDurrationBitRateCodecFrameRatesizeID
{DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']}{DupFileToKeep['files'][0]['duration']}{DupFileToKeep['files'][0]['bit_rate']}{DupFileToKeep['files'][0]['video_codec']}{DupFileToKeep['files'][0]['frame_rate']}{DupFileToKeep['files'][0]['size']}{DupFileToKeep['id']}
") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - fileHtmlReport.write(f"") - if isTaggedExcluded(DupFileToKeep): - fileHtmlReport.write(f"") - fileHtmlReport.write(f"[Folder]") - if toKeepFileExist: - fileHtmlReport.write(f"[Play]") - else: - fileHtmlReport.write("[File NOT Exist]") - # ToDo: Add following buttons: - # rename file - fileHtmlReport.write(f"

") - - fileHtmlReport.write("\n") - - if QtyTagForDelPaginate >= htmlReportPaginate: - QtyTagForDelPaginate = 0 - fileHtmlReport.write("\n") - homeHtmReportLink = f"[Home]" - prevHtmReportLink = "" - if PaginateId > 0: - if PaginateId > 1: - prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") - else: - prevHtmReport = htmlReportNameHomePage - prevHtmReportLink = f"[Prev]" - nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html") - nextHtmReportLink = f"[Next]" - fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}{nextHtmReportLink}
") - fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}") - fileHtmlReport.close() - PaginateId+=1 - fileHtmlReport = open(nextHtmReport, "w") - fileHtmlReport.write(f"{getHtmlReportTableRow(qtyResults, tagDuplicates)}\n") - if PaginateId > 1: - prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") - else: - prevHtmReport = htmlReportNameHomePage - prevHtmReportLink = f"[Prev]" - if len(DupFileSets) > (QtyTagForDel + htmlReportPaginate): - nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html") - nextHtmReportLink = f"[Next]" - fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}{nextHtmReportLink}
") - else: - stash.Debug(f"DupFileSets Qty = {len(DupFileSets)}; DupFileDetailList Qty = {len(DupFileDetailList)}; QtyTagForDel = {QtyTagForDel}; htmlReportPaginate = {htmlReportPaginate}; QtyTagForDel + htmlReportPaginate = {QtyTagForDel+htmlReportPaginate}") - fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}
") - fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n") - fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n") - - if tagDuplicates and graylistTagging and stash.startsWithInList(graylist, DupFile['files'][0]['path']): - stash.addTag(DupFile, graylistMarkForDeletion, ignoreAutoTag=True) - if didAddTag: - QtyNewlyTag+=1 - if QtyTagForDel == 1: - stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - else: - didAddTag = 1 if didAddTag else 0 - stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion;AddTag={didAddTag};Qty={QtyDup};Set={QtyDupSet} of {qtyResults};NewlyTag={QtyNewlyTag};isTag={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - stash.Trace(SepLine) - if maxDupToProcess > 0 and ((QtyTagForDel > maxDupToProcess) or (QtyTagForDel == 0 and QtyDup > maxDupToProcess)): - break - - if fileHtmlReport != None: - fileHtmlReport.write("\n") - if PaginateId > 0: - homeHtmReportLink = f"[Home]" - if PaginateId > 1: - prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html") - else: - prevHtmReport = htmlReportNameHomePage - prevHtmReportLink = f"[Prev]" - fileHtmlReport.write(f"
{homeHtmReportLink}{prevHtmReportLink}
") - fileHtmlReport.write(f"

Total Tagged for Deletion {QtyTagForDel}

\n") - fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}") - fileHtmlReport.close() - stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) - stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) - stash.Log(f"View Stash duplicate report using Stash->Settings->Tools->[Duplicate File Report]", printTo = stash.LogTo.STASH) - stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) - stash.Log(f"************************************************************", printTo = stash.LogTo.STASH) - - - stash.Debug("#####################################################") - stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN) - killScanningJobs() - if cleanAfterDel and deleteDup: - stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) - stash.metadata_clean() - stash.metadata_clean_generated() - stash.optimise_database() - if doGeneratePhash: - stash.metadata_generate({"phashes": True}) - sys.stdout.write("Report complete") - -def findCurrentTagId(tagNames): - # tagNames = [i for n, i in enumerate(tagNames) if i not in tagNames[:n]] - for tagName in tagNames: - tagId = stash.find_tags(q=tagName) - if len(tagId) > 0 and 'id' in tagId[0]: - stash.Debug(f"Using tag name {tagName} with Tag ID {tagId[0]['id']}") - return tagId[0]['id'] - return "-1" - -def toJson(data): - import json - # data = data.replace("'", '"') - data = data.replace("\\", "\\\\") - data = data.replace("\\\\\\\\", "\\\\") - return json.loads(data) - -def getAnAdvanceMenuOptionSelected(taskName, target, isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater): - stash.Log(f"Processing taskName = {taskName}, target = {target}") - if "Blacklist" in taskName: - isBlackList = True - if "Less" in taskName: - compareToLess = True - if "Greater" in taskName: - compareToGreater = True - - if "pathToDelete" in taskName: - pathToDelete = target.lower() - elif "sizeToDelete" in taskName: - sizeToDelete = int(target) - elif "durationToDelete" in taskName: - durationToDelete = int(target) - elif "commonResToDelete" in taskName: - resolutionToDelete = int(target) - elif "resolutionToDelete" in taskName: - resolutionToDelete = int(target) - elif "ratingToDelete" in taskName: - ratingToDelete = int(target) * 20 - elif "tagToDelete" in taskName: - tagToDelete = target.lower() - elif "titleToDelete" in taskName: - titleToDelete = target.lower() - elif "pathStrToDelete" in taskName: - pathStrToDelete = target.lower() - elif "fileNotExistToDelete" in taskName: - fileNotExistToDelete = True - return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater - -def getAdvanceMenuOptionSelected(advanceMenuOptionSelected): - isBlackList = False - pathToDelete = "" - sizeToDelete = -1 - durationToDelete = -1 - resolutionToDelete = -1 - ratingToDelete = -1 - tagToDelete = "" - titleToDelete = "" - pathStrToDelete = "" - fileNotExistToDelete = 
False - compareToLess = False - compareToGreater = False - if advanceMenuOptionSelected: - if 'Target' in stash.JSON_INPUT['args']: - if "applyCombo" in stash.PLUGIN_TASK_NAME: - jsonObject = toJson(stash.JSON_INPUT['args']['Target']) - for taskName in jsonObject: - isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAnAdvanceMenuOptionSelected(taskName, jsonObject[taskName], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, compareToLess, compareToGreater) - else: - return getAnAdvanceMenuOptionSelected(stash.PLUGIN_TASK_NAME, stash.JSON_INPUT['args']['Target'], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, compareToLess, compareToGreater) - return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater - -# ////////////////////////////////////////////////////////////////////////////// -# ////////////////////////////////////////////////////////////////////////////// -def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False, tagId=-1, advanceMenuOptionSelected=False): - if tagId == -1: - tagId = findCurrentTagId([duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, 'DuplicateMarkForDeletion', '_DuplicateMarkForDeletion']) - if int(tagId) < 0: - stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.") - return - - excludedTags = [duplicateMarkForDeletion] - if clearAllDupfileManagerTags: - excludedTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag, graylistMarkForDeletion, longerDurationLowerResolution] - - isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAdvanceMenuOptionSelected(advanceMenuOptionSelected) - if advanceMenuOptionSelected and deleteScenes and pathToDelete == "" and tagToDelete == "" and titleToDelete == "" and pathStrToDelete == "" and sizeToDelete == -1 and durationToDelete == -1 and resolutionToDelete == -1 and ratingToDelete == -1 and fileNotExistToDelete == False: - stash.Error("Running advance menu option with no options enabled.") - return - - QtyDup = 0 - QtyDeleted = 0 - QtyClearedTags = 0 - QtySetGraylistTag = 0 - QtyFailedQuery = 0 - stash.Debug("#########################################################################") - stash.startSpinningProcessBar() - scenes = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details title rating100') - stash.stopSpinningProcessBar() - qtyResults = len(scenes) - stash.Log(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion})") - stash.setProgressBarIter(qtyResults) - for scene in scenes: - QtyDup += 1 - stash.progressBar(QtyDup, qtyResults) - # scene = stash.find_scene(sceneID['id']) - # if scene == None or len(scene) == 0: - # stash.Warn(f"Could not get scene data for scene ID {scene['id']}.") - # QtyFailedQuery += 1 - # continue - # stash.Trace(f"scene={scene}") - if clearTag: 
- QtyClearedTags += 1 - # ToDo: Add logic to exclude graylistMarkForDeletion - tags = [int(item['id']) for item in scene["tags"] if item['name'] not in excludedTags] - # if clearAllDupfileManagerTags: - # tags = [] - # for tag in scene["tags"]: - # if tag['name'] in excludedTags: - # continue - # tags += [int(tag['id'])] - stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}") - dataDict = {'id' : scene['id']} - if addPrimaryDupPathToDetails: - sceneDetails = scene['details'] - if sceneDetails.find(detailPrefix) == 0 and sceneDetails.find(detailPostfix) > 1: - Pos1 = sceneDetails.find(detailPrefix) - Pos2 = sceneDetails.find(detailPostfix) - sceneDetails = sceneDetails[0:Pos1] + sceneDetails[Pos2 + len(detailPostfix):] - dataDict.update({'details' : sceneDetails}) - dataDict.update({'tag_ids' : tags}) - stash.Log(f"Updating scene with {dataDict};QtyClearedTags={QtyClearedTags};Count={QtyDup} of {qtyResults}") - stash.updateScene(dataDict) - # stash.removeTag(scene, duplicateMarkForDeletion) - elif setGrayListTag: - if stash.startsWithInList(graylist, scene['files'][0]['path']): - QtySetGraylistTag+=1 - if stash.addTag(scene, graylistMarkForDeletion, ignoreAutoTag=True): - stash.Log(f"Added tag {graylistMarkForDeletion} to scene {scene['files'][0]['path']};QtySetGraylistTag={QtySetGraylistTag};Count={QtyDup} of {qtyResults}") - else: - stash.Trace(f"Scene already had tag {graylistMarkForDeletion}; {scene['files'][0]['path']}") - elif deleteScenes: - DupFileName = scene['files'][0]['path'] - DupFileNameOnly = pathlib.Path(DupFileName).stem - if advanceMenuOptionSelected: - if isBlackList: - if not stash.startsWithInList(blacklist, scene['files'][0]['path']): - continue - if pathToDelete != "": - if not DupFileName.lower().startswith(pathToDelete): - stash.Debug(f"Skipping file {DupFileName} because it does not start with {pathToDelete}.") - continue - if pathStrToDelete != "": - if not pathStrToDelete in DupFileName.lower(): - stash.Debug(f"Skipping file {DupFileName} because it does not contain value {pathStrToDelete}.") - continue - if sizeToDelete != -1: - compareTo = int(scene['files'][0]['size']) - if compareToLess: - if not (compareTo < sizeToDelete): - continue - elif compareToGreater: - if not (compareTo > sizeToDelete): - continue - else: - if not compareTo == sizeToDelete: - continue - if durationToDelete != -1: - compareTo = int(scene['files'][0]['duration']) - if compareToLess: - if not (compareTo < durationToDelete): - continue - elif compareToGreater: - if not (compareTo > durationToDelete): - continue - else: - if not compareTo == durationToDelete: - continue - if resolutionToDelete != -1: - compareTo = int(scene['files'][0]['width']) * int(scene['files'][0]['height']) - if compareToLess: - if not (compareTo < resolutionToDelete): - continue - elif compareToGreater: - if not (compareTo > resolutionToDelete): - continue - else: - if not compareTo == resolutionToDelete: - continue - if ratingToDelete != -1: - if scene['rating100'] == "None": - compareTo = 0 - else: - compareTo = int(scene['rating100']) - if compareToLess: - if not (compareTo < resolutionToDelete): - continue - elif compareToGreater: - if not (compareTo > resolutionToDelete): - continue - else: - if not compareTo == resolutionToDelete: - continue - if titleToDelete != "": - if not titleToDelete in scene['title'].lower(): - stash.Debug(f"Skipping file {DupFileName} because it does not contain value {titleToDelete} in title ({scene['title']}).") - continue - if tagToDelete != "": - doProcessThis = False 
- for tag in scene['tags']: - if tag['name'].lower() == tagToDelete: - doProcessThis = True - break - if doProcessThis == False: - continue - if fileNotExistToDelete: - if os.path.isfile(scene['files'][0]['path']): - continue - stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) - if alternateTrashCanPath != "": - destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" - if os.path.isfile(destPath): - destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" - shutil.move(DupFileName, destPath) - elif moveToTrashCan: - sendToTrash(DupFileName) - result = stash.destroyScene(scene['id'], delete_file=True) - QtyDeleted += 1 - stash.Debug(f"destroyScene result={result} for file {DupFileName};QtyDeleted={QtyDeleted};Count={QtyDup} of {qtyResults}", toAscii=True) - else: - stash.Error("manageTagggedDuplicates called with invlaid input arguments. Doing early exit.") - return - stash.Debug("#####################################################") - stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtySetGraylistTag={QtySetGraylistTag}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN) - killScanningJobs() - if deleteScenes and not advanceMenuOptionSelected: - if cleanAfterDel: - stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) - stash.metadata_clean() - stash.metadata_clean_generated() - stash.optimise_database() - - -def removeDupTag(): - if 'Target' not in stash.JSON_INPUT['args']: - stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") - return - scene = stash.JSON_INPUT['args']['Target'] - stash.Log(f"Processing scene ID# {scene}") - stash.removeTag(scene, duplicateMarkForDeletion) - stash.Log(f"Done removing tag from scene {scene}.") - jsonReturn = "{'removeDupTag' : 'complete', 'id': '" + f"{scene}" + "'}" - stash.Log(f"Sending json value {jsonReturn}") - sys.stdout.write(jsonReturn) - -def addExcludeTag(): - if 'Target' not in stash.JSON_INPUT['args']: - stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") - return - scene = stash.JSON_INPUT['args']['Target'] - stash.Log(f"Processing scene ID# {scene}") - stash.addTag(scene, excludeDupFileDeleteTag) - stash.Log(f"Done adding exclude tag to scene {scene}.") - sys.stdout.write("{" + f"addExcludeTag : 'complete', id: '{scene}'" + "}") - -def removeExcludeTag(): - if 'Target' not in stash.JSON_INPUT['args']: - stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") - return - scene = stash.JSON_INPUT['args']['Target'] - stash.Log(f"Processing scene ID# {scene}") - stash.removeTag(scene, excludeDupFileDeleteTag) - stash.Log(f"Done removing exclude tag from scene {scene}.") - sys.stdout.write("{" + f"removeExcludeTag : 'complete', id: '{scene}'" + "}") - -def getParseData(getSceneDetails1=True, getSceneDetails2=True): - if 'Target' not in stash.JSON_INPUT['args']: - stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})") - return None, None - targetsSrc = stash.JSON_INPUT['args']['Target'] - targets = targetsSrc.split(":") - if len(targets) < 2: - stash.Error(f"Could not get both targets from string {targetsSrc}") - return None, None - stash.Log(f"Parsed targets {targets[0]} and {targets[1]}") - target1 = targets[0] - target2 = targets[1] - if getSceneDetails1: - target1 = stash.find_scene(int(target1)) - if getSceneDetails2: - target2 = stash.find_scene(int(target2)) - elif len(targets) > 2: - 
-        target2 = target2 + targets[2]
-    return target1, target2
-
-
-def mergeTags():
-    scene1, scene2 = getParseData()
-    if scene1 is None or scene2 is None:
-        sys.stdout.write("{" + f"mergeTags : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
-        return
-    stash.mergeMetadata(scene1, scene2)
-    stash.Log(f"Done merging scenes for scene {scene1['id']} and scene {scene2['id']}")
-    sys.stdout.write("{" + f"mergeTags : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}'" + "}")
-
-def getLocalDupReportPath():
-    htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
-    localPath = htmlReportName.replace("\\", "\\\\")
-    jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{localPath}" + "'}"
-    stash.Log(f"Sending json value {jsonReturn}")
-    sys.stdout.write(jsonReturn)
-
-def deleteLocalDupReportHtmlFiles(doJsonOutput = True):
-    htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
-    x = 0  # Default quantity when no report files exist
-    if os.path.isfile(htmlReportName):
-        stash.Log(f"Deleting file {htmlReportName}")
-        os.remove(htmlReportName)
-        for x in range(2, 9999):
-            fileName = htmlReportName.replace(".html", f"_{x-1}.html")
-            stash.Debug(f"Checking if file '{fileName}' exist.")
-            if not os.path.isfile(fileName):
-                break
-            stash.Log(f"Deleting file {fileName}")
-            os.remove(fileName)
-    else:
-        stash.Log(f"Report file does not exist: {htmlReportName}")
-    if doJsonOutput:
-        jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{htmlReportName}" + "', 'qty': '" + f"{x}" + "'}"
-        stash.Log(f"Sending json value {jsonReturn}")
-        sys.stdout.write(jsonReturn)
-
-def removeTagFromAllScenes(tagName, deleteTags):
-    # ToDo: Replace code with SQL code if DB version 68
-    tagId = stash.find_tags(q=tagName)
-    if len(tagId) > 0 and 'id' in tagId[0]:
-        if deleteTags:
-            stash.Debug(f"Deleting tag name {tagName} with Tag ID {tagId[0]['id']} from stash.")
-            stash.destroy_tag(int(tagId[0]['id']))
-        else:
-            stash.Debug(f"Removing tag name {tagName} with Tag ID {tagId[0]['id']} from all scenes.")
-            manageTagggedDuplicates(clearTag=True, tagId=int(tagId[0]['id']))
-        return True
-    return False
-
-def removeAllDupTagsFromAllScenes(deleteTags=False):
-    tagsToClear = [duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, graylistMarkForDeletion, longerDurationLowerResolution, duplicateWhitelistTag]
-    for x in range(0, 3):
-        tagsToClear += [base1_duplicateMarkForDeletion + f"_{x}"]
-    for x in range(0, 3):
-        tagsToClear += [base2_duplicateMarkForDeletion + f"_{x}"]
-    tagsToClear = list(set(tagsToClear))  # Remove duplicates
-    validTags = []
-    for tagToClear in tagsToClear:
-        if removeTagFromAllScenes(tagToClear, deleteTags):
-            validTags += [tagToClear]
-    if doJsonReturn:
-        jsonReturn = "{'removeAllDupTagFromAllScenes' : " + f"{duplicateMarkForDeletion}" + ", 'OtherTags': '" + f"{validTags}" + "'}"
-        stash.Log(f"Sending json value {jsonReturn}")
-        sys.stdout.write(jsonReturn)
-    else:
-        stash.Log(f"Clear tags {tagsToClear}")
-
-def insertDisplayNone(htmlReportName, scene):
-    stash.Log(f"Inserting display none for scene {scene} in file {htmlReportName}")
-    import in_place
-    doStyleEndTagCheck = True
-    with in_place.InPlace(htmlReportName) as file:
-        for line in file:
-            if doStyleEndTagCheck and line.startswith("</style>"):
-                file.write(f".ID_{scene}" + "{display:none;}")
-                doStyleEndTagCheck = False
-            file.write(line)
-
-def hideScene(scene):
-    if os.path.isfile(htmlReportName):
-        insertDisplayNone(htmlReportName, scene)
-        for x in range(2, 9999):
-            fileName = htmlReportName.replace(".html", f"_{x-1}.html")
-            stash.Debug(f"Checking if file '{fileName}' exist.")
-            if not os.path.isfile(fileName):
-                break
-            insertDisplayNone(fileName, scene)
-    else:
-        stash.Log(f"Report file does not exist: {htmlReportName}")
-
-def deleteScene(hideInReport=True, deleteFile=True):
-    if 'Target' not in stash.JSON_INPUT['args']:
-        stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
-        return
-    scene = stash.JSON_INPUT['args']['Target']
-    stash.Log(f"Processing scene ID# {scene}")
-    result = stash.destroyScene(scene, delete_file=deleteFile)
-    if hideInReport:
-        hideScene(scene)
-    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene} with results = {result}")
-    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id: '{scene}', result: '{result}'" + "}")
-
-def copyScene(moveScene=False):
-    scene1, scene2 = getParseData()
-    if scene1 is None or scene2 is None:
-        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
-        return
-    if moveScene:
-        stash.mergeMetadata(scene1, scene2)
-    result = shutil.copy(scene1['file']['path'], scene2['file']['path'])
-    if moveScene:
-        result = stash.destroyScene(scene1['id'], delete_file=True)
-        stash.Log(f"destroyScene for scene {scene1['id']} results = {result}")
-    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene1['id']} and {scene2['id']}")
-    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}', result: '{result}'" + "}")
-
-def renameFile():
-    scene, newName = getParseData(getSceneDetails2=False)
-    if scene is None or newName is None:
-        sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', newName: '{newName}'" + "}")
-        return
-    newName = scene['file']['path'].replace(pathlib.Path(scene['file']['path']).stem, newName)
-    result = shutil.move(scene['file']['path'], newName)
-    stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene['id']} ;renamed to {newName}; result={result}")
-    sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene['id']}', newName: '{newName}', result: '{result}'" + "}")
-
-
-# ToDo: Add to UI menu
-#       Remove all Dup tagged files (Just remove from stash, and leave file)
-#       Clear GraylistMarkForDel tag
-#       Delete GraylistMarkForDel tag
-#       Remove from stash all files no longer part of stash library
-#       Remove from stash all files in the Exclusion list (Not supporting regexps)
-# ToDo: Add to advance menu
-#       Remove only graylist dup
-#       Exclude graylist from delete
-#       Include graylist in delete
-
-try:
-    if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
-        mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
-        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
-    elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
-        manageTagggedDuplicates(deleteScenes=True)
-        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
-    elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
-        mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
-        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
-    elif stash.PLUGIN_TASK_NAME == "clear_duplicate_tags_task":
-        removeAllDupTagsFromAllScenes()
-        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
-    elif stash.PLUGIN_TASK_NAME == "graylist_tag_task":
-        manageTagggedDuplicates(setGrayListTag=True)
-        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
-    elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
-        stash.metadata_generate({"phashes": True})
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "deleteScene": - deleteScene() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "removeScene": - deleteScene(deleteFile=False) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "renameFile": - renameFile() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "copyScene": - copyScene() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "moveScene": - copyScene(moveScene=True) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "removeDupTag": - removeDupTag() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "addExcludeTag": - addExcludeTag() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "removeExcludeTag": - removeExcludeTag() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "mergeTags": - mergeTags() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "getLocalDupReportPath": - getLocalDupReportPath() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "deleteLocalDupReportHtmlFiles": - deleteLocalDupReportHtmlFiles() - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "createDuplicateReportWithoutTagging": - mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "deleteAllDupFileManagerTags": - removeAllDupTagsFromAllScenes(deleteTags=True) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesTask": - mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "deleteTaggedDuplicatesLwrResOrLwrDuration": - mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteLowerResAndDuration=True) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration": - mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True, deleteLowerResAndDuration=True) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - elif parse_args.dup_tag: - stash.PLUGIN_TASK_NAME = "dup_tag" - mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) - stash.Debug(f"Tag duplicate EXIT") - elif parse_args.del_tag: - stash.PLUGIN_TASK_NAME = "del_tag" - manageTagggedDuplicates(deleteScenes=True) - stash.Debug(f"Delete Tagged duplicates EXIT") - elif parse_args.clear_tag: - stash.PLUGIN_TASK_NAME = "clear_tag" - removeAllDupTagsFromAllScenes() - stash.Debug(f"Clear duplicate tags EXIT") - elif parse_args.remove: - stash.PLUGIN_TASK_NAME = "remove" - mangeDupFiles(deleteDup=True, merge=mergeDupFilename) - stash.Debug(f"Delete duplicate EXIT") - elif len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in advanceMenuOptions: - manageTagggedDuplicates(deleteScenes=True, advanceMenuOptionSelected=True) - stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT") - else: - stash.Log(f"Nothing to do!!! 
-except Exception as e:
-    tb = traceback.format_exc()
-
-    stash.Error(f"Exception while running DupFileManager Task({stash.PLUGIN_TASK_NAME}); Error: {e}\nTraceBack={tb}")
-    killScanningJobs()
-    stash.convertToAscii = False
-    stash.Error(f"Error: {e}\nTraceBack={tb}")
-    if doJsonReturn:
-        sys.stdout.write("{" + f"Exception : '{e}; See log file for TraceBack' " + "}")
-
-stash.Log("\n*********************************\nEXITING ***********************\n*********************************")

From b216d09eb2baf1496a1ab8490587a18c731ac138 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sat, 23 Nov 2024 00:50:19 -0500
Subject: [PATCH 38/39] prettier changes

---
 plugins/DupFileManager/DupFileManager.css   |   10 +-
 plugins/DupFileManager/DupFileManager.yml   |   10 +-
 plugins/DupFileManager/README.md            |   19 +-
 plugins/DupFileManager/advance_options.html | 4602 +++++++++++--------
 4 files changed, 2723 insertions(+), 1918 deletions(-)

diff --git a/plugins/DupFileManager/DupFileManager.css b/plugins/DupFileManager/DupFileManager.css
index 7ef71ede..05f75f14 100644
--- a/plugins/DupFileManager/DupFileManager.css
+++ b/plugins/DupFileManager/DupFileManager.css
@@ -33,7 +33,7 @@
   margin: 1em;
 }

- /* Dashed border */
+/* Dashed border */
 hr.dashed {
   border-top: 3px dashed #bbb;
 }
@@ -55,13 +55,13 @@ hr.rounded {
 }

 h3.under_construction {
-  color:red;
-  background-color:yellow;
+  color: red;
+  background-color: yellow;
 }

 h3.submenu {
-  color:Tomato;
-  background-color:rgba(100, 100, 100);
+  color: Tomato;
+  background-color: rgba(100, 100, 100);
 }

 /*# sourceMappingURL=DupFileManager.css.map */
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index f2f3ef6d..3d2f6ff1 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -4,15 +4,15 @@ version: 0.1.9
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 ui:
   javascript:
-  - DupFileManager.js
+    - DupFileManager.js
   css:
-  - DupFileManager.css
-  - DupFileManager.css.map
-  - DupFileManager.js.map
+    - DupFileManager.css
+    - DupFileManager.css.map
+    - DupFileManager.js.map
 settings:
   matchDupDistance:
     displayName: Match Duplicate Distance
     description: (Default=0) Where 0 = Exact Match, 1 = High Match, 2 = Medium Match, and 3 = Low Match.
     type: NUMBER
   mergeDupFilename:
     displayName: Merge Duplicate Tags
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index 4e76a7f0..0a90703c 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -7,7 +7,7 @@ It has both **task** and **tools-UI** components.
 - Creates a duplicate file report which can be accessed from the settings->tools menu options. The report is created as an HTML file and stored in a local path under plugins\DupFileManager\report\DuplicateTagScenes.html.
   - See screenshot at the bottom of this page for example report.
-  - Items on the left side of the report are the primary duplicates designated for deletion. By default, these duplicates are given a special _duplicate tag.
+  - Items on the left side of the report are the primary duplicates designated for deletion. By default, these duplicates are given a special \_duplicate tag.
   - Items on the right side of the report are designated as primary duplicates to keep. They usually have higher resolution, duration and/or preferred paths.
   - The report has the following options:
     - Delete: Delete file and remove from Stash library.
@@ -42,11 +42,11 @@ It has both **task** and **tools-UI** components.
 - **Delete Duplicates** - Deletes duplicate files. Performs deletion without first tagging.
 - Plugin UI options (Settings->Plugins->Plugins->[DupFileManager])
   - Has a 3 tier path selection to determine which duplicates to keep, and which should be candidates for deletions.
-    - **Whitelist** - List of paths NOT to be deleted.
+    - **Whitelist** - List of paths NOT to be deleted.
       - E.g. C:\Favorite\,E:\MustKeep\
-    - **Gray-List** - List of preferential paths to determine which duplicate should be the primary.
+    - **Gray-List** - List of preferential paths to determine which duplicate should be the primary.
      - E.g. C:\2nd_Favorite\,H:\ShouldKeep\
-    - **Blacklist** - List of LEAST preferential paths to determine primary candidates for deletion.
+    - **Blacklist** - List of LEAST preferential paths to determine primary candidates for deletion.
      - E.g. C:\Downloads\,F:\DeleteMeFirst\
   - **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
   - **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
@@ -58,8 +58,9 @@ It has both **task** and **tools-UI** components.
   - **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
   - **addPrimaryDupPathToDetails** - If enabled, adds the primary duplicate path to the scene detail.
 - Tools UI Menu
-![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323)
+  ![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323)
 - Can access either **Duplicate File Report (DupFileManager)** or **DupFileManager Tools and Utilities** menu options.
+
 ### Requirements
 - `pip install --upgrade stashapp-tools`
@@ -87,9 +88,9 @@ That's it!!!
   - ![Screenshot 2024-11-22 225359](https://github.com/user-attachments/assets/dc705b24-e2d7-4663-92fd-1516aa7aacf5)
   - If there's a scene on the left side that has a higher resolution or duration, it gets a yellow highlight on the report.
   - There's an optional setting that allows both preview videos and preview images to be displayed on the report. See settings **htmlIncludeImagePreview** in the **DupFileManager_report_config.py** file.
   - There are many more options available for how the report is created. These options are targeted for more advanced users. The options are all available in the **DupFileManager_report_config.py** file, and the settings have commented descriptions preceding them. See the **DupFileManager_report_config.py** file in the DupFileManager plugin folder for more details.
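For orientation, a minimal sketch of what such an override in **DupFileManager_report_config.py** could look like. Only **htmlIncludeImagePreview** is documented above; the dictionary shape shown here is an assumption, not the plugin's actual schema:

```python
# Illustrative sketch only; consult the commented descriptions in
# DupFileManager_report_config.py for the real setting names and layout.
config = {
    # Show both preview videos and preview images on the HTML report.
    "htmlIncludeImagePreview": True,
}
```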
 - Tools UI Menu
+  ![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323)
 - Can access either **Duplicate File Report (DupFileManager)** or **DupFileManager Tools and Utilities** menu options.
 - DupFileManager Report Menu
   - ![Screenshot 2024-11-22 151630](https://github.com/user-attachments/assets/834ee60f-1a4a-4a3e-bbf7-23aeca2bda1f)
@@ -99,6 +100,7 @@ That's it!!!
   - ![Screenshot 2024-11-22 232208](https://github.com/user-attachments/assets/bf1f3021-3a8c-4875-9737-60ee3d7fe675)

 ### Future Planned Features
+
 - Currently, the report and advanced menu do not work with Stash settings requiring a password. Additional logic will be added to have them use the API Key. Planned for 1.0.0 Version.
 - Add an advanced menu that will work with non-tagged reports. It will iterate through the existing report file(s) to apply deletions, instead of searching the Stash DB for tagged files. Planned for 1.1.0 Version.
 - Graylist deletion option will be added to the advanced menu. Planned for 1.0.5 Version.
 - Add report directly to the Settings->Tools menu. Planned for 1.5.0 Version.
 - Remove all flags from all scenes option. Planned for 1.0.5 Version.
 - Transfer option settings **[Disable Complete Confirmation]** and **[Disable Delete Confirmation]** when paginating. Planned for 1.0.5 Version.
-
-
-
diff --git a/plugins/DupFileManager/advance_options.html b/plugins/DupFileManager/advance_options.html
index 262c078d..1f5e5135 100644
--- a/plugins/DupFileManager/advance_options.html
+++ b/plugins/DupFileManager/advance_options.html
@@ -1,1902 +1,2708 @@
- + - -DupFileManager Advance Menus - - - + - - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
DupFileManager Advance _DuplicateMarkForDeletion_? Tagged Files MenuApply Multiple Options
- - -
- - -
- - - - -
-
- - - - - - - - -
-
- -
- - - - - - - - -
-
- -
- - - - - - - - - - -
-
- -
- - - - - - -
-
- - - - - - -
-
- - - - - - -
-
- - - - - - - - - - -
-
- -
- - - - -
- - - - - - - - - - -
-
- -
-
-
- -
- - - - - - -
Create report with different [Match Duplicate Distance] options -
Overrides user [Match Duplicate Distance] and [significantTimeDiff] settings
-
- - -
-
- - - - - -
Create Report with Tagging
- -
- -
- -
- -
- - - - - -
Create Report without Tagging
- -
- -
- -
- -
- Details: -
    -
  1. Match Duplicate Distance Number Details
  2. -
      -
    1. Exact Match
    2. -
        -
      1. Safest and most reliable option
      2. -
      3. Uses tag name _DuplicateMarkForDeletion_0
      4. -
      5. Has the fewest results, and it's very rare to have false matches.
      6. -
      -
    3. High Match
    4. -
        -
      1. Recommended Setting
      2. -
      3. Safe and usually reliable
      4. -
      5. Uses tag name _DuplicateMarkForDeletion_1
      6. -
      7. Scenes tagged by Exact Match will have both tags (_DuplicateMarkForDeletion_0 and _DuplicateMarkForDeletion_1)
      8. -
      -
    5. Medium Match
    6. -
        -
      1. Not so safe. Some false matches
      2. -
      3. To reduce false matches use a time difference of .96 or higher.
      4. -
      5. Uses tag name _DuplicateMarkForDeletion_2
      6. -
      7. Scenes tagged by 0 and 1 will have three tags.
      8. -
      -
    7. Low Match
    8. -
        -
      1. Unsafe, and many false matches
      2. -
      3. To reduce false matches use a time difference of .98 or higher.
      4. -
      5. Uses tag name _DuplicateMarkForDeletion_3
      6. -
      7. Scenes tagged by 0, 1, and 2 will have four tags.
      8. -
      9. Has the most results, but with many false matches.
      10. -
      -
    -
  3. Time Difference
  4. -
      -
    1. Significant time difference setting, where 1 equals 100% and (.9) equals 90%.
    2. -
    3. This setting overrides the setting in DupFileManager_config.py.
    4. -
        -
      1. See setting significantTimeDiff in DupFileManager_config.py
      2. -
      -
    5. This setting is generally not useful for [Exact Match] reports.
    6. -
    7. This is an important setting when creating Low or Medium match reports. It will reduce false matches.
    8. -
    -
  5. Report with tagging
  6. -
      -
    1. Reports with tagging will work with the above DupFileManager Advance Menu.
    2. -
    3. The report can take several minutes to complete.
    4. -
    5. It takes much more time to produce a report with tagging compared to creating a report without tagging.
    6. -
    -
  7. Report WITHOUT tagging
  8. -
      -
    1. Reports with no tagging can NOT be used with the above DupFileManager Advance Menu.
    2. -
    3. The report is created much faster. It usually takes a few seconds to complete.
    4. -
    5. This is the recommended report type to create if the DupFileManager Advance Menu is not needed or desired.
    6. -
    -
-
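The advanced-menu delete filters in manageTagggedDuplicates (shown earlier) all repeat the same compare-then-skip pattern, driven by the Less/Eq/Greater button variants. A condensed sketch of that dispatch; the helper name and flag arguments are illustrative, not the plugin's own:

```python
# Sketch of the compare-then-skip pattern used by the advanced-menu filters.
# matches_delete_criterion is an illustrative name only.
def matches_delete_criterion(value, target, compare_to_less=False, compare_to_greater=False):
    """Return True when a scene attribute matches the selected delete criterion."""
    if target == -1:  # -1 means this criterion was not selected
        return True
    if compare_to_less:
        return value < target
    if compare_to_greater:
        return value > target
    return value == target  # default variant: exact match

# Example: a 500 MB file matches "size less than 1 GB"
print(matches_delete_criterion(500_000_000, 1_000_000_000, compare_to_less=True))  # True
```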
- - + function RunPluginDupFileManager(Mode, Param = 0, Async = false) { + $("html").addClass("wait"); + $("body").css("cursor", "progress"); + console.log( + "GraphQl_URL = " + + GraphQl_URL + + "; Mode = " + + Mode + + "; Param = " + + Param + ); + $.ajax({ + method: "POST", + url: GraphQl_URL, + contentType: "application/json", + dataType: "text", + cache: Async, + async: Async, + data: JSON.stringify({ + query: `mutation RunPluginOperation($plugin_id:ID!,$args:Map!){runPluginOperation(plugin_id:$plugin_id,args:$args)}`, + variables: { + plugin_id: "DupFileManager", + args: { Target: Param, mode: Mode }, + }, + }), + success: function (result) { + console.log(result); + $("html").removeClass("wait"); + $("body").css("cursor", "default"); + }, + }); + console.log("Setting default cursor"); + } + $(document).ready(function () { + $("button").click(function () { + const AddedWarn = + "? This will delete the files, and remove them from stash."; + console.log(this.id); + var blackliststr = ""; + var comparestr = "less than "; + if (this.id.includes("Blacklist")) blackliststr = "in blacklist "; + if (this.id.includes("Greater")) comparestr = "greater than "; + else if (this.id.includes("Eq")) comparestr = "equal to "; + if (this.id === "tag_duplicates_task") { + RunPluginDupFileManager(this.id, this.value, true); + } else if (this.id.startsWith("tag_duplicates_task")) { + RunPluginDupFileManager( + "tag_duplicates_task", + this.value + ":" + $("#significantTimeDiff").val(), + true + ); + } else if (this.id.startsWith("create_duplicate_report_task")) { + RunPluginDupFileManager( + "create_duplicate_report_task", + this.value + ":" + $("#significantTimeDiff").val(), + true + ); + } else if (this.id === "viewreport") { + var reportUrl = window.location.href; + reportUrl = reportUrl.replace( + "advance_options.html", + "report/DuplicateTagScenes.html" + ); + console.log("reportUrl = " + reportUrl); + window.open(reportUrl, "_blank"); + } else if ( + this.id === "pathToDelete" || + this.id === "pathToDeleteBlacklist" + ) { + if ( + confirm( + "Are you sure you want to delete tag scenes " + + blackliststr + + "having _DuplicateMarkForDeletion tags, and in path " + + $("#pathToDeleteText").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#pathToDeleteText").val()); + } else if ( + this.id === "sizeToDeleteLess" || + this.id === "sizeToDeleteGreater" || + this.id === "sizeToDeleteBlacklistLess" || + this.id === "sizeToDeleteBlacklistGreater" + ) { + if ( + confirm( + "Are you sure you want to delete duplicate tag scenes " + + blackliststr + + "having file size " + + comparestr + + $("#sizeToDelete").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#sizeToDelete").val()); + } else if ( + this.id === "durationToDeleteLess" || + this.id === "durationToDeleteGreater" || + this.id === "durationToDeleteBlacklistLess" || + this.id === "durationToDeleteBlacklistGreater" + ) { + if ( + confirm( + "Are you sure you want to delete duplicate tag scenes " + + blackliststr + + "having file duration " + + comparestr + + $("#durationToDelete").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#durationToDelete").val()); + } else if ( + this.id === "commonResToDeleteLess" || + this.id === "commonResToDeleteEq" || + this.id === "commonResToDeleteGreater" || + this.id === "commonResToDeleteBlacklistLess" || + this.id === "commonResToDeleteBlacklistEq" || + this.id === "commonResToDeleteBlacklistGreater" + ) { + if ( + confirm( + "Are you sure you want to delete 
duplicate tag scenes " + + blackliststr + + "having resolution " + + comparestr + + $("#commonResToDelete").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#commonResToDelete").val()); + } else if ( + this.id === "resolutionToDeleteLess" || + this.id === "resolutionToDeleteEq" || + this.id === "resolutionToDeleteGreater" || + this.id === "resolutionToDeleteBlacklistLess" || + this.id === "resolutionToDeleteBlacklistEq" || + this.id === "resolutionToDeleteBlacklistGreater" + ) { + if ( + confirm( + "Are you sure you want to delete duplicate tag scenes " + + blackliststr + + "having resolution " + + comparestr + + $("#resolutionToDelete").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#resolutionToDelete").val()); + } else if ( + this.id === "ratingToDeleteLess" || + this.id === "ratingToDeleteEq" || + this.id === "ratingToDeleteGreater" || + this.id === "ratingToDeleteBlacklistLess" || + this.id === "ratingToDeleteBlacklistEq" || + this.id === "ratingToDeleteBlacklistGreater" + ) { + let result = 0; + if ($("#ratingToDelete").val() == 1 && comparestr === "less than ") + result = confirm( + "Are you sure you want to delete duplicate tag scenes " + + blackliststr + + "having no rating" + + AddedWarn + ); + else if ( + $("#ratingToDelete").val() == 5 && + comparestr === "greater than " + ) + alert( + "Invalid selection. There are no scenes with rating greater than 5." + ); + else + result = confirm( + "Are you sure you want to delete duplicate tag scenes " + + blackliststr + + "having rating " + + comparestr + + $("#ratingToDelete").val() + + AddedWarn + ); + if (result) + RunPluginDupFileManager(this.id, $("#ratingToDelete").val()); + } else if ( + this.id === "tagToDelete" || + this.id === "tagToDeleteBlacklist" + ) { + if ( + confirm( + "Are you sure you want to delete tag scenes " + + blackliststr + + "having _DuplicateMarkForDeletion tags, and having tag " + + $("#tagToDeleteText").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#tagToDeleteText").val()); + } else if ( + this.id === "titleToDelete" || + this.id === "titleToDeleteBlacklist" + ) { + if ( + confirm( + "Are you sure you want to delete tag scenes " + + blackliststr + + "having _DuplicateMarkForDeletion tags, and having title containing " + + $("#titleToDeleteText").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#titleToDeleteText").val()); + } else if ( + this.id === "pathStrToDelete" || + this.id === "pathStrToDeleteBlacklist" + ) { + if ( + confirm( + "Are you sure you want to delete tag scenes " + + blackliststr + + "having _DuplicateMarkForDeletion tags, and having path containing " + + $("#pathStrToDeleteText").val() + + AddedWarn + ) + ) + RunPluginDupFileManager(this.id, $("#pathStrToDeleteText").val()); + } else if ( + this.id === "fileNotExistToDelete" || + this.id === "fileNotExistToDeleteBlacklist" + ) { + if ( + confirm( + "Are you sure you want to delete tag scenes " + + blackliststr + + "having _DuplicateMarkForDeletion tags, and that do NOT exist in the file system?" 
+ ) + ) + RunPluginDupFileManager(this.id, true); + } else if ( + this.id === "applyCombo" || + this.id === "applyComboBlacklist" + ) { + var Blacklist = ""; + if (this.id === "applyComboBlacklist") Blacklist = "Blacklist"; + var Param = "{"; + if ($("#InPathCheck").prop("checked")) + Param += + '"' + + "pathToDelete" + + Blacklist + + '":"' + + $("#pathToDeleteText").val().replace("\\", "\\\\") + + '", '; + if ($("#sizeToDeleteCombobox").val() !== "") + Param += + '"' + + "sizeToDelete" + + Blacklist + + $("#sizeToDeleteCombobox").val() + + '":"' + + $("#sizeToDelete").val() + + '", '; + if ($("#durationToDeleteCombobox").val() !== "") + Param += + '"' + + "durationToDelete" + + Blacklist + + $("#durationToDeleteCombobox").val() + + '":"' + + $("#durationToDelete").val() + + '", '; + if ($("#commonResToDeleteCombobox").val() !== "") + Param += + '"' + + "commonResToDelete" + + Blacklist + + $("#commonResToDeleteCombobox").val() + + '":"' + + $("#commonResToDelete").val() + + '", '; + if ($("#resolutionToDeleteCombobox").val() !== "") { + if ($("#commonResToDeleteCombobox").val() !== "") { + alert( + "Error: Can not select both [Common Resolution] and [Other Resolution] at the same time." + ); + return; + } + Param += + '"' + + "resolutionToDelete" + + Blacklist + + $("#resolutionToDeleteCombobox").val() + + '":"' + + $("#resolutionToDelete").val() + + '", '; + } + if ($("#ratingToDeleteCombobox").val() !== "") + Param += + '"' + + "ratingToDelete" + + Blacklist + + $("#ratingToDeleteCombobox").val() + + '":"' + + $("#ratingToDelete").val() + + '", '; + if ($("#containTagCheck").prop("checked")) + Param += + '"' + + "tagToDelete" + + Blacklist + + '":"' + + $("#tagToDeleteText").val() + + '", '; + if ($("#containTitleCheck").prop("checked")) + Param += + '"' + + "titleToDelete" + + Blacklist + + '":"' + + $("#titleToDeleteText").val() + + '", '; + if ($("#containStrInPathCheck").prop("checked")) + Param += + '"' + + "pathStrToDelete" + + Blacklist + + '":"' + + $("#pathStrToDeleteText").val().replace("\\", "\\\\") + + '", '; + if ($("#fileNotExistCheck").prop("checked")) + Param += '"' + "fileNotExistToDelete" + Blacklist + '":"true", '; + Param += "}"; + Param = Param.replace(", }", "}"); + if (Param === "{}") { + alert("Error: Must select one or more options."); + return; + } + console.log(Param); + if ( + confirm( + "Are you sure you want to delete tag scenes " + + blackliststr + + "having _DuplicateMarkForDeletion tags, and having the selected options" + + AddedWarn + + "\nSelected options:\n" + + Param + ) + ) + RunPluginDupFileManager(this.id, Param); + } + }); + }); + function DeleteDupInPath() { + alert("Something went wrong!!!"); + } + + + +
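Every button on this page funnels into RunPluginDupFileManager, which posts a runPluginOperation GraphQL mutation to Stash. For reference, a minimal Python equivalent of that call, assuming the third-party `requests` package and the default localhost endpoint (per the README, an API key would still need to be added for password-protected setups):

```python
import requests

# Default Stash GraphQL endpoint; adjust host/port for your install.
GRAPHQL_URL = "http://localhost:9999/graphql"

def run_plugin_operation(mode, target=0):
    """Invoke a DupFileManager task the same way the menu buttons do."""
    payload = {
        "query": "mutation RunPluginOperation($plugin_id:ID!,$args:Map!)"
                 "{runPluginOperation(plugin_id:$plugin_id,args:$args)}",
        "variables": {
            "plugin_id": "DupFileManager",
            "args": {"Target": target, "mode": mode},
        },
    }
    response = requests.post(GRAPHQL_URL, json=payload)
    response.raise_for_status()
    return response.json()

# Example (illustrative scene ID): remove the duplicate tag from scene 123
# print(run_plugin_operation("removeDupTag", 123))
```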
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ DupFileManager Advance + _DuplicateMarkForDeletion_? Tagged Files + Menu + Apply Multiple Options
+
+ + +
+
+ + +
+
+ + + + +
+
+ +
+
+ + + + + + + + +
+
+ + +
+
+ + + + + + + + +
+
+ + +
+
+ + + + + + + + + + +
+
+ + +
+
+ + + + + + +
+
+ +
+
+ + + + + + +
+
+ +
+
+ + + + + + +
+
+ +
+
+ + + + + + + + + + +
+
+ + +
+ + + + + + +
+
+ + + + + + + + + + +
+
+ + +
+
+
+
+
+ + + + + + + + + + + +
+ Create report with different + [Match Duplicate Distance] options
+
+ Overrides user [Match Duplicate Distance] and + [significantTimeDiff] settings +
+
+ + +
+
+ + + + + + + + + + + + + + + + +
+ Create Report with Tagging +
+
+ +
+
+
+ +
+
+
+ +
+
+
+ +
+
+
+ + + + + + + + + + + + + + + + +
+ Create Report without Tagging +
+
+ +
+
+
+ +
+
+
+ +
+
+
+ +
+
+
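Both report tables pass the clicked button's value joined with the [Time Diff] field as a single value:timeDiff string (see `this.value + ":" + $("#significantTimeDiff").val()` in the script above), and the task side splits on ":" just as getParseData does. A sketch of unpacking such an argument; the helper names, and the duration-ratio interpretation (1 = 100%, .9 = 90%, per the Details section below), are assumptions:

```python
# Sketch: unpack a "matchDistance:significantTimeDiff" argument such as "2:0.96".
# parse_report_args and durations_similar are illustrative names.
def parse_report_args(target):
    distance, time_diff = target.split(":", 1)
    return int(distance), float(time_diff)

def durations_similar(duration1, duration2, significant_time_diff):
    """True when the shorter duration is at least the given fraction of the longer."""
    longer, shorter = max(duration1, duration2), min(duration1, duration2)
    return shorter >= longer * significant_time_diff

distance, time_diff = parse_report_args("2:0.96")
print(distance, time_diff, durations_similar(600, 590, time_diff))  # 2 0.96 True
```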
+ Details: +
    +
  1. Match Duplicate Distance Number Details
  2. +
      +
    1. Exact Match
    2. +
        +
      1. Safest and most reliable option
      2. +
      3. Uses tag name _DuplicateMarkForDeletion_0
      4. +
      5. + Has the fewest results, and it's very rare to have false + matches. +
      6. +
      +
    3. High Match
    4. +
        +
      1. Recommended Setting
      2. +
      3. Safe and usually reliable
      4. +
      5. Uses tag name _DuplicateMarkForDeletion_1
      6. +
      7. + Scenes tagged by Exact Match will have both tags + (_DuplicateMarkForDeletion_0 and + _DuplicateMarkForDeletion_1) +
      8. +
      +
    5. Medium Match
    6. +
        +
      1. Not so safe. Some false matches
      2. +
      3. + To reduce false matches use a time difference of .96 or + higher. +
      4. +
      5. Uses tag name _DuplicateMarkForDeletion_2
      6. +
      7. Scenes tagged by 0 and 1 will have three tags.
      8. +
      +
    7. Low Match
    8. +
        +
      1. Unsafe, and many false matches
      2. +
      3. + To reduce false matches use a time difference of .98 or + higher. +
      4. +
      5. Uses tag name _DuplicateMarkForDeletion_3
      6. +
      7. Scenes tagged by 0, 1, and 2 will have four tags.
      8. +
      9. Has the most results, but with many false matches.
      10. +
      +
    +
  3. Time Difference
  4. +
      +
    1. + Significant time difference setting, where 1 equals 100% and + (.9) equals 90%. +
    2. +
    3. + This setting overrides the setting in + DupFileManager_config.py. +
    4. +
        +
      1. + See setting significantTimeDiff in + DupFileManager_config.py +
      2. +
      +
    5. + This setting is generally not useful for + [Exact Match] reports. +
    6. +
    7. + This is an important setting when creating Low or Medium match + reports. It will reduce false matches. +
    8. +
    +
  5. Report with tagging
  6. +
      +
    1. +
                    Reports with tagging will work with the above
                    DupFileManager Advance Menu.
                  +
    2. +
    3. The report can take several minutes to complete.
    4. +
    5. +
                    It takes much more time to produce a report with tagging
                    compared to creating a report without tagging.
                  +
    6. +
    +
  7. Report WITHOUT tagging
  8. +
      +
    1. +
                    Reports with no tagging can NOT be
                    used with the above DupFileManager Advance Menu.
                  +
    2. +
    3. + The report is created much faster. It usually takes a few + seconds to complete. +
    4. +
    5. + This is the recommended report type to create if the + DupFileManager Advance Menu is not needed or desired. +
    6. +
    +
+
+
+ + From 0ac917d5696034d4b8aa84e70b2ae57489308e26 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Sat, 23 Nov 2024 01:05:10 -0500 Subject: [PATCH 39/39] Update DupFileManager.js --- plugins/DupFileManager/DupFileManager.js | 989 ++++++++++++++++------- 1 file changed, 687 insertions(+), 302 deletions(-) diff --git a/plugins/DupFileManager/DupFileManager.js b/plugins/DupFileManager/DupFileManager.js index 994a4abd..c4d6b67c 100644 --- a/plugins/DupFileManager/DupFileManager.js +++ b/plugins/DupFileManager/DupFileManager.js @@ -1,310 +1,695 @@ (function () { - /*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */ - !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return 
k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var 
t;r.appendChild(e).innerHTML="",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function 
A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="",le.option=!!xe.lastChild;var ke={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n",""]);var je=/<|&#?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="
",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
[minified jQuery library source omitted]
+  const HomePage = () => {
+    var LocalDuplicateReportPath = GetLocalDuplicateReportPath();
+    console.log(LocalDupReportExist);
+    var MyHeader = React.createElement(
+      "h1",
+      null,
+      "DupFileManager Report Menu"
+    );
+    if (LocalDupReportExist)
+      return React.createElement(
+        "center",
+        null,
+        MyHeader,
+        GetShowReportButton(
+          LocalDuplicateReportPath,
+          "Show Duplicate-File Report"
+        ),
+        React.createElement("p", null),
+        GetAdvanceMenuButton(),
+        React.createElement("p", null),
+        GetCreateReportNoTagButton("Create New Report (NO Tagging)"),
+        React.createElement("p", null),
+        GetCreateReportButton("Create New Report with Tagging"),
+        React.createElement("p", null),
+        ToolsMenuOptionButton
+      );
+    return React.createElement(
+      "center",
+      null,
+      MyHeader,
+      GetCreateReportNoTagButton("Create Duplicate-File Report (NO Tagging)"),
+      React.createElement("p", null),
+      GetCreateReportButton("Create Duplicate-File Report with Tagging"),
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const CreateReport = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message:
+          "Running task to create report. This may take a while. Please stand by.",
+      });
+    RunPluginDupFileManager("tag_duplicates_task");
+    return React.createElement(
+      "center",
+      null,
+      React.createElement(
+        "h1",
+        null,
+        "Report complete. Click [Show Report] to view report."
+      ),
+      GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"),
+      React.createElement("p", null),
+      GetAdvanceMenuButton(),
+      React.createElement("p", null),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const CreateReportWithNoTagging = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message: "Running task to create report. Please stand by.",
+      });
+    RunPluginDupFileManager("createDuplicateReportWithoutTagging");
+    return React.createElement(
+      "center",
+      null,
+      React.createElement(
+        "h1",
+        null,
+        "Created HTML report without tagging. Click [Show Report] to view report."
+      ),
+      GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"),
+      React.createElement("p", null),
+      GetAdvanceMenuButton(),
+      React.createElement("p", null),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
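Each report page above kicks off a long-running plugin job through the RunPluginDupFileManager helper, which is defined earlier in this file. As a rough sketch of what such a helper can look like, assuming it wraps Stash's runPluginTask GraphQL mutation against a same-origin /graphql endpoint (the exact mutation signature may differ):

    // Hypothetical helper sketch: start a DupFileManager task via GraphQL.
    // Assumes Stash's runPluginTask mutation and a same-origin /graphql endpoint.
    function RunPluginDupFileManager(taskName) {
      const query = `mutation RunPluginTask($plugin_id: ID!, $task_name: String!) {
        runPluginTask(plugin_id: $plugin_id, task_name: $task_name)
      }`;
      return fetch("/graphql", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          query,
          variables: { plugin_id: "DupFileManager", task_name: taskName },
        }),
      }).then((res) => res.json());
    }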
+  const ToolsAndUtilities = () => {
+    return React.createElement(
+      "center",
+      null,
+      React.createElement("h1", null, "DupFileManager Tools and Utilities"),
+      React.createElement("p", null),
-  const PluginApi = window.PluginApi;
-  const React = PluginApi.React;
-  const GQL = PluginApi.GQL;
-  const { Button } = PluginApi.libraries.Bootstrap;
-  const { faEthernet } = PluginApi.libraries.FontAwesomeSolid;
-  const { Link, NavLink, } = PluginApi.libraries.ReactRouterDOM;
-  // ToolTip text
-  const CreateReportButtonToolTip = "Tag duplicate files, and create a new duplicate file report listing all duplicate files and using existing DupFileManager plugin options selected.";
-  const CreateReportNoTagButtonToolTip = "Create a new duplicate file report listing all duplicate files and using existing DupFileManager plugin options selected. Do NOT tag files.";
-  const ToolsMenuToolTip = "Show DupFileManager advance menu, which list additional tools and utilities.";
-  const ShowReportButtonToolTip = "Open link to the duplicate file (HTML) report created in local path.";
-  const ReportMenuButtonToolTip = "Main report menu for DupFileManager. Create and show duplicate files on an HTML report.";
-  // Buttons
-  const DupFileManagerReportMenuButton = React.createElement(Link, { to: "/plugin/DupFileManager", title: ReportMenuButtonToolTip }, React.createElement(Button, null, "DupFileManager Report Menu"));
-  const ToolsMenuOptionButton = React.createElement(Link, { to: "/plugin/DupFileManager_ToolsAndUtilities", title: ToolsMenuToolTip }, React.createElement(Button, null, "DupFileManager Tools and Utilities"));
-  function GetShowReportButton(LocalDuplicateReportPath, ButtonText){return React.createElement("a", { href: LocalDuplicateReportPath, title: ShowReportButtonToolTip}, React.createElement(Button, null, ButtonText));}
-  function GetAdvanceMenuButton(){return React.createElement("a", { href: AdvanceMenuOptionUrl, title: "Open link to the advance duplicate tagged menu."}, React.createElement(Button, null, "Show Advance Duplicate Tagged Menu"));}
-  function GetCreateReportNoTagButton(ButtonText){return React.createElement(Link, { to: "/plugin/DupFileManager_CreateReportWithNoTagging", title: CreateReportNoTagButtonToolTip }, React.createElement(Button, null, ButtonText));}
-  function GetCreateReportButton(ButtonText){return React.createElement(Link, { to: "/plugin/DupFileManager_CreateReport", title: CreateReportButtonToolTip }, React.createElement(Button, null, ButtonText));}
-
-  const { LoadingIndicator, } = PluginApi.components;
-  const HomePage = () => {
-    var LocalDuplicateReportPath = GetLocalDuplicateReportPath();
-    console.log(LocalDupReportExist);
-    var MyHeader = React.createElement("h1", null, "DupFileManager Report Menu");
-    if (LocalDupReportExist)
-      return (React.createElement("center", null,
-        MyHeader,
-        GetShowReportButton(LocalDuplicateReportPath, "Show Duplicate-File Report"),
-        React.createElement("p", null),
-        GetAdvanceMenuButton(),
-        React.createElement("p", null),
-        GetCreateReportNoTagButton("Create New Report (NO Tagging)"),
-        React.createElement("p", null),
-        GetCreateReportButton("Create New Report with Tagging"),
-        React.createElement("p", null),
-        ToolsMenuOptionButton
-      ));
-    return (React.createElement("center", null,
-      MyHeader,
-      GetCreateReportNoTagButton("Create Duplicate-File Report (NO Tagging)"),
-      React.createElement("p", null),
-      GetCreateReportButton("Create Duplicate-File Report with Tagging"),
-      React.createElement("p", null),
-      ToolsMenuOptionButton
-    ));
-  };
-  const CreateReport = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running task to create report. This may take a while. Please standby."}));
-    RunPluginDupFileManager("tag_duplicates_task");
-    return (React.createElement("center", null,
-      React.createElement("h1", null, "Report complete. Click [Show Report] to view report."),
-      GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"),
-      React.createElement("p", null),
-      GetAdvanceMenuButton(),
-      React.createElement("p", null), DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const CreateReportWithNoTagging = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running task to create report. Please standby."}));
-    RunPluginDupFileManager("createDuplicateReportWithoutTagging");
-    return (React.createElement("center", null,
-      React.createElement("h1", null, "Created HTML report without tagging. Click [Show Report] to view report."),
-      GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"),
-      React.createElement("p", null),
-      GetAdvanceMenuButton(),
-      React.createElement("p", null), DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const ToolsAndUtilities = () => {
-    return (React.createElement("center", null,
-      React.createElement("h1", null, "DupFileManager Tools and Utilities"),
-      React.createElement("p", null),
-
-      React.createElement("h3", {class:"submenu"}, "Report Options"),
-      React.createElement("p", null),
-      GetCreateReportNoTagButton("Create Report (NO Tagging)"),
-      React.createElement("p", null),
-      GetCreateReportButton("Create Report (Tagging)"),
-      React.createElement("p", null),
-      DupFileManagerReportMenuButton,
-      React.createElement("p", null),
-      GetShowReportButton(GetLocalDuplicateReportPath(), "Show Duplicate-File Report"),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_deleteLocalDupReportHtmlFiles", title: "Delete local HTML duplicate file report." }, React.createElement(Button, null, "Delete Duplicate-File Report HTML Files")),
-      React.createElement("hr", {class:"dotted"}),
+      React.createElement("h3", { className: "submenu" }, "Report Options"),
+      React.createElement("p", null),
+      GetCreateReportNoTagButton("Create Report (NO Tagging)"),
+      React.createElement("p", null),
+      GetCreateReportButton("Create Report (Tagging)"),
+      React.createElement("p", null),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      GetShowReportButton(
+        GetLocalDuplicateReportPath(),
+        "Show Duplicate-File Report"
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_deleteLocalDupReportHtmlFiles",
+          title: "Delete local HTML duplicate file report.",
+        },
+        React.createElement(
+          Button,
+          null,
+          "Delete Duplicate-File Report HTML Files"
+        )
+      ),
+      React.createElement("hr", { className: "dotted" }),
-      React.createElement("h3", {class:"submenu"}, "Tagged Duplicates Options"),
-      React.createElement("p", null),
-      GetAdvanceMenuButton(),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_deleteTaggedDuplicatesTask", title: "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion)." }, React.createElement(Button, null, "Delete Tagged Duplicates")),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask", title: "Delete scenes only in blacklist which where previously given duplicate tag (_DuplicateMarkForDeletion)." }, React.createElement(Button, null, "Delete Tagged Duplicates in Blacklist Only")),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration", title: "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion) and lower resultion or duration compare to primary (ToKeep) duplicate." }, React.createElement(Button, null, "Delete Low Res/Dur Tagged Duplicates")),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", title: "Delete scenes only in blacklist which where previously given duplicate tag (_DuplicateMarkForDeletion) and lower resultion or duration compare to primary (ToKeep) duplicate." }, React.createElement(Button, null, "Delete Low Res/Dur Tagged Duplicates in Blacklist Only")),
-      React.createElement("p", null),
-      React.createElement("hr", {class:"dotted"}),
+      React.createElement(
+        "h3",
+        { className: "submenu" },
+        "Tagged Duplicates Options"
+      ),
+      React.createElement("p", null),
+      GetAdvanceMenuButton(),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_deleteTaggedDuplicatesTask",
+          title:
+            "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion).",
+        },
+        React.createElement(Button, null, "Delete Tagged Duplicates")
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask",
+          title:
+            "Delete scenes only in blacklist which were previously given duplicate tag (_DuplicateMarkForDeletion).",
+        },
+        React.createElement(
+          Button,
+          null,
+          "Delete Tagged Duplicates in Blacklist Only"
+        )
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration",
+          title:
+            "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion) and lower resolution or duration compared to the primary (ToKeep) duplicate.",
+        },
+        React.createElement(
+          Button,
+          null,
+          "Delete Low Res/Dur Tagged Duplicates"
+        )
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration",
+          title:
+            "Delete scenes only in blacklist which were previously given duplicate tag (_DuplicateMarkForDeletion) and lower resolution or duration compared to the primary (ToKeep) duplicate.",
+        },
+        React.createElement(
+          Button,
+          null,
+          "Delete Low Res/Dur Tagged Duplicates in Blacklist Only"
+        )
+      ),
+      React.createElement("p", null),
+      React.createElement("hr", { className: "dotted" }),
-      React.createElement("h3", {class:"submenu"}, "Tagged Management Options"),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_ClearAllDuplicateTags", title: "Remove duplicate tag from all scenes. This task may take some time to complete." }, React.createElement(Button, null, "Clear All Duplicate Tags")),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_deleteAllDupFileManagerTags", title: "Delete all DupFileManager tags from stash." }, React.createElement(Button, null, "Delete All DupFileManager Tags")),
-      React.createElement("p", null),
-      React.createElement(Link, { to: "/plugin/DupFileManager_tagGrayList", title: "Set tag _GraylistMarkForDeletion to scenes having DuplicateMarkForDeletion tag and that are in the Graylist." }, React.createElement(Button, null, "Tag Graylist")),
-      React.createElement("hr", {class:"dotted"}),
+      React.createElement(
+        "h3",
+        { className: "submenu" },
+        "Tagged Management Options"
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_ClearAllDuplicateTags",
+          title:
+            "Remove duplicate tag from all scenes. This task may take some time to complete.",
+        },
+        React.createElement(Button, null, "Clear All Duplicate Tags")
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_deleteAllDupFileManagerTags",
+          title: "Delete all DupFileManager tags from stash.",
+        },
+        React.createElement(Button, null, "Delete All DupFileManager Tags")
+      ),
+      React.createElement("p", null),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_tagGrayList",
+          title:
+            "Set tag _GraylistMarkForDeletion on scenes that have the DuplicateMarkForDeletion tag and are in the Graylist.",
+        },
+        React.createElement(Button, null, "Tag Graylist")
+      ),
+      React.createElement("hr", { className: "dotted" }),
-      React.createElement("h3", {class:"submenu"}, "Miscellaneous Options"),
-      React.createElement(Link, { to: "/plugin/DupFileManager_generatePHASH_Matching", title: "Generate PHASH (Perceptual hashes) matching. Used for file comparisons." }, React.createElement(Button, null, "Generate PHASH (Perceptual hashes) Matching")),
-      React.createElement("p", null),
-      React.createElement("p", null),
-      React.createElement("p", null),
-      React.createElement("p", null),
-    ));
-  };
-  const ClearAllDuplicateTags = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running clear duplicate tags in background. This may take a while. Please standby."}));
-    RunPluginDupFileManager("clear_duplicate_tags_task");
-    return (React.createElement("div", null,
-      React.createElement("h1", null, "Removed duplicate tags from all scenes."),
-      DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const deleteLocalDupReportHtmlFiles = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running task to delete HTML files. Please standby."}));
-    RunPluginDupFileManager("deleteLocalDupReportHtmlFiles");
-    return (React.createElement("div", null,
-      React.createElement("h2", null, "Deleted the HTML duplicate file report from local files."),
-      DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const deleteAllDupFileManagerTags = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running task to delete all DupFileManager tags in background. This may take a while. Please standby."}));
-    RunPluginDupFileManager("deleteAllDupFileManagerTags");
-    return (React.createElement("div", null,
-      React.createElement("h1", null, "Deleted all DupFileManager tags."),
-      DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const generatePHASH_Matching = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running task generate PHASH (Perceptual hashes) matching in background. This may take a while. Please standby."}));
-    RunPluginDupFileManager("generate_phash_task");
-    return (React.createElement("div", null,
-      React.createElement("h1", null, "PHASH (Perceptual hashes) complete."),
-      DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const tagGrayList = () => {
-    const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-    if (componentsLoading)
-      return (React.createElement(LoadingIndicator, {message: "Running task to tag _GraylistMarkForDeletion to scenes having DuplicateMarkForDeletion tag and that are in the Graylist. This may take a while. Please standby."}));
-    RunPluginDupFileManager("graylist_tag_task");
-    return (React.createElement("div", null,
-      React.createElement("h1", null, "Gray list tagging complete."),
-      DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-    ));
-  };
-  const deleteTaggedDuplicatesTask = () => {
-    let result = confirm("Are you sure you want to delete all scenes having _DuplicateMarkForDeletion tags? This will delete the files, and remove them from stash.");
-    if (result)
-    {
-      const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-      if (componentsLoading)
-        return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes with _DuplicateMarkForDeletion tag. This may take a while. Please standby."}));
-      RunPluginDupFileManager("delete_tagged_duplicates_task");
-      return (React.createElement("div", null,
-        React.createElement("h1", null, "Scenes with dup tag deleted."),
-        DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-      ));
-    }
-    return ToolsAndUtilities();
-  };
-  const deleteBlackListTaggedDuplicatesTask = () => {
-    let result = confirm("Are you sure you want to delete all scenes in blacklist having _DuplicateMarkForDeletion tags? This will delete the files, and remove tem from stash.");
-    if (result)
-    {
-      const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-      if (componentsLoading)
-        return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag. This may take a while. Please standby."}));
-      RunPluginDupFileManager("deleteBlackListTaggedDuplicatesTask");
-      return (React.createElement("div", null,
-        React.createElement("h1", null, "Blacklist scenes with dup tag deleted."),
-        DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-      ));
-    }
-    return ToolsAndUtilities();
-  };
-  const deleteTaggedDuplicatesLwrResOrLwrDuration = () => {
-    let result = confirm("Are you sure you want to delete scenes having _DuplicateMarkForDeletion tags and lower resultion or duration? This will delete the files, and remove them from stash.");
-    if (result)
-    {
-      const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-      if (componentsLoading)
-        return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes with _DuplicateMarkForDeletion tag and lower resultion or duration. This may take a while. Please standby."}));
-      RunPluginDupFileManager("deleteTaggedDuplicatesLwrResOrLwrDuration");
-      return (React.createElement("div", null,
-        React.createElement("h1", null, "Scenes with dup tag and lower resultion or duration deleted."),
-        DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-      ));
-    }
-    return ToolsAndUtilities();
-  };
-  const deleteBlackListTaggedDuplicatesLwrResOrLwrDuration = () => {
-    let result = confirm("Are you sure you want to delete scenes in blacklist having _DuplicateMarkForDeletion tags and lower resultion or duration? This will delete the files, and remove tem from stash.");
-    if (result)
-    {
-      const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]);
-      if (componentsLoading)
-        return (React.createElement(LoadingIndicator, {message: "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag and lower resultion or duration. This may take a while. Please standby."}));
-      RunPluginDupFileManager("deleteBlackListTaggedDuplicatesLwrResOrLwrDuration");
-      return (React.createElement("div", null,
-        React.createElement("h1", null, "Blacklist scenes with dup tag and lower resultion or duration deleted."),
-        DupFileManagerReportMenuButton, React.createElement("p", null), ToolsMenuOptionButton
-      ));
-    }
-    return ToolsAndUtilities();
-  };
-  PluginApi.register.route("/plugin/DupFileManager", HomePage);
-  PluginApi.register.route("/plugin/DupFileManager_CreateReport", CreateReport);
-  PluginApi.register.route("/plugin/DupFileManager_CreateReportWithNoTagging", CreateReportWithNoTagging);
-  PluginApi.register.route("/plugin/DupFileManager_ToolsAndUtilities", ToolsAndUtilities);
-  PluginApi.register.route("/plugin/DupFileManager_ClearAllDuplicateTags", ClearAllDuplicateTags);
-  PluginApi.register.route("/plugin/DupFileManager_deleteLocalDupReportHtmlFiles", deleteLocalDupReportHtmlFiles);
-  PluginApi.register.route("/plugin/DupFileManager_deleteAllDupFileManagerTags", deleteAllDupFileManagerTags);
-  PluginApi.register.route("/plugin/DupFileManager_generatePHASH_Matching", generatePHASH_Matching);
-  PluginApi.register.route("/plugin/DupFileManager_tagGrayList", tagGrayList);
-  PluginApi.register.route("/plugin/DupFileManager_deleteTaggedDuplicatesTask", deleteTaggedDuplicatesTask);
-  PluginApi.register.route("/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask", deleteBlackListTaggedDuplicatesTask);
-  PluginApi.register.route("/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration", deleteTaggedDuplicatesLwrResOrLwrDuration);
-  PluginApi.register.route("/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", deleteBlackListTaggedDuplicatesLwrResOrLwrDuration);
-  PluginApi.patch.before("SettingsToolsSection", function (props) {
-    const { Setting, } = PluginApi.components;
-    return [
+      React.createElement("h3", { className: "submenu" }, "Miscellaneous Options"),
+      React.createElement(
+        Link,
+        {
+          to: "/plugin/DupFileManager_generatePHASH_Matching",
+          title:
+            "Generate PHASH (Perceptual hashes) matching. Used for file comparisons.",
+        },
+        React.createElement(
+          Button,
+          null,
+          "Generate PHASH (Perceptual hashes) Matching"
+        )
+      ),
+      React.createElement("p", null),
+      React.createElement("p", null),
+      React.createElement("p", null),
+      React.createElement("p", null)
+    );
+  };
+  const ClearAllDuplicateTags = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message:
+          "Running task to clear duplicate tags in background. This may take a while. Please stand by.",
+      });
+    RunPluginDupFileManager("clear_duplicate_tags_task");
+    return React.createElement(
+      "div",
+      null,
+      React.createElement(
+        "h1",
+        null,
+        "Removed duplicate tags from all scenes."
+      ),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const deleteLocalDupReportHtmlFiles = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message: "Running task to delete HTML files. Please stand by.",
+      });
+    RunPluginDupFileManager("deleteLocalDupReportHtmlFiles");
+    return React.createElement(
+      "div",
+      null,
+      React.createElement(
+        "h2",
+        null,
+        "Deleted the HTML duplicate file report from local files."
+      ),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const deleteAllDupFileManagerTags = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message:
+          "Running task to delete all DupFileManager tags in background. This may take a while. Please stand by.",
+      });
+    RunPluginDupFileManager("deleteAllDupFileManagerTags");
+    return React.createElement(
+      "div",
+      null,
+      React.createElement("h1", null, "Deleted all DupFileManager tags."),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const generatePHASH_Matching = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message:
+          "Running task to generate PHASH (Perceptual hashes) matching in background. This may take a while. Please stand by.",
+      });
+    RunPluginDupFileManager("generate_phash_task");
+    return React.createElement(
+      "div",
+      null,
+      React.createElement("h1", null, "PHASH (Perceptual hashes) complete."),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const tagGrayList = () => {
+    const componentsLoading = PluginApi.hooks.useLoadComponents([
+      PluginApi.loadableComponents.SceneCard,
+    ]);
+    if (componentsLoading)
+      return React.createElement(LoadingIndicator, {
+        message:
+          "Running task to add the _GraylistMarkForDeletion tag to scenes that have the DuplicateMarkForDeletion tag and are in the Graylist. This may take a while. Please stand by.",
+      });
+    RunPluginDupFileManager("graylist_tag_task");
+    return React.createElement(
+      "div",
+      null,
+      React.createElement("h1", null, "Graylist tagging complete."),
+      DupFileManagerReportMenuButton,
+      React.createElement("p", null),
+      ToolsMenuOptionButton
+    );
+  };
+  const deleteTaggedDuplicatesTask = () => {
+    let result = confirm(
+      "Are you sure you want to delete all scenes having _DuplicateMarkForDeletion tags? This will delete the files, and remove them from stash."
+    );
+    if (result) {
+      const componentsLoading = PluginApi.hooks.useLoadComponents([
+        PluginApi.loadableComponents.SceneCard,
+      ]);
+      if (componentsLoading)
+        return React.createElement(LoadingIndicator, {
+          message:
+            "Running task to delete all scenes with _DuplicateMarkForDeletion tag. This may take a while. Please stand by.",
+        });
+      RunPluginDupFileManager("delete_tagged_duplicates_task");
+      return React.createElement(
+        "div",
+        null,
+        React.createElement("h1", null, "Scenes with dup tag deleted."),
+        DupFileManagerReportMenuButton,
+        React.createElement("p", null),
+        ToolsMenuOptionButton
+      );
+    }
+    return ToolsAndUtilities();
+  };
+  const deleteBlackListTaggedDuplicatesTask = () => {
+    let result = confirm(
+      "Are you sure you want to delete all scenes in blacklist having _DuplicateMarkForDeletion tags? This will delete the files, and remove them from stash."
+    );
+    if (result) {
+      const componentsLoading = PluginApi.hooks.useLoadComponents([
+        PluginApi.loadableComponents.SceneCard,
+      ]);
+      if (componentsLoading)
+        return React.createElement(LoadingIndicator, {
+          message:
+            "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag. This may take a while. Please stand by.",
+        });
+      RunPluginDupFileManager("deleteBlackListTaggedDuplicatesTask");
+      return React.createElement(
+        "div",
+        null,
+        React.createElement(
+          "h1",
+          null,
+          "Blacklist scenes with dup tag deleted."
+        ),
+        DupFileManagerReportMenuButton,
+        React.createElement("p", null),
+        ToolsMenuOptionButton
+      );
+    }
+    return ToolsAndUtilities();
+  };
+  const deleteTaggedDuplicatesLwrResOrLwrDuration = () => {
+    let result = confirm(
+      "Are you sure you want to delete scenes having _DuplicateMarkForDeletion tags and lower resolution or duration? This will delete the files, and remove them from stash."
+    );
+    if (result) {
+      const componentsLoading = PluginApi.hooks.useLoadComponents([
+        PluginApi.loadableComponents.SceneCard,
+      ]);
+      if (componentsLoading)
+        return React.createElement(LoadingIndicator, {
+          message:
+            "Running task to delete all scenes with _DuplicateMarkForDeletion tag and lower resolution or duration. This may take a while. Please stand by.",
+        });
+      RunPluginDupFileManager("deleteTaggedDuplicatesLwrResOrLwrDuration");
+      return React.createElement(
+        "div",
+        null,
+        React.createElement(
+          "h1",
+          null,
+          "Scenes with dup tag and lower resolution or duration deleted."
+        ),
+        DupFileManagerReportMenuButton,
+        React.createElement("p", null),
+        ToolsMenuOptionButton
+      );
+    }
+    return ToolsAndUtilities();
+  };
+  const deleteBlackListTaggedDuplicatesLwrResOrLwrDuration = () => {
+    let result = confirm(
+      "Are you sure you want to delete scenes in blacklist having _DuplicateMarkForDeletion tags and lower resolution or duration? This will delete the files, and remove them from stash."
+    );
+    if (result) {
+      const componentsLoading = PluginApi.hooks.useLoadComponents([
+        PluginApi.loadableComponents.SceneCard,
+      ]);
+      if (componentsLoading)
+        return React.createElement(LoadingIndicator, {
+          message:
+            "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag and lower resolution or duration. This may take a while. Please stand by.",
+        });
+      RunPluginDupFileManager(
+        "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"
+      );
+      return React.createElement(
+        "div",
+        null,
+        React.createElement(
+          "h1",
+          null,
+          "Blacklist scenes with dup tag and lower resolution or duration deleted."
+        ),
+        DupFileManagerReportMenuButton,
+        React.createElement("p", null),
+        ToolsMenuOptionButton
+      );
+    }
+    return ToolsAndUtilities();
+  };
+  PluginApi.register.route("/plugin/DupFileManager", HomePage);
+  PluginApi.register.route("/plugin/DupFileManager_CreateReport", CreateReport);
+  PluginApi.register.route(
+    "/plugin/DupFileManager_CreateReportWithNoTagging",
+    CreateReportWithNoTagging
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_ToolsAndUtilities",
+    ToolsAndUtilities
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_ClearAllDuplicateTags",
+    ClearAllDuplicateTags
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_deleteLocalDupReportHtmlFiles",
+    deleteLocalDupReportHtmlFiles
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_deleteAllDupFileManagerTags",
+    deleteAllDupFileManagerTags
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_generatePHASH_Matching",
+    generatePHASH_Matching
+  );
+  PluginApi.register.route("/plugin/DupFileManager_tagGrayList", tagGrayList);
+  PluginApi.register.route(
+    "/plugin/DupFileManager_deleteTaggedDuplicatesTask",
+    deleteTaggedDuplicatesTask
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask",
+    deleteBlackListTaggedDuplicatesTask
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration",
+    deleteTaggedDuplicatesLwrResOrLwrDuration
+  );
+  PluginApi.register.route(
+    "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration",
+    deleteBlackListTaggedDuplicatesLwrResOrLwrDuration
+  );
+  PluginApi.patch.before("SettingsToolsSection", function (props) {
+    const { Setting } = PluginApi.components;
+    return [
+      {
+        children: React.createElement(
+          React.Fragment,
+          null,
+          props.children,
+          React.createElement(Setting, {
+            heading: React.createElement(
+              Link,
+              { to: "/plugin/DupFileManager", title: ReportMenuButtonToolTip },
+              React.createElement(
+                Button,
+                null,
+                "Duplicate File Report (DupFileManager)"
+              )
+            ),
+          }),
+          React.createElement(Setting, {
+            heading: React.createElement(
+              Link,
+              {
+                to: "/plugin/DupFileManager_ToolsAndUtilities",
+                title: ToolsMenuToolTip,
+              },
+              React.createElement(
+                Button,
+                null,
+                "DupFileManager Tools and Utilities"
+              )
+            ),
+          })
+        ),
+      },
+    ];
+  });
+  PluginApi.patch.before("MainNavBar.UtilityItems", function (props) {
+    const { Icon } = PluginApi.components;
+    return [
+      {
+        children: React.createElement(
+          React.Fragment,
+          null,
+          props.children,
+          React.createElement(
+            NavLink,
             {
-              children: (React.createElement(React.Fragment, null,
-                props.children,
-                React.createElement(Setting, { heading: React.createElement(Link, { to: "/plugin/DupFileManager", title: ReportMenuButtonToolTip }, React.createElement(Button, null, "Duplicate File Report (DupFileManager)"))}),
-                React.createElement(Setting, { heading: React.createElement(Link, { to: "/plugin/DupFileManager_ToolsAndUtilities", title: ToolsMenuToolTip }, React.createElement(Button, null, "DupFileManager Tools and Utilities"))}),
-              )),
+              className: "nav-utility",
+              exact: true,
+              to: "/plugin/DupFileManager",
             },
-    ];
-  });
-  PluginApi.patch.before("MainNavBar.UtilityItems", function (props) {
-    const { Icon, } = PluginApi.components;
-    return [
-      {
-        children: (React.createElement(React.Fragment, null,
-          props.children,
-          React.createElement(NavLink, { className: "nav-utility", exact: true, to: "/plugin/DupFileManager" },
-            React.createElement(Button, { className: "minimal d-flex align-items-center h-100", title: ReportMenuButtonToolTip },
-              React.createElement(Icon, { icon: faEthernet })))))
-      }
-    ];
-  });
+            React.createElement(
+              Button,
+              {
+                className: "minimal d-flex align-items-center h-100",
+                title: ReportMenuButtonToolTip,
+              },
+              React.createElement(Icon, { icon: faEthernet })
+            )
+          )
+        ),
+      },
+    ];
+  });
 })();