From 84d3b7fdea6846bf1541ee63e66362fbfb77648f Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 23 Jul 2024 07:14:22 -0400
Subject: [PATCH 01/39] Fixed bug associated with rename without title field
populated
There was an (if not) condition which should have been an (if) condition. This caused issues when renaming a file that had an associated performer but no populated title field.
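
For reference, a minimal sketch of the corrected check (variable names as in renamefile.py; the helper function is illustrative, not the plugin's actual structure):

```python
# Sketch of the corrected performer condition in form_filename().
# The buggy version read `if not include_performer_if_in_name or ...`,
# which inverted the option: enabling it caused performers to be skipped.
def should_append_performers(include_performer_if_in_name: bool, performers: str, title: str) -> bool:
    # Append when the option is enabled, or when the performer names
    # are not already part of the title.
    return include_performer_if_in_name or performers.lower() not in title.lower()
```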
---
plugins/RenameFile/README.md | 6 +-
plugins/RenameFile/renamefile.py | 99 ++++++++++++++++++++-----------
plugins/RenameFile/renamefile.yml | 2 +-
3 files changed, 68 insertions(+), 39 deletions(-)
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index 86cb5fb3..e373aea8 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -37,10 +37,12 @@ To avoid this error, refresh the URL before changing the Title field.
### Installation
- Follow **Requirements** instructions.
- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **RenameFile**.
-- Copy all the plugin files to this folder.(**C:\Users\MyUserName\.stash\plugins\RenameFile**).
+- Copy all the plugin files to this folder (**C:\Users\MyUserName\\.stash\plugins\RenameFile**).
- Restart Stash.
That's it!!!
### Options
-To change options, see **renamefile_settings.py** file. After making changes, go to http://localhost:9999/settings?tab=plugins, and click [Reload Plugins].
+- Main options are accessible in the GUI via Settings->Plugins->Plugins->[RenameFile].
+- Advanced options are available in the **renamefile_settings.py** file. After making changes, go to http://localhost:9999/settings?tab=plugins, and click [Reload Plugins].
+
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index be044561..f131a44d 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -27,6 +27,9 @@
DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
DEFAULT_FIELD_KEY_LIST = "title, performers, tags" # Default Field Key List with the desired order
DEFAULT_SEPERATOR = "-"
+PLUGIN_ARGS = False
+
+
# ------------------------------------------
# ------------------------------------------
@@ -58,7 +61,11 @@
# Extract dry_run setting from settings
dry_run = settings["dryRun"]
dry_run_prefix = ''
-logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run})************************************************")
+try:
+ PLUGIN_ARGS = json_input['args']["mode"]
+except:
+ pass
+logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
if debugTracing: logger.info("settings: %s " % (settings,))
if dry_run:
logger.info("Dry run mode is enabled.")
@@ -96,6 +103,7 @@
double_separator = separator + separator
+
# GraphQL query to fetch all scenes
query_all_scenes = """
query AllScenes {
@@ -152,7 +160,7 @@ def form_filename(original_file_stem, scene_details, wrapper_styles):
title = default_title
# ...................
- if debugTracing: logger.info("Debug Tracing................")
+ if debugTracing: logger.info(f"Debug Tracing (title=\"{title}\")................")
# Function to add tag to filename
def add_tag(tag_name):
@@ -195,7 +203,9 @@ def add_tag(tag_name):
if settings["performerAppend"]:
performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])])
if performers:
- if not include_performer_if_in_name or performers.lower() not in title.lower():
+ if debugTracing: logger.info(f"Debug Tracing (include_performer_if_in_name={include_performer_if_in_name})................")
+ if include_performer_if_in_name or performers.lower() not in title.lower():
+ if debugTracing: logger.info(f"Debug Tracing (performers={performers})................")
if wrapper_styles.get('performers'):
filename_parts.append(f"{wrapper_styles['performers'][0]}{performers}{wrapper_styles['performers'][1]}")
else:
@@ -237,7 +247,7 @@ def add_tag(tag_name):
if debugTracing: logger.info(f"Debug Tracing (include_tag_if_in_name={include_tag_if_in_name})................")
if include_tag_if_in_name or tag_name.lower() not in title.lower():
add_tag(tag_name)
- if debugTracing: logger.info("Debug Tracing................")
+ if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
new_filename = separator.join(filename_parts).replace(double_separator, separator)
@@ -408,49 +418,66 @@ def rename_scene(scene_id, wrapper_styles, stash_directory):
return new_filename, original_path_info, new_path_info
-if debugTracing: logger.info("Debug Tracing................")
-# Execute the GraphQL query to fetch all scenes
-scene_result = graphql_request(query_all_scenes)
-if debugTracing: logger.info("Debug Tracing................")
-all_scenes = scene_result.get('data', {}).get('allScenes', [])
-if debugTracing: logger.info("Debug Tracing................")
-if not all_scenes:
+# Main default function for renaming scenes
+def rename_files_task():
+ if debugTracing: logger.info("Debug Tracing................")
+ # Execute the GraphQL query to fetch all scenes
+ scene_result = graphql_request(query_all_scenes)
+ if debugTracing: logger.info("Debug Tracing................")
+ all_scenes = scene_result.get('data', {}).get('allScenes', [])
+ if debugTracing: logger.info("Debug Tracing................")
+ if not all_scenes:
+ if debugTracing: logger.info("Debug Tracing................")
+ log.error("No scenes found.")
+ logger.error("No scenes found.")
+ exit()
if debugTracing: logger.info("Debug Tracing................")
- log.error("No scenes found.")
- logger.error("No scenes found.")
- exit()
-if debugTracing: logger.info("Debug Tracing................")
-# Find the scene with the latest updated_at timestamp
-latest_scene = max(all_scenes, key=lambda scene: scene['updated_at'])
+ # Find the scene with the latest updated_at timestamp
+ latest_scene = max(all_scenes, key=lambda scene: scene['updated_at'])
-# Extract the ID of the latest scene
-latest_scene_id = latest_scene.get('id')
+ # Extract the ID of the latest scene
+ latest_scene_id = latest_scene.get('id')
-# Extract wrapper styles
-wrapper_styles = config["wrapper_styles"]
+ # Extract wrapper styles
+ wrapper_styles = config["wrapper_styles"]
-# Read stash directory from renamefile_settings.py
-stash_directory = config.get('stash_directory', '')
-if debugTracing: logger.info("Debug Tracing................")
+ # Read stash directory from renamefile_settings.py
+ stash_directory = config.get('stash_directory', '')
+ if debugTracing: logger.info("Debug Tracing................")
-if debugTracing: logger.info("Debug Tracing................")
+ if debugTracing: logger.info("Debug Tracing................")
-# Rename the latest scene and trigger metadata scan
-new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory)
-if debugTracing: logger.info("Debug Tracing................")
+ # Rename the latest scene and trigger metadata scan
+ new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory)
+ if debugTracing: logger.info("Debug Tracing................")
-# Log dry run state and indicate if no changes were made
-if dry_run:
- log.info("Dry run: Script executed in dry run mode. No changes were made.")
- logger.info("Dry run: Script executed in dry run mode. No changes were made.")
-elif not new_filename:
- logger.info("No changes were made.")
+ # Log dry run state and indicate if no changes were made
+ if dry_run:
+ log.info("Dry run: Script executed in dry run mode. No changes were made.")
+ logger.info("Dry run: Script executed in dry run mode. No changes were made.")
+ elif not new_filename:
+ logger.info("No changes were made.")
+ else:
+ logger.info("Change success!")
+ return
+
+def fetch_dup_filename_tags(): # Placeholder for new implementation
+ return
+
+if PLUGIN_ARGS == "fetch_dup_filename_tags":
+ fetch_dup_filename_tags()
+elif PLUGIN_ARGS == "rename_files_task":
+ rename_files_task()
else:
- logger.info("Change success!")
+ rename_files_task()
+
if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
+
+
# ToDo List
# Add logic to max_filename_length code so it checks base file length and checks folder length, instead of lumping them altogether.
# Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan.
- # Get variables from the Plugins Settings UI instead of from renamefile_settings.py
\ No newline at end of file
+ # Get variables from the Plugins Settings UI instead of from renamefile_settings.py
+ # Add code to get tags from duplicate filenames
\ No newline at end of file
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 75570699..b838025c 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.2.5
+version: 0.2.6
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
dryRun:
From 1cee24801f26e5a363f07b9d5e2204957548f73e Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 03:30:21 -0400
Subject: [PATCH 02/39] Added ChangeFileMonitor and added more features to
RenameFile
Added new plugin ChangeFileMonitor.
Made the following changes to the RenameFile plugin.
Added fields galleries, resolution, and width.
Fixed bug associated with studio.
Added logic to limit the log file size.
Added logic to only log [Change success] when no errors occur.
Changed default fields to include studio.
Added postfix styles, which were mainly needed to properly format the resolution field, but can be used for the other fields.
Consolidated the [Include Existing Key Field] options into one option.
Cleaned up code and updated the version.
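
For illustration, a small sketch of how the new postfix styles build the resolution field (keys and default values as in renamefile_settings.py; the helper function name is hypothetical):

```python
# Sketch of the resolution formatting added in this patch: width and height
# are joined with the 'width_height_seperator' postfix and suffixed with the
# 'resolution' postfix. Defaults shown match renamefile_settings.py.
POSTFIX_STYLES = {"resolution": 'P', "width_height_seperator": 'x'}

def format_resolution(width: int, height: int) -> str:
    return f"{width}{POSTFIX_STYLES['width_height_seperator']}{height}{POSTFIX_STYLES['resolution']}"

print(format_resolution(1280, 720))  # -> 1280x720P
```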
---
plugins/ChangeFileMonitor | 1 +
plugins/RenameFile/README.md | 27 ++-
plugins/RenameFile/manifest | 14 ++
plugins/RenameFile/renamefile.py | 276 +++++++++++++---------
plugins/RenameFile/renamefile.yml | 57 +++--
plugins/RenameFile/renamefile_settings.py | 40 +++-
6 files changed, 263 insertions(+), 152 deletions(-)
create mode 120000 plugins/ChangeFileMonitor
create mode 100644 plugins/RenameFile/manifest
diff --git a/plugins/ChangeFileMonitor b/plugins/ChangeFileMonitor
new file mode 120000
index 00000000..8ca902f7
--- /dev/null
+++ b/plugins/ChangeFileMonitor
@@ -0,0 +1 @@
+../../Axter-Stash/plugins/ChangeFileMonitor
\ No newline at end of file
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index e373aea8..7ea05101 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,5 +1,5 @@
-# RenameFile: Ver 0.2.5
-RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following two main task.
+# RenameFile: Ver 0.4.0 (By David Maisonave)
+RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
- **Rename Scene File Name** (On-The-Fly)
- **Append tag names** to file name
- **Append Performer names** to file name
@@ -21,6 +21,15 @@ Note: This script is **largely** based on the [Renamer](https://github.com/Serec
- To add these fields see the [Key Fields] option in Settings->Plugins->Plugins->[RenameFile].
- The [Key Fields] can also be used to change the order for the file name format.
- There are many options in Plugins->[RenameFile] UI, and all the options have detailed descriptions. Please advise us if any of the options need further clarification, and provide example details.
+ - **[Key Fields]**: (This option may require more detail than could be provided in the GUI)
+    - Define key fields to use to format the file name. This is a comma separated list, and the list should be in the desired format order. (Default=title,performers,studio,tags)
+      - For example, if the user wants the performer names before the title, list performers first.
+      - Example:"performers,title,tags".
+      - This is an example of a user adding height:"title,performers,tags,height"
+      - Here's an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".
+      - The **resolution** field combines width and height (e.g. 1280x720).
+ - The date field is **not** populated by default unless the user explicitly adds the date value to a scene.
+ - If **[Key Fields]** is empty, the default value is used. (Default=title,performers,studio,tags)
- There are additional options in renamefile_settings.py, but these options should only be changed by advanced users, and any changes should be tested first with the [Dry-Run] option enabled.
**Note:** On Windows 10/11, the file cannot be renamed while it's playing. It will result in the following error:
@@ -30,15 +39,17 @@ Error: [WinError 32] The process cannot access the file because it is being used
To avoid this error, refresh the URL before changing the Title field.
### Requirements
-`pip install stashapp-tools`
-
-`pip install pyYAML`
+`pip install -r requirements.txt`
+- Or manually install each requirement:
+ - `pip install stashapp-tools`
+ - `pip install pyYAML`
+ - `pip install requests`
### Installation
- Follow **Requirements** instructions.
-- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **RenameFile**.
-- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\RenameFile**).
-- Restart Stash.
+- Create a folder named **RenameFile**, in the stash plugin directory (C:\Users\MyUserName\.stash\plugins).
+- Download the latest version from the following link: [RenameFile](https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile), and copy the plugin files to the folder (**C:\Users\MyUserName\\.stash\plugins\RenameFile**).
+- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins.
That's it!!!
diff --git a/plugins/RenameFile/manifest b/plugins/RenameFile/manifest
new file mode 100644
index 00000000..a98d0dcf
--- /dev/null
+++ b/plugins/RenameFile/manifest
@@ -0,0 +1,14 @@
+id: renamefile
+name: RenameFile
+metadata:
+ description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
+version: 0.4.0
+date: "2024-07-26 08:00:00"
+requires: [pip install stashapp-tools, pip install pyYAML]
+source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
+files:
+- README.md
+- renamefile.yml
+- renamefile.py
+- renamefile_settings.py
+- requirements.txt
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index f131a44d..d7c55889 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -1,71 +1,98 @@
-import requests
+# Description: This is a Stash plugin which allows users to rename the video (scene) file name by editing the [Title] field located in the scene [Edit] tab.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
+# Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer
import os
-import logging
+import sys
import shutil
-from pathlib import Path
import hashlib
import json
-import sys
+from pathlib import Path
+import requests
+import logging
+from logging.handlers import RotatingFileHandler
+import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
from stashapi.stashapp import StashInterface
+from renamefile_settings import config # Import settings from renamefile_settings.py
-# This is a Stash plugin which allows users to rename the video (scene) file name by editing the [Title] field located in the scene [Edit] tab.
-
-# Importing stashapi.log as log for critical events
-import stashapi.log as log
-
-# Import settings from renamefile_settings.py
-from renamefile_settings import config
-
-# Get the directory of the script
-script_dir = Path(__file__).resolve().parent
-
-# Configure logging for your script
-log_file_path = script_dir / 'renamefile.log'
+# **********************************************************************
+# Constant global variables --------------------------------------------
+LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
-logging.basicConfig(filename=log_file_path, level=logging.INFO, format=FORMAT)
-logger = logging.getLogger('renamefile')
DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
-DEFAULT_FIELD_KEY_LIST = "title, performers, tags" # Default Field Key List with the desired order
+DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
DEFAULT_SEPERATOR = "-"
PLUGIN_ARGS = False
+PLUGIN_ARGS_MODE = False
+WRAPPER_STYLES = config["wrapper_styles"]
+POSTFIX_STYLES = config["postfix_styles"]
+# GraphQL query to fetch all scenes
+QUERY_ALL_SCENES = """
+ query AllScenes {
+ allScenes {
+ id
+ updated_at
+ }
+ }
+"""
+RFH = RotatingFileHandler(
+ filename=LOG_FILE_PATH,
+ mode='a',
+    maxBytes=2*1024*1024, # Configure logging for this script with a max log file size of 2 MB
+ backupCount=2,
+ encoding=None,
+ delay=0
+)
+
+# **********************************************************************
+# Global variables --------------------------------------------
+inputToUpdateScenePost = False
+exitMsg = "Change success!!"
+
+# Configure local log file for plugin within plugin folder having a limited max log file size
+logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
+logger = logging.getLogger('renamefile')
-
-
-# ------------------------------------------
-# ------------------------------------------
-# Code to fetch variables from Plugin UI
+# **********************************************************************
+# ----------------------------------------------------------------------
+# Code section to fetch variables from Plugin UI and from renamefile_settings.py
json_input = json.loads(sys.stdin.read())
FRAGMENT_SERVER = json_input["server_connection"]
stash = StashInterface(FRAGMENT_SERVER)
pluginConfiguration = stash.get_configuration()["plugins"]
settings = {
- "dryRun": False,
- "fileRenameViaMove": False,
"performerAppend": False,
- "performerIncludeInFileName": False,
+ "studioAppend": False,
"tagAppend": False,
- "tagIncludeInFileName": False,
- "zFieldKeyList": DEFAULT_FIELD_KEY_LIST,
+ "z_keyFIeldsIncludeInFileName": False,
+ "zafileRenameViaMove": False,
+ "zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
"zgraphqlEndpoint": DEFAULT_ENDPOINT,
"zmaximumTagKeys": 12,
"zpathToExclude": "",
"zseparators": DEFAULT_SEPERATOR,
"ztagWhitelist": "",
"zzdebugTracing": False,
+ "zzdryRun": False,
}
if "renamefile" in pluginConfiguration:
settings.update(pluginConfiguration["renamefile"])
-# ------------------------------------------
+# ----------------------------------------------------------------------
debugTracing = settings["zzdebugTracing"]
# Extract dry_run setting from settings
-dry_run = settings["dryRun"]
+dry_run = settings["zzdryRun"]
dry_run_prefix = ''
try:
- PLUGIN_ARGS = json_input['args']["mode"]
+ PLUGIN_ARGS = json_input['args']
+ PLUGIN_ARGS_MODE = json_input['args']["mode"]
except:
pass
-logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
+try:
+ if json_input['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice
+except:
+ pass
+logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************")
if debugTracing: logger.info("settings: %s " % (settings,))
if dry_run:
logger.info("Dry run mode is enabled.")
@@ -82,15 +109,16 @@
if debugTracing: logger.info("Debug Tracing................")
if not tag_whitelist:
tag_whitelist = ""
+if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................")
endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint
-if debugTracing: logger.info("Debug Tracing................")
if not endpoint or endpoint == "":
endpoint = DEFAULT_ENDPOINT
+if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
# Extract rename_files and move_files settings from renamefile_settings.py
rename_files = config["rename_files"]
-move_files = settings["fileRenameViaMove"]
+move_files = settings["zafileRenameViaMove"]
if debugTracing: logger.info("Debug Tracing................")
-fieldKeyList = settings["zFieldKeyList"] # Default Field Key List with the desired order
+fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order
if not fieldKeyList or fieldKeyList == "":
fieldKeyList = DEFAULT_FIELD_KEY_LIST
fieldKeyList = fieldKeyList.replace(" ", "")
@@ -98,22 +126,11 @@
fieldKeyList = fieldKeyList.split(",")
if debugTracing: logger.info(f"Debug Tracing (fieldKeyList={fieldKeyList})................")
separator = settings["zseparators"]
-# ------------------------------------------
-# ------------------------------------------
-double_separator = separator + separator
-
-
+# ----------------------------------------------------------------------
+# **********************************************************************
-# GraphQL query to fetch all scenes
-query_all_scenes = """
- query AllScenes {
- allScenes {
- id
- updated_at
- }
- }
-"""
-if debugTracing: logger.info("Debug Tracing................")
+double_separator = separator + separator
+if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................")
# Function to make GraphQL requests
def graphql_request(query, variables=None):
@@ -142,14 +159,13 @@ def should_exclude_path(scene_details):
return False
# Function to form the new filename based on scene details and user settings
-def form_filename(original_file_stem, scene_details, wrapper_styles):
+def form_filename(original_file_stem, scene_details):
if debugTracing: logger.info("Debug Tracing................")
filename_parts = []
tag_keys_added = 0
default_title = ''
if_notitle_use_org_filename = config["if_notitle_use_org_filename"]
- include_tag_if_in_name = settings["tagIncludeInFileName"]
- include_performer_if_in_name = settings["performerIncludeInFileName"]
+ include_keyField_if_in_name = settings["z_keyFIeldsIncludeInFileName"]
if if_notitle_use_org_filename:
default_title = original_file_stem
# ...................
@@ -166,15 +182,14 @@ def form_filename(original_file_stem, scene_details, wrapper_styles):
def add_tag(tag_name):
nonlocal tag_keys_added
nonlocal filename_parts
- nonlocal wrapper_styles
if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)):
return # Skip adding more tags if the maximum limit is reached
# Check if the tag name is in the whitelist
if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist):
- if wrapper_styles.get('tag'):
- filename_parts.append(f"{wrapper_styles['tag'][0]}{tag_name}{wrapper_styles['tag'][1]}")
+ if WRAPPER_STYLES.get('tag'):
+ filename_parts.append(f"{WRAPPER_STYLES['tag'][0]}{tag_name}{WRAPPER_STYLES['tag'][1]}")
if debugTracing: logger.info("Debug Tracing................")
else:
filename_parts.append(tag_name)
@@ -187,69 +202,120 @@ def add_tag(tag_name):
for key in fieldKeyList:
if key == 'studio':
- studio_name = scene_details.get('studio', {}).get('name', '')
- if studio_name:
- if wrapper_styles.get('studio'):
- filename_parts.append(f"{wrapper_styles['studio'][0]}{studio_name}{wrapper_styles['studio'][1]}")
- else:
- filename_parts.append(studio_name)
+ if settings["studioAppend"]:
+ if debugTracing: logger.info("Debug Tracing................")
+ studio_name = scene_details.get('studio', {})
+ if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................")
+ if studio_name:
+ studio_name = scene_details.get('studio', {}).get('name', '')
+ if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................")
+ if studio_name:
+ studio_name += POSTFIX_STYLES.get('studio')
+ if debugTracing: logger.info("Debug Tracing................")
+ if include_keyField_if_in_name or studio_name.lower() not in title.lower():
+ if WRAPPER_STYLES.get('studio'):
+ filename_parts.append(f"{WRAPPER_STYLES['studio'][0]}{studio_name}{WRAPPER_STYLES['studio'][1]}")
+ else:
+ filename_parts.append(studio_name)
elif key == 'title':
            if title: # This value has already been fetched at the start of the function because it needs to be defined before tags and performers
- if wrapper_styles.get('title'):
- filename_parts.append(f"{wrapper_styles['title'][0]}{title}{wrapper_styles['title'][1]}")
+ title += POSTFIX_STYLES.get('title')
+ if WRAPPER_STYLES.get('title'):
+ filename_parts.append(f"{WRAPPER_STYLES['title'][0]}{title}{WRAPPER_STYLES['title'][1]}")
else:
filename_parts.append(title)
elif key == 'performers':
if settings["performerAppend"]:
performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])])
if performers:
- if debugTracing: logger.info(f"Debug Tracing (include_performer_if_in_name={include_performer_if_in_name})................")
- if include_performer_if_in_name or performers.lower() not in title.lower():
+ performers += POSTFIX_STYLES.get('performers')
+ if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name})................")
+ if include_keyField_if_in_name or performers.lower() not in title.lower():
if debugTracing: logger.info(f"Debug Tracing (performers={performers})................")
- if wrapper_styles.get('performers'):
- filename_parts.append(f"{wrapper_styles['performers'][0]}{performers}{wrapper_styles['performers'][1]}")
+ if WRAPPER_STYLES.get('performers'):
+ filename_parts.append(f"{WRAPPER_STYLES['performers'][0]}{performers}{WRAPPER_STYLES['performers'][1]}")
else:
filename_parts.append(performers)
elif key == 'date':
scene_date = scene_details.get('date', '')
+ if debugTracing: logger.info("Debug Tracing................")
if scene_date:
- if wrapper_styles.get('date'):
- filename_parts.append(f"{wrapper_styles['date'][0]}{scene_date}{wrapper_styles['date'][1]}")
+ scene_date += POSTFIX_STYLES.get('date')
+ if debugTracing: logger.info("Debug Tracing................")
+ if WRAPPER_STYLES.get('date'):
+ filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}")
else:
filename_parts.append(scene_date)
+ elif key == 'resolution':
+ width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string
+ height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string
+ if width and height:
+ resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution')
+ if WRAPPER_STYLES.get('resolution'):
+ filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}")
+ else:
+ filename_parts.append(resolution)
+ elif key == 'width':
+ width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string
+ if width:
+ width += POSTFIX_STYLES.get('width')
+ if WRAPPER_STYLES.get('width'):
+ filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}")
+ else:
+ filename_parts.append(width)
elif key == 'height':
height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string
if height:
- height += 'p'
- if wrapper_styles.get('height'):
- filename_parts.append(f"{wrapper_styles['height'][0]}{height}{wrapper_styles['height'][1]}")
+ height += POSTFIX_STYLES.get('height')
+ if WRAPPER_STYLES.get('height'):
+ filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}")
else:
filename_parts.append(height)
elif key == 'video_codec':
video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase
if video_codec:
- if wrapper_styles.get('video_codec'):
- filename_parts.append(f"{wrapper_styles['video_codec'][0]}{video_codec}{wrapper_styles['video_codec'][1]}")
+ video_codec += POSTFIX_STYLES.get('video_codec')
+ if WRAPPER_STYLES.get('video_codec'):
+ filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}")
else:
filename_parts.append(video_codec)
elif key == 'frame_rate':
- frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + ' FPS' # Convert to string and append ' FPS'
+            frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append 'FPS'
if frame_rate:
- if wrapper_styles.get('frame_rate'):
- filename_parts.append(f"{wrapper_styles['frame_rate'][0]}{frame_rate}{wrapper_styles['frame_rate'][1]}")
+ frame_rate += POSTFIX_STYLES.get('frame_rate')
+ if WRAPPER_STYLES.get('frame_rate'):
+ filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}")
else:
filename_parts.append(frame_rate)
+ elif key == 'galleries':
+ galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])]
+ if debugTracing: logger.info("Debug Tracing................")
+ for gallery_name in galleries:
+ if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})................")
+ if include_keyField_if_in_name or gallery_name.lower() not in title.lower():
+ gallery_name += POSTFIX_STYLES.get('galleries')
+ if WRAPPER_STYLES.get('galleries'):
+ filename_parts.append(f"{WRAPPER_STYLES['galleries'][0]}{gallery_name}{WRAPPER_STYLES['galleries'][1]}")
+ if debugTracing: logger.info("Debug Tracing................")
+ else:
+ filename_parts.append(gallery_name)
+ if debugTracing: logger.info("Debug Tracing................")
+ if debugTracing: logger.info(f"Debug Tracing (gallery_name={gallery_name})................")
+ if debugTracing: logger.info("Debug Tracing................")
elif key == 'tags':
if settings["tagAppend"]:
tags = [tag.get('name', '') for tag in scene_details.get('tags', [])]
if debugTracing: logger.info("Debug Tracing................")
for tag_name in tags:
- if debugTracing: logger.info(f"Debug Tracing (include_tag_if_in_name={include_tag_if_in_name})................")
- if include_tag_if_in_name or tag_name.lower() not in title.lower():
- add_tag(tag_name)
+ if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})................")
+ if include_keyField_if_in_name or tag_name.lower() not in title.lower():
+ add_tag(tag_name + POSTFIX_STYLES.get('tag'))
if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
+ if debugTracing: logger.info("Debug Tracing................")
+ if debugTracing: logger.info(f"Debug Tracing (filename_parts={filename_parts})................")
new_filename = separator.join(filename_parts).replace(double_separator, separator)
+ if debugTracing: logger.info(f"Debug Tracing (new_filename={new_filename})................")
# Check if the scene's path matches any of the excluded paths
if exclude_paths and should_exclude_path(scene_details):
@@ -267,10 +333,14 @@ def find_scene_by_id(scene_id):
date
files {
path
+ width
height
video_codec
frame_rate
}
+ galleries {
+ title
+ }
studio {
name
}
@@ -287,6 +357,7 @@ def find_scene_by_id(scene_id):
return scene_result.get('data', {}).get('findScene')
def move_or_rename_files(scene_details, new_filename, original_parent_directory):
+ global exitMsg
studio_directory = None
for file_info in scene_details['files']:
path = file_info['path']
@@ -325,12 +396,13 @@ def move_or_rename_files(scene_details, new_filename, original_parent_directory)
except FileNotFoundError:
log.error(f"File not found: {path}. Skipping...")
logger.error(f"File not found: {path}. Skipping...")
+ exitMsg = "File not found"
continue
except OSError as e:
log.error(f"Failed to move or rename file: {path}. Error: {e}")
logger.error(f"Failed to move or rename file: {path}. Error: {e}")
+ exitMsg = "Failed to move or rename file"
continue
-
return new_path # Return the new_path variable after the loop
def perform_metadata_scan(metadata_scan_path):
@@ -345,7 +417,8 @@ def perform_metadata_scan(metadata_scan_path):
logger.info(f"Mutation string: {mutation_metadata_scan}")
graphql_request(mutation_metadata_scan)
-def rename_scene(scene_id, wrapper_styles, stash_directory):
+def rename_scene(scene_id, stash_directory):
+ global exitMsg
scene_details = find_scene_by_id(scene_id)
if debugTracing: logger.info(f"Debug Tracing (scene_details={scene_details})................")
if not scene_details:
@@ -372,7 +445,7 @@ def rename_scene(scene_id, wrapper_styles, stash_directory):
original_file_stem = Path(original_file_path).stem
original_file_name = Path(original_file_path).name
- new_filename = form_filename(original_file_stem, scene_details, wrapper_styles)
+ new_filename = form_filename(original_file_stem, scene_details)
newFilenameWithExt = new_filename + Path(original_file_path).suffix
if debugTracing: logger.info(f"Debug Tracing (original_file_name={original_file_name})(newFilenameWithExt={newFilenameWithExt})................")
if original_file_name == newFilenameWithExt:
@@ -400,14 +473,13 @@ def rename_scene(scene_id, wrapper_styles, stash_directory):
os.rename(original_file_path, new_file_path)
logger.info(f"{dry_run_prefix}Renamed file: {original_file_path} -> {new_file_path}")
except Exception as e:
+ exitMsg = "Failed to rename file"
log.error(f"Failed to rename file: {original_file_path}. Error: {e}")
logger.error(f"Failed to rename file: {original_file_path}. Error: {e}")
metadata_scan_path = original_parent_directory
perform_metadata_scan(metadata_scan_path)
- # ToDo: Add logic to the below code section so it checks base file length and checks folder length, instead of lumping them altogether.
- # Current DB schema allows file folder max length to be 255, and max base filename to be 255
max_filename_length = int(config["max_filename_length"])
if len(new_filename) > max_filename_length:
extension_length = len(Path(original_file_path).suffix)
@@ -415,14 +487,15 @@ def rename_scene(scene_id, wrapper_styles, stash_directory):
truncated_filename = new_filename[:max_base_filename_length]
hash_suffix = hashlib.md5(new_filename.encode()).hexdigest()
new_filename = truncated_filename + '_' + hash_suffix + Path(original_file_path).suffix
-
+
+ if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................")
return new_filename, original_path_info, new_path_info
# Main default function for renaming scenes
def rename_files_task():
if debugTracing: logger.info("Debug Tracing................")
# Execute the GraphQL query to fetch all scenes
- scene_result = graphql_request(query_all_scenes)
+ scene_result = graphql_request(QUERY_ALL_SCENES)
if debugTracing: logger.info("Debug Tracing................")
all_scenes = scene_result.get('data', {}).get('allScenes', [])
if debugTracing: logger.info("Debug Tracing................")
@@ -439,19 +512,13 @@ def rename_files_task():
# Extract the ID of the latest scene
latest_scene_id = latest_scene.get('id')
-
- # Extract wrapper styles
- wrapper_styles = config["wrapper_styles"]
-
# Read stash directory from renamefile_settings.py
stash_directory = config.get('stash_directory', '')
if debugTracing: logger.info("Debug Tracing................")
- if debugTracing: logger.info("Debug Tracing................")
-
# Rename the latest scene and trigger metadata scan
- new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory)
- if debugTracing: logger.info("Debug Tracing................")
+ new_filename = rename_scene(latest_scene_id, stash_directory)
+ if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................")
# Log dry run state and indicate if no changes were made
if dry_run:
@@ -460,24 +527,21 @@ def rename_files_task():
elif not new_filename:
logger.info("No changes were made.")
else:
- logger.info("Change success!")
+ logger.info(f"{exitMsg}")
return
def fetch_dup_filename_tags(): # Placeholder for new implementation
return
-if PLUGIN_ARGS == "fetch_dup_filename_tags":
+if PLUGIN_ARGS_MODE == "fetch_dup_filename_tags":
fetch_dup_filename_tags()
-elif PLUGIN_ARGS == "rename_files_task":
+elif PLUGIN_ARGS_MODE == "rename_files_task":
rename_files_task()
-else:
+elif inputToUpdateScenePost:
rename_files_task()
if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
-
-# ToDo List
- # Add logic to max_filename_length code so it checks base file length and checks folder length, instead of lumping them altogether.
+# ToDo: Wish List
# Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan.
- # Get variables from the Plugins Settings UI instead of from renamefile_settings.py
# Add code to get tags from duplicate filenames
\ No newline at end of file
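
For reference, a minimal sketch of the max-filename-length guard shown in the rename_scene() hunk above (the exact max_base_filename_length formula is elided in the diff, so reserving 33 characters for '_' plus the 32-char md5 hex digest is an assumption; the helper name is hypothetical):

```python
# Sketch of the filename-length cap: names longer than max_filename_length
# are truncated and disambiguated with an md5 hash of the full name.
import hashlib
from pathlib import Path

def cap_filename(new_filename: str, original_file_path: str, max_filename_length: int = 255) -> str:
    if len(new_filename) <= max_filename_length:
        return new_filename
    extension = Path(original_file_path).suffix
    max_base_filename_length = max_filename_length - len(extension) - 33  # assumed budget for '_' + md5
    hash_suffix = hashlib.md5(new_filename.encode()).hexdigest()  # 32 hex chars
    return new_filename[:max_base_filename_length] + '_' + hash_suffix + extension
```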
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index b838025c..14006c3a 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,37 +1,34 @@
name: RenameFile
-description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.2.6
+description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
+# By David Maisonave (aka Axter) 2024
+version: 0.4.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
- dryRun:
- displayName: Dry Run
- description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified.
- type: BOOLEAN
- fileRenameViaMove:
- displayName: Rename Using Move
- description: Enable to have file moved when renaming file.
- type: BOOLEAN
performerAppend:
displayName: Append Performers
description: Enable to append performers name to file name when renaming a file. Requires performers to be included in [Key Fields] list, which by default it is included.
- type: BOOLEAN
- performerIncludeInFileName:
- displayName: Include Existing Performers
- description: Enable to append performer even if performers name already exists in the original file name.
- type: BOOLEAN
+ type: BOOLEAN
+ studioAppend:
+ displayName: Append Studio
+ description: Enable to append studio name to file name when renaming a file. Requires studio to be included in [Key Fields] list, which by default it is included.
+ type: BOOLEAN
tagAppend:
displayName: Append Tags
description: Enable to append tag names to file name when renaming a file. Requires tags to be included in [Key Fields] list, which by default it is included.
- type: BOOLEAN
- tagIncludeInFileName:
- displayName: Include Existing Tags
- description: Enable to append tag name even if tag already exists in original file name.
- type: BOOLEAN
- zFieldKeyList:
+ type: BOOLEAN
+  z_keyFIeldsIncludeInFileName: # Prefixing z_ to variable names so that the GUI will place these fields after the above fields (alphabetically listed)
+ displayName: Include Existing Key Field
+    description: Enable to append performers, tags, studios, & galleries even if the name already exists in the original file name.
+ type: BOOLEAN
+ zafileRenameViaMove:
+ displayName: Rename Using Move
+ description: Enable to have file moved when renaming file.
+ type: BOOLEAN
+ zfieldKeyList:
displayName: Key Fields
- description: '(Default=title,performers,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,date,height,video_codec,frame_rate".'
+    description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma separated list, and the list should be in the desired format order. For example, if the user wants the performer names before the title, list performers first. Example:"performers,title,tags". This is an example of a user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".'
type: STRING
- zgraphqlEndpoint: # Prefixing z_ to variable names so that the GUI will place these fields after above fields (alphabatically listed)
+ zgraphqlEndpoint:
displayName: GraphQL Endpoint
description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default.
type: STRING
@@ -54,7 +51,11 @@ settings:
zzdebugTracing:
displayName: Debug Tracing
description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log
- type: BOOLEAN
+ type: BOOLEAN
+ zzdryRun:
+ displayName: Dry Run
+ description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified.
+ type: BOOLEAN
exec:
- python
- "{pluginDir}/renamefile.py"
@@ -65,7 +66,11 @@ hooks:
triggeredBy:
- Scene.Update.Post
tasks:
- - name: Rename Files Task
- description: Renames scene files.
+ # - name: Fetch Tags
+ # description: Get tags from duplicate file names.
+ # defaultArgs:
+ # mode: fetch_dup_filename_tags
+ - name: Rename Last Scene
+ description: Renames file of last updated scene.
defaultArgs:
mode: rename_files_task
diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py
index a027bd01..c4eeab9b 100644
--- a/plugins/RenameFile/renamefile_settings.py
+++ b/plugins/RenameFile/renamefile_settings.py
@@ -1,4 +1,4 @@
-# Importing config dictionary
+# By David Maisonave (aka Axter) 2024
# RenameFile plugin main configuration options are available on the Stash GUI under Settings->Plugins->Plugins->[RenameFile].
# Most users should only use the GUI options.
# The configuration options in this file are for advanced users ONLY!!!
@@ -9,15 +9,33 @@
config = {
# Define wrapper styles for different parts of the filename.
# Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None.
- "wrapper_styles": {
- "studio": '[]', # Modify these values to change how each part of the filename is wrapped.
- "title": '', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None.
- "performers": '()', # Modify these values to change how each part of the filename is wrapped.
- "date": '[]', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None.
- "height": '()', # Modify these values to change how each part of the filename is wrapped.
- "video_codec": '[]', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None.
- "frame_rate": '[]', # Modify these values to change how each part of the filename is wrapped.
- "tag": '[]' # Modify these values to change how each tag part of the filename is wrapped.
+ "wrapper_styles": { # Modify these values to change how each part of the filename is wrapped.
+ "title": '',
+ "performers": '()',
+ "tag": '[]',
+ "studio": '{}',
+ "galleries": '()',
+ "resolution": '', # Contains both WITH and HEIGHT
+ "width": '',
+ "height": '',
+ "video_codec": '',
+ "frame_rate": '',
+ "date": '()', # This field is not populated in the DB by default. It's usually empty.
+ },
+ # Define the field postfix
+ "postfix_styles": {
+ "title": '',
+ "performers": '',
+ "tag": '',
+ "studio": '',
+ "galleries": '',
+ "resolution": 'P', # Contains both WITH and HEIGHT
+ "width": 'W',
+ "height": 'P',
+ "width_height_seperator": 'x', # Used in RESOLUTION field as the string seperating WITH and HEIGHT. Example: 720x480 or 1280X720
+ "video_codec": '',
+ "frame_rate": 'FR',
+ "date": '',
},
# Define whether files should be renamed when moved
"rename_files": True,
@@ -25,6 +43,4 @@
"if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False.
# Current Stash DB schema only allows maximum base file name length to be 255
"max_filename_length": 255,
- # "max_filefolder_length": 255, # For future useage
- # "max_filebase_length": 255, # For future useage
}
From c99d35905a4bd18d8bdca715c6ef1cc4e3fbfeb9 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 03:37:49 -0400
Subject: [PATCH 03/39] Added plugin ChangeFileMonitor and updated RenameFile
plugin
Added new plugin ChangeFileMonitor.
Made the following changes to the RenameFile plugin.
Added fields galleries, resolution, and width.
Fixed bug associated with studio.
Added logic to limit the log file size.
Added logic to only log [Change success] when no errors occur.
Changed default fields to include studio.
Added postfix styles, which were mainly needed to properly format the resolution field, but can be used for the other fields.
Consolidated the [Include Existing Key Field] options into one option.
Cleaned up code and updated the version.
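
For context, a minimal watchdog sketch of the monitoring loop ChangeFileMonitor builds on (illustrative only; the plugin's own handler signals a Condition so a waiting thread can update Stash, rather than printing):

```python
# Minimal watchdog example: watch a directory tree and react to any change.
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

class ChangeHandler(FileSystemEventHandler):
    def on_any_event(self, event):
        # ChangeFileMonitor records event.src_path and wakes its worker thread here.
        print(f"Change detected: {event.src_path}")

observer = Observer()
observer.schedule(ChangeHandler(), path=".", recursive=True)
observer.start()
try:
    while True:
        time.sleep(1)
finally:
    observer.stop()
    observer.join()
```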
---
plugins/ChangeFileMonitor | 1 -
plugins/ChangeFileMonitor/.gitignore | 525 ++++++++++++++++++
plugins/ChangeFileMonitor/README.md | 27 +
.../ChangeFileMonitor/changefilemonitor.py | 246 ++++++++
.../ChangeFileMonitor/changefilemonitor.yml | 35 ++
plugins/ChangeFileMonitor/manifest | 13 +
plugins/ChangeFileMonitor/requirements.txt | 4 +
plugins/RenameFile/.gitignore | 525 ++++++++++++++++++
8 files changed, 1375 insertions(+), 1 deletion(-)
delete mode 120000 plugins/ChangeFileMonitor
create mode 100644 plugins/ChangeFileMonitor/.gitignore
create mode 100644 plugins/ChangeFileMonitor/README.md
create mode 100644 plugins/ChangeFileMonitor/changefilemonitor.py
create mode 100644 plugins/ChangeFileMonitor/changefilemonitor.yml
create mode 100644 plugins/ChangeFileMonitor/manifest
create mode 100644 plugins/ChangeFileMonitor/requirements.txt
create mode 100644 plugins/RenameFile/.gitignore
diff --git a/plugins/ChangeFileMonitor b/plugins/ChangeFileMonitor
deleted file mode 120000
index 8ca902f7..00000000
--- a/plugins/ChangeFileMonitor
+++ /dev/null
@@ -1 +0,0 @@
-../../Axter-Stash/plugins/ChangeFileMonitor
\ No newline at end of file
diff --git a/plugins/ChangeFileMonitor/.gitignore b/plugins/ChangeFileMonitor/.gitignore
new file mode 100644
index 00000000..dd93ef78
--- /dev/null
+++ b/plugins/ChangeFileMonitor/.gitignore
@@ -0,0 +1,525 @@
+
+# Ignore these patterns
+desktop.ini
+~AutoRecover*.*
+*.aps
+*.exe
+*.idb
+*.ipch
+*.lib
+*.log
+*.log.1
+*.log.2
+*.manifest
+*.obj
+*.pch
+*.pdb
+*.sdf
+*.suo
+*.tlog
+*.user
+*.7z
+*.swp
+*.zip
+data.csv
+/boost
+/scintilla
+/bin
+/SQL
+/__pycache__
+__pycache__/
+renamefile_settings.cpython-310.pyc
+
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Ww][Ii][Nn]32/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# Tye
+.tye/
+
+# ASP.NET Scaffolding
+ScaffoldingReadMe.txt
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*_i.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Coverlet is a free, cross platform Code Coverage Tool
+coverage*.json
+coverage*.xml
+coverage*.info
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+node_modules/
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush
+.cr/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+
+# Fody - auto-generated XML schema
+FodyWeavers.xsd
+
+##
+## Visual studio for Mac
+##
+
+
+# globs
+Makefile.in
+*.userprefs
+*.usertasks
+config.make
+config.status
+aclocal.m4
+install-sh
+autom4te.cache/
+*.tar.gz
+tarballs/
+test-results/
+
+# Mac bundle stuff
+*.dmg
+*.app
+
+# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
+# Windows thumbnail cache files
+Thumbs.db
+ehthumbs.db
+ehthumbs_vista.db
+
+# Dump file
+*.stackdump
+
+# Folder config file
+[Dd]esktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+##
+## Visual Studio Code
+##
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+
+# Other miscellaneous folders
+zzMiscellaneous/
+zzExcludeFromGithub/
+FromAnotherLanuageKit/
+_BadLanguages/
+
+# Exclude test data and temp files
+Test_Data/
+*__ExcludeFromRepo__*.*
+*__DoNotAddToRepo__*.*
+deleteme/
+RelatedProjects/
+obj/
+
+# Exclude temp and backup files
+*.bak
+
+# ###########################################
+# Unique to this project
+# ###########################################
+# Exclude reparsepoint files which are used to help view file using VS
+*.xaml.xml
+gitignore.txt
+
+GTranslate/obj/
diff --git a/plugins/ChangeFileMonitor/README.md b/plugins/ChangeFileMonitor/README.md
new file mode 100644
index 00000000..ca09e59e
--- /dev/null
+++ b/plugins/ChangeFileMonitor/README.md
@@ -0,0 +1,27 @@
+# ChangeFileMonitor: Ver 0.1.0 (By David Maisonave)
+ChangeFileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occur in the Stash library paths.
+
+### Using ChangeFileMonitor
+- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->ChangeFileMonitor**, and click on the [Start Library Monitor] button.
+ - ![ChangeFileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
+- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
+ - ![Kill_ChangeFileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
+
+
+### Requirements
+`pip install stashapp-tools`
+`pip install pyYAML`
+`pip install watchdog`
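+
+Alternatively, the plugin's **requirements.txt** lists the same dependencies (plus **requests**), so everything can be installed in one step from the plugin folder:
+`pip install -r requirements.txt`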
+
+### Installation
+- Follow **Requirements** instructions.
+- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **ChangeFileMonitor**.
+- Copy all the plugin files to this folder (**C:\Users\MyUserName\\.stash\plugins\ChangeFileMonitor**).
+- Restart Stash.
+
+That's it!!!
+
+### Options
+- All options are accessible in the GUI via Settings->Plugins->Plugins->[ChangeFileMonitor].
+
+
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
new file mode 100644
index 00000000..41918a5c
--- /dev/null
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -0,0 +1,246 @@
+# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+import os
+import sys
+import time
+import shutil
+import hashlib
+import json
+from pathlib import Path
+import requests
+import logging
+from logging.handlers import RotatingFileHandler
+import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
+from stashapi.stashapp import StashInterface
+from watchdog.observers import Observer # This is also needed for event attributes
+import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
+from threading import Lock, Condition
+from multiprocessing import shared_memory
+
+# **********************************************************************
+# Constant global variables --------------------------------------------
+LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
+FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
+DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
+PLUGIN_ARGS = False
+PLUGIN_ARGS_MODE = False
+# GraphQL query to fetch all scenes
+QUERY_ALL_SCENES = """
+ query AllScenes {
+ allScenes {
+ id
+ updated_at
+ }
+ }
+"""
+RFH = RotatingFileHandler(
+ filename=LOG_FILE_PATH,
+ mode='a',
+    maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2MB
+ backupCount=2,
+ encoding=None,
+ delay=0
+)
+TIMEOUT = 5
+CONTINUE_RUNNING_SIG = 99
+
+# **********************************************************************
+# Global variables --------------------------------------------
+exitMsg = "Change success!!"
+mutex = Lock()
+signal = Condition(mutex)
+shouldUpdate = False
+TargetPaths = []
+
+# Configure local log file for plugin within plugin folder having a limited max log file size
+logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
+logger = logging.getLogger(Path(__file__).stem)
+
+# **********************************************************************
+# ----------------------------------------------------------------------
+# Code section to fetch variables from Plugin UI and from changefilemonitor_settings.py
+json_input = json.loads(sys.stdin.read())
+FRAGMENT_SERVER = json_input["server_connection"]
+stash = StashInterface(FRAGMENT_SERVER)
+PLUGINCONFIGURATION = stash.get_configuration()["plugins"]
+STASHCONFIGURATION = stash.get_configuration()["general"]
+STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
+stashPaths = []
+settings = {
+ "scanModified": False,
+ "recursiveDisabled": False,
+ "zgraphqlEndpoint": DEFAULT_ENDPOINT,
+ "zzdebugTracing": False,
+ "zzdryRun": False,
+}
+PLUGIN_ID = "changefilemonitor"
+if PLUGIN_ID in PLUGINCONFIGURATION:
+ settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
+# ----------------------------------------------------------------------
+debugTracing = settings["zzdebugTracing"]
+RECURSIVE = settings["recursiveDisabled"] == False
+SCAN_MODIFIED = settings["scanModified"]
+
+for item in STASHPATHSCONFIG:
+ stashPaths.append(item["path"])
+
+# Extract dry_run setting from settings
+dry_run = settings["zzdryRun"]
+dry_run_prefix = ''
+try:
+ PLUGIN_ARGS = json_input['args']
+ PLUGIN_ARGS_MODE = json_input['args']["mode"]
+except:
+ pass
+logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
+if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
+if debugTracing: logger.info("settings: %s " % (settings,))
+if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
+if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")
+
+if dry_run:
+ logger.info("Dry run mode is enabled.")
+ dry_run_prefix = "Would've "
+if debugTracing: logger.info("Debug Tracing................")
+# ToDo: Add split logic here to split a possible string array into an array
+endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint
+if not endpoint or endpoint == "":
+ endpoint = DEFAULT_ENDPOINT
+if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
+# ----------------------------------------------------------------------
+# **********************************************************************
+if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................")
+
+def start_library_monitor():
+ global shouldUpdate
+ global TargetPaths
+ try:
+        # Create a shared memory buffer used to enforce a single running instance, and to receive a quit signal from an external script
+ shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=True, size=4)
+    except:
+        pass
+        logger.info("Could not open shared memory map. Change File Monitor must already be running. Cannot run multiple instances of Change File Monitor.")
+        return
+ type(shm_a.buf)
+ shm_buffer = shm_a.buf
+ len(shm_buffer)
+ shm_buffer[0] = CONTINUE_RUNNING_SIG
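+    # While this byte remains CONTINUE_RUNNING_SIG, monitoring continues; an external process can overwrite it to request a graceful shutdown (the flag is checked at the bottom of the main loop).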
+    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+
+ event_handler = watchdog.events.FileSystemEventHandler()
+ def on_created(event):
+ global shouldUpdate
+ global TargetPaths
+ TargetPaths.append(event.src_path)
+ logger.info(f"CREATE *** '{event.src_path}'")
+ with mutex:
+ shouldUpdate = True
+ signal.notify()
+
+ def on_deleted(event):
+ global shouldUpdate
+ global TargetPaths
+ TargetPaths.append(event.src_path)
+ logger.info(f"DELETE *** '{event.src_path}'")
+ with mutex:
+ shouldUpdate = True
+ signal.notify()
+
+ def on_modified(event):
+ global shouldUpdate
+ global TargetPaths
+ if SCAN_MODIFIED:
+ TargetPaths.append(event.src_path)
+ logger.info(f"MODIFIED *** '{event.src_path}'")
+ with mutex:
+ shouldUpdate = True
+ signal.notify()
+ else:
+ if debugTracing: logger.info(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'")
+
+ def on_moved(event):
+ global shouldUpdate
+ global TargetPaths
+ TargetPaths.append(event.src_path)
+ TargetPaths.append(event.dest_path)
+ logger.info(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
+ with mutex:
+ shouldUpdate = True
+ signal.notify()
+
+ event_handler.on_created = on_created
+ event_handler.on_deleted = on_deleted
+ event_handler.on_modified = on_modified
+ event_handler.on_moved = on_moved
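+    # The four callbacks above queue each handled event's path in TargetPaths and wake the scan loop through the shared condition variable.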
+
+ observer = Observer()
+ # Iterate through stashPaths
+ for path in stashPaths:
+ observer.schedule(event_handler, path, recursive=RECURSIVE)
+ if debugTracing: logger.info(f"Observing {path}")
+ observer.start()
+ if debugTracing: logger.info("Starting loop................")
+ try:
+ while True:
+ TmpTargetPaths = []
+ with mutex:
+ while not shouldUpdate:
+ if debugTracing: logger.info("Wait start................")
+ signal.wait()
+ if debugTracing: logger.info("Wait end................")
+ shouldUpdate = False
+ TmpTargetPaths = []
+ for TargetPath in TargetPaths:
+ TmpTargetPaths.append(os.path.dirname(TargetPath))
+ TargetPaths = []
+ TmpTargetPaths = list(set(TmpTargetPaths))
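+            # Each changed file is reduced to its parent folder and deduplicated, so every affected directory is scanned at most once per batch.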
+ if TmpTargetPaths != []:
+ logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}")
+ if not dry_run:
+ stash.metadata_scan(paths=TmpTargetPaths)
+ stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
+ if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
+ return
+ else:
+ if debugTracing: logger.info("Nothing to scan.")
+ if shm_buffer[0] != CONTINUE_RUNNING_SIG:
+ logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
+ shm_a.close()
+ shm_a.unlink() # Call unlink only once to release the shared memory
+ time.sleep(1)
+ break
+ except KeyboardInterrupt:
+ observer.stop()
+ if debugTracing: logger.info("Stopping observer................")
+ observer.join()
+ if debugTracing: logger.info("Exiting function................")
+
+# stop_library_monitor does not work because only one task can run at a time.
+# def stop_library_monitor():
+ # if debugTracing: logger.info("Opening shared memory map.")
+ # try:
+ # shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
+ # except:
+ # pass
+ # logger.info("Could not open shared memory map. Change File Monitor must not be running.")
+ # return
+ # type(shm_a.buf)
+ # shm_buffer = shm_a.buf
+ # len(shm_buffer)
+ # shm_buffer[0] = 123
+ # if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}")
+ # shm_a.close()
+ # shm_a.unlink() # Call unlink only once to release the shared memory
+ # time.sleep(1)
+ # return
+
+if PLUGIN_ARGS_MODE == "start_library_monitor":
+ start_library_monitor()
+ if debugTracing: logger.info(f"start_library_monitor EXIT................")
+# elif PLUGIN_ARGS_MODE == "stop_library_monitor":
+ # stop_library_monitor()
+ # if debugTracing: logger.info(f"stop_library_monitor EXIT................")
+else:
+ logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
+
+if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
new file mode 100644
index 00000000..0150513d
--- /dev/null
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -0,0 +1,35 @@
+# By David Maisonave (aka Axter) 2024
+name: ChangeFileMonitor
+description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
+version: 0.1.0
+url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+settings:
+ scanModified:
+ displayName: Scan Modifications
+ description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
+ type: BOOLEAN
+ recursiveDisabled:
+ displayName: No Recursive
+    description: Enable to stop monitoring paths recursively.
+ type: BOOLEAN
+ zgraphqlEndpoint:
+ displayName: GraphQL Endpoint
+ description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default.
+ type: STRING
+ zzdebugTracing:
+ displayName: Debug Tracing
+ description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log
+ type: BOOLEAN
+ zzdryRun:
+ displayName: Dry Run
+    description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call metadata_scan, and only logs the action it would have taken.
+ type: BOOLEAN
+exec:
+ - python
+ - "{pluginDir}/changefilemonitor.py"
+interface: raw
+tasks:
+ - name: Start Library Monitor
+ description: Monitors paths in Stash library for media file changes, and updates Stash.
+ defaultArgs:
+ mode: start_library_monitor
diff --git a/plugins/ChangeFileMonitor/manifest b/plugins/ChangeFileMonitor/manifest
new file mode 100644
index 00000000..4a03c5f4
--- /dev/null
+++ b/plugins/ChangeFileMonitor/manifest
@@ -0,0 +1,13 @@
+id: changefilemonitor
+name: ChangeFileMonitor
+metadata:
+  description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
+version: 0.1.0
+date: "2024-07-26 08:00:00"
+requires: [pip install stashapp-tools, pip install pyYAML, pip install watchdog]
+source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+files:
+- README.md
+- changefilemonitor.yml
+- changefilemonitor.py
+- requirements.txt
diff --git a/plugins/ChangeFileMonitor/requirements.txt b/plugins/ChangeFileMonitor/requirements.txt
new file mode 100644
index 00000000..aa553701
--- /dev/null
+++ b/plugins/ChangeFileMonitor/requirements.txt
@@ -0,0 +1,4 @@
+stashapp-tools
+pyYAML
+watchdog
+requests
\ No newline at end of file
diff --git a/plugins/RenameFile/.gitignore b/plugins/RenameFile/.gitignore
new file mode 100644
index 00000000..dd93ef78
--- /dev/null
+++ b/plugins/RenameFile/.gitignore
@@ -0,0 +1,525 @@
+# Ignore these patterns
+desktop.ini
+~AutoRecover*.*
+*.aps
+*.exe
+*.idb
+*.ipch
+*.lib
+*.log
+*.log.1
+*.log.2
+*.manifest
+*.obj
+*.pch
+*.pdb
+*.sdf
+*.suo
+*.tlog
+*.user
+*.7z
+*.swp
+*.zip
+data.csv
+/boost
+/scintilla
+/bin
+/SQL
+/__pycache__
+__pycache__/
+renamefile_settings.cpython-310.pyc
+
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Ww][Ii][Nn]32/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# Tye
+.tye/
+
+# ASP.NET Scaffolding
+ScaffoldingReadMe.txt
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*_i.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Coverlet is a free, cross platform Code Coverage Tool
+coverage*.json
+coverage*.xml
+coverage*.info
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+node_modules/
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush
+.cr/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+
+# Fody - auto-generated XML schema
+FodyWeavers.xsd
+
+##
+## Visual studio for Mac
+##
+
+
+# globs
+Makefile.in
+*.userprefs
+*.usertasks
+config.make
+config.status
+aclocal.m4
+install-sh
+autom4te.cache/
+*.tar.gz
+tarballs/
+test-results/
+
+# Mac bundle stuff
+*.dmg
+*.app
+
+# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
+# Windows thumbnail cache files
+Thumbs.db
+ehthumbs.db
+ehthumbs_vista.db
+
+# Dump file
+*.stackdump
+
+# Folder config file
+[Dd]esktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+##
+## Visual Studio Code
+##
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+
+# Other miscellaneous folders
+zzMiscellaneous/
+zzExcludeFromGithub/
+FromAnotherLanuageKit/
+_BadLanguages/
+
+# Exclude test data and temp files
+Test_Data/
+*__ExcludeFromRepo__*.*
+*__DoNotAddToRepo__*.*
+deleteme/
+RelatedProjects/
+obj/
+
+# Exclude temp and backup files
+*.bak
+
+# ###########################################
+# Unique to this project
+# ###########################################
+# Exclude reparse point files which are used to help view files using VS
+*.xaml.xml
+gitignore.txt
+
+GTranslate/obj/
From 81a83bb10b4c6b1aab5bcf833ff2406ccdfc9e56 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 04:58:17 -0400
Subject: [PATCH 04/39] Added clean option
---
.../ChangeFileMonitor/changefilemonitor.py | 19 ++++++++++++++-----
.../ChangeFileMonitor/changefilemonitor.yml | 12 ++++++++----
2 files changed, 22 insertions(+), 9 deletions(-)
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
index 41918a5c..ab81e793 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.py
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -67,8 +67,9 @@
STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
stashPaths = []
settings = {
- "scanModified": False,
"recursiveDisabled": False,
+ "runCleanAfterDelete": False,
+ "scanModified": False,
"zgraphqlEndpoint": DEFAULT_ENDPOINT,
"zzdebugTracing": False,
"zzdryRun": False,
@@ -80,25 +81,26 @@
debugTracing = settings["zzdebugTracing"]
RECURSIVE = settings["recursiveDisabled"] == False
SCAN_MODIFIED = settings["scanModified"]
+RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"]
for item in STASHPATHSCONFIG:
stashPaths.append(item["path"])
# Extract dry_run setting from settings
-dry_run = settings["zzdryRun"]
+DRY_RUN = settings["zzdryRun"]
dry_run_prefix = ''
try:
PLUGIN_ARGS = json_input['args']
PLUGIN_ARGS_MODE = json_input['args']["mode"]
except:
pass
-logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
+logger.info(f"\nStarting (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
if debugTracing: logger.info("settings: %s " % (settings,))
if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")
-if dry_run:
+if DRY_RUN:
logger.info("Dry run mode is enabled.")
dry_run_prefix = "Would've "
if debugTracing: logger.info("Debug Tracing................")
@@ -126,6 +128,7 @@ def start_library_monitor():
len(shm_buffer)
shm_buffer[0] = CONTINUE_RUNNING_SIG
     if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+ RunCleanMetadata = False
event_handler = watchdog.events.FileSystemEventHandler()
def on_created(event):
@@ -140,10 +143,12 @@ def on_created(event):
def on_deleted(event):
global shouldUpdate
global TargetPaths
+ nonlocal RunCleanMetadata
TargetPaths.append(event.src_path)
logger.info(f"DELETE *** '{event.src_path}'")
with mutex:
shouldUpdate = True
+ RunCleanMetadata = True
signal.notify()
def on_modified(event):
@@ -168,12 +173,14 @@ def on_moved(event):
shouldUpdate = True
signal.notify()
+ if debugTracing: logger.info("Debug Trace........")
event_handler.on_created = on_created
event_handler.on_deleted = on_deleted
event_handler.on_modified = on_modified
event_handler.on_moved = on_moved
observer = Observer()
+
# Iterate through stashPaths
for path in stashPaths:
observer.schedule(event_handler, path, recursive=RECURSIVE)
@@ -196,8 +203,10 @@ def on_moved(event):
TmpTargetPaths = list(set(TmpTargetPaths))
if TmpTargetPaths != []:
logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}")
- if not dry_run:
+ if not DRY_RUN:
stash.metadata_scan(paths=TmpTargetPaths)
+ if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
+ stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN)
stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
return
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 0150513d..501e0e32 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -4,13 +4,17 @@ description: Monitors the Stash library folders, and updates Stash if any chan
version: 0.1.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
settings:
- scanModified:
- displayName: Scan Modifications
- description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
- type: BOOLEAN
recursiveDisabled:
displayName: No Recursive
     description: Enable to stop monitoring paths recursively.
+ type: BOOLEAN
+ runCleanAfterDelete:
+ displayName: Run Clean
+ description: Enable to run metadata clean task after file deletion.
+ type: BOOLEAN
+ scanModified:
+ displayName: Scan Modifications
+ description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
type: BOOLEAN
zgraphqlEndpoint:
displayName: GraphQL Endpoint
From f34a382b9131d6f325b836382e5a4cf52ca3e0a6 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:04:04 -0400
Subject: [PATCH 05/39] Update changefilemonitor.yml
---
plugins/ChangeFileMonitor/changefilemonitor.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 501e0e32..95c00f5d 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -14,7 +14,7 @@ settings:
type: BOOLEAN
scanModified:
displayName: Scan Modifications
- description: Enable to monitor changes in file system for modification flag. Flags for CREATE, DELETE, and MOVE will still get triggered if this is disabled.
+ description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
type: BOOLEAN
zgraphqlEndpoint:
displayName: GraphQL Endpoint
From e1133dc65b908d45882a8eb73713eb30606bc7c2 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:10:41 -0400
Subject: [PATCH 06/39] Update changefilemonitor.yml
---
plugins/ChangeFileMonitor/changefilemonitor.yml | 1 -
1 file changed, 1 deletion(-)
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 95c00f5d..818c4f7d 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -1,4 +1,3 @@
-# By David Maisonave (aka Axter) 2024
name: ChangeFileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
version: 0.1.0
From 789e5d05bd0f4dfb795f7b78554f5df185769b97 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:12:04 -0400
Subject: [PATCH 07/39] Update renamefile.yml
---
plugins/RenameFile/renamefile.yml | 5 -----
1 file changed, 5 deletions(-)
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 14006c3a..e85c0d81 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,5 @@
name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-# By David Maisonave (aka Axter) 2024
version: 0.4.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
@@ -66,10 +65,6 @@ hooks:
triggeredBy:
- Scene.Update.Post
tasks:
- # - name: Fetch Tags
- # description: Get tags from duplicate file names.
- # defaultArgs:
- # mode: fetch_dup_filename_tags
- name: Rename Last Scene
description: Renames file of last updated scene.
defaultArgs:
From ac708add88085a98deb9596cd961e1fa2469d4e5 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:15:38 -0400
Subject: [PATCH 08/39] Fixing format
---
plugins/ChangeFileMonitor/changefilemonitor.yml | 2 +-
plugins/RenameFile/renamefile.yml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 818c4f7d..0990e327 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -35,4 +35,4 @@ tasks:
- name: Start Library Monitor
description: Monitors paths in Stash library for media file changes, and updates Stash.
defaultArgs:
- mode: start_library_monitor
+ mode: start_library_monitor
\ No newline at end of file
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index e85c0d81..820f7edb 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -68,4 +68,4 @@ tasks:
- name: Rename Last Scene
description: Renames file of last updated scene.
defaultArgs:
- mode: rename_files_task
+ mode: rename_files_task
\ No newline at end of file
From 0bd49ca4548dcd8bbc32e0f72828032636f63061 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:19:37 -0400
Subject: [PATCH 09/39] Create .prettierignore
---
plugins/ChangeFileMonitor/.prettierignore | 14 ++++++++++++++
1 file changed, 14 insertions(+)
create mode 100644 plugins/ChangeFileMonitor/.prettierignore
diff --git a/plugins/ChangeFileMonitor/.prettierignore b/plugins/ChangeFileMonitor/.prettierignore
new file mode 100644
index 00000000..951dc9a3
--- /dev/null
+++ b/plugins/ChangeFileMonitor/.prettierignore
@@ -0,0 +1,14 @@
+## Please check .eslintignore and .gitignore when changing this file
+
+## file extensions
+*.*
+!*.css
+!*.js
+!*.json
+!*.jsx
+!*.less
+!*.md
+!*.mdx
+!*.ts
+!*.tsx
+!*.yml
\ No newline at end of file
From 1e4f9d37b72b536514ce9203f102a87eddc76f8f Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:23:27 -0400
Subject: [PATCH 10/39] Delete .prettierignore
---
plugins/ChangeFileMonitor/.prettierignore | 14 --------------
1 file changed, 14 deletions(-)
delete mode 100644 plugins/ChangeFileMonitor/.prettierignore
diff --git a/plugins/ChangeFileMonitor/.prettierignore b/plugins/ChangeFileMonitor/.prettierignore
deleted file mode 100644
index 951dc9a3..00000000
--- a/plugins/ChangeFileMonitor/.prettierignore
+++ /dev/null
@@ -1,14 +0,0 @@
-## Please check .eslintignore and .gitignore when changing this file
-
-## file extensions
-*.*
-!*.css
-!*.js
-!*.json
-!*.jsx
-!*.less
-!*.md
-!*.mdx
-!*.ts
-!*.tsx
-!*.yml
\ No newline at end of file
From a9b73d6c3d824e326115fa0d1a88759c4e5f2d6b Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 05:33:53 -0400
Subject: [PATCH 11/39] Fixed format via prettier
---
.../ChangeFileMonitor/changefilemonitor.yml | 10 +++++-----
plugins/RenameFile/renamefile.yml | 18 +++++++++---------
2 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 0990e327..b522cab8 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -1,5 +1,5 @@
name: ChangeFileMonitor
-description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths. 
+description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
version: 0.1.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
settings:
@@ -14,7 +14,7 @@ settings:
scanModified:
displayName: Scan Modifications
description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
- type: BOOLEAN
+ type: BOOLEAN
zgraphqlEndpoint:
displayName: GraphQL Endpoint
description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default.
@@ -22,11 +22,11 @@ settings:
zzdebugTracing:
displayName: Debug Tracing
description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log
- type: BOOLEAN
+ type: BOOLEAN
zzdryRun:
displayName: Dry Run
     description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call metadata_scan, and only logs the action it would have taken.
- type: BOOLEAN
+ type: BOOLEAN
exec:
- python
- "{pluginDir}/changefilemonitor.py"
@@ -35,4 +35,4 @@ tasks:
- name: Start Library Monitor
description: Monitors paths in Stash library for media file changes, and updates Stash.
defaultArgs:
- mode: start_library_monitor
\ No newline at end of file
+ mode: start_library_monitor
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 820f7edb..4bc81ac6 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,28 +1,28 @@
name: RenameFile
-description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
+description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
version: 0.4.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
performerAppend:
displayName: Append Performers
     description: Enable to append performer names to the file name when renaming a file. Requires performers to be included in the [Key Fields] list, which it is by default.
- type: BOOLEAN
+ type: BOOLEAN
studioAppend:
displayName: Append Studio
     description: Enable to append the studio name to the file name when renaming a file. Requires studio to be included in the [Key Fields] list, which it is by default.
- type: BOOLEAN
+ type: BOOLEAN
tagAppend:
displayName: Append Tags
     description: Enable to append tag names to the file name when renaming a file. Requires tags to be included in the [Key Fields] list, which it is by default.
- type: BOOLEAN
+ type: BOOLEAN
   z_keyFIeldsIncludeInFileName: # Prefixing z_ to variable names so that the GUI will place these fields after the above fields (alphabetically listed)
displayName: Include Existing Key Field
     description: Enable to append performer, tags, studios, & galleries even if the name already exists in the original file name.
- type: BOOLEAN
+ type: BOOLEAN
zafileRenameViaMove:
displayName: Rename Using Move
     description: Enable to have the file moved when renaming a file.
- type: BOOLEAN
+ type: BOOLEAN
zfieldKeyList:
displayName: Key Fields
     description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma separated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of a user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".'
@@ -50,11 +50,11 @@ settings:
zzdebugTracing:
displayName: Debug Tracing
description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log
- type: BOOLEAN
+ type: BOOLEAN
zzdryRun:
displayName: Dry Run
description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified.
- type: BOOLEAN
+ type: BOOLEAN
exec:
- python
- "{pluginDir}/renamefile.py"
@@ -68,4 +68,4 @@ tasks:
- name: Rename Last Scene
description: Renames file of last updated scene.
defaultArgs:
- mode: rename_files_task
\ No newline at end of file
+ mode: rename_files_task
From ec379fef07a57d1b205b5e1e10d6bc6bc2ad9399 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 28 Jul 2024 15:30:49 -0400
Subject: [PATCH 12/39] Removed unused UI variable endpoint
Removed unused UI variable endpoint.
Added logic that allows ChangeFileMonitor to run as a script.
Updated README.md file to explain how to run it as a script.
---
plugins/ChangeFileMonitor/README.md | 10 +-
.../ChangeFileMonitor/changefilemonitor.py | 112 ++++++++++++------
.../ChangeFileMonitor/changefilemonitor.yml | 8 +-
plugins/RenameFile/renamefile.py | 16 ++-
4 files changed, 99 insertions(+), 47 deletions(-)
diff --git a/plugins/ChangeFileMonitor/README.md b/plugins/ChangeFileMonitor/README.md
index ca09e59e..30cd2412 100644
--- a/plugins/ChangeFileMonitor/README.md
+++ b/plugins/ChangeFileMonitor/README.md
@@ -1,12 +1,20 @@
# ChangeFileMonitor: Ver 0.1.0 (By David Maisonave)
 ChangeFileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occur in the Stash library paths.
-### Using ChangeFileMonitor
+### Using ChangeFileMonitor as a plugin
- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->ChangeFileMonitor**, and click on the [Start Library Monitor] button.
- ![ChangeFileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
- ![Kill_ChangeFileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
+### Using ChangeFileMonitor as a script
+**ChangeFileMonitor** can be called as a standalone script.
+- To start monitoring, call the script and pass any argument.
+ - python changefilemonitor.py **foofoo**
+- To stop **ChangeFileMonitor**, pass argument **stop**.
+ - python changefilemonitor.py **stop**
+  - After running the above command, **ChangeFileMonitor** will stop after the next file change occurs.
+  - The stop command works for both the standalone job and the Stash plugin task job.
### Requirements
`pip install stashapp-tools`
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
index ab81e793..77462398 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.py
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -1,9 +1,13 @@
 # Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
 # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
 # Get the latest developers version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+# Note: To call this script outside of Stash, pass any argument.
+# Example: python changefilemonitor.py foofoo
import os
import sys
 import time
import shutil
+import fileinput
import hashlib
import json
from pathlib import Path
@@ -21,9 +25,9 @@
# Constant global variables --------------------------------------------
LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
-DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
PLUGIN_ARGS = False
PLUGIN_ARGS_MODE = False
+PLUGIN_ID = Path(__file__).stem.lower()
# GraphQL query to fetch all scenes
QUERY_ALL_SCENES = """
query AllScenes {
@@ -51,16 +55,45 @@
signal = Condition(mutex)
shouldUpdate = False
TargetPaths = []
+runningInPluginMode = False
# Configure local log file for plugin within plugin folder having a limited max log file size
logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
logger = logging.getLogger(Path(__file__).stem)
-
+
# **********************************************************************
# ----------------------------------------------------------------------
# Code section to fetch variables from Plugin UI and from changefilemonitor_settings.py
-json_input = json.loads(sys.stdin.read())
-FRAGMENT_SERVER = json_input["server_connection"]
+# Check if being called as Stash plugin
+gettingCalledAsStashPlugin = True
+stopLibraryMonitoring = False
+StdInRead = None
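+# Plugin mode is assumed when the script starts with no arguments and stdin supplies the JSON fragment; any command line argument selects script mode, and "stop" additionally requests a shutdown.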
+try:
+ if len(sys.argv) == 1:
+ print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
+ StdInRead = sys.stdin.read()
+ # for line in fileinput.input():
+ # StdInRead = line
+ # break
+ else:
+ if len(sys.argv) > 1 and sys.argv[1].lower() == "stop":
+ stopLibraryMonitoring = True
+ raise Exception("Not called in plugin mode.")
+except:
+ gettingCalledAsStashPlugin = False
+ print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! (stopLibraryMonitoring={stopLibraryMonitoring}) (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
+ pass
+
+if gettingCalledAsStashPlugin and StdInRead:
+ print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
+ runningInPluginMode = True
+ json_input = json.loads(StdInRead)
+ FRAGMENT_SERVER = json_input["server_connection"]
+else:
+ runningInPluginMode = False
+ FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': 9999, 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent}
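+    # Fallback connection details for script mode; these assume a default local Stash install listening on port 9999.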
+ print("Running in non-plugin mode!", file=sys.stderr)
+
stash = StashInterface(FRAGMENT_SERVER)
PLUGINCONFIGURATION = stash.get_configuration()["plugins"]
STASHCONFIGURATION = stash.get_configuration()["general"]
@@ -69,12 +102,12 @@
settings = {
"recursiveDisabled": False,
"runCleanAfterDelete": False,
+ "runGenerateContent": False,
"scanModified": False,
- "zgraphqlEndpoint": DEFAULT_ENDPOINT,
"zzdebugTracing": False,
"zzdryRun": False,
}
-PLUGIN_ID = "changefilemonitor"
+
if PLUGIN_ID in PLUGINCONFIGURATION:
settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
# ----------------------------------------------------------------------
@@ -82,6 +115,7 @@
RECURSIVE = settings["recursiveDisabled"] == False
SCAN_MODIFIED = settings["scanModified"]
RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"]
+RUN_GENERATE_CONTENT = settings["runGenerateContent"]
for item in STASHPATHSCONFIG:
stashPaths.append(item["path"])
@@ -94,7 +128,7 @@
PLUGIN_ARGS_MODE = json_input['args']["mode"]
except:
pass
-logger.info(f"\nStarting (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
+logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
if debugTracing: logger.info("settings: %s " % (settings,))
if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
@@ -104,11 +138,6 @@
logger.info("Dry run mode is enabled.")
dry_run_prefix = "Would've "
if debugTracing: logger.info("Debug Tracing................")
-# ToDo: Add split logic here to split a possible string array into an array
-endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint
-if not endpoint or endpoint == "":
- endpoint = DEFAULT_ENDPOINT
-if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
# ----------------------------------------------------------------------
# **********************************************************************
if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................")
@@ -207,48 +236,53 @@ def on_moved(event):
stash.metadata_scan(paths=TmpTargetPaths)
if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN)
- stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
- if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
- return
+ if RUN_GENERATE_CONTENT:
+ stash.metadata_generate()
+ if gettingCalledAsStashPlugin and shm_buffer[0] == CONTINUE_RUNNING_SIG:
+ stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
+ if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
+ return
else:
if debugTracing: logger.info("Nothing to scan.")
if shm_buffer[0] != CONTINUE_RUNNING_SIG:
logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
shm_a.close()
shm_a.unlink() # Call unlink only once to release the shared memory
- time.sleep(1)
- break
+ raise KeyboardInterrupt
except KeyboardInterrupt:
observer.stop()
if debugTracing: logger.info("Stopping observer................")
observer.join()
if debugTracing: logger.info("Exiting function................")
-# stop_library_monitor does not work because only one task can run at a time.
-# def stop_library_monitor():
- # if debugTracing: logger.info("Opening shared memory map.")
- # try:
- # shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
- # except:
- # pass
- # logger.info("Could not open shared memory map. Change File Monitor must not be running.")
- # return
- # type(shm_a.buf)
- # shm_buffer = shm_a.buf
- # len(shm_buffer)
- # shm_buffer[0] = 123
- # if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}")
- # shm_a.close()
- # shm_a.unlink() # Call unlink only once to release the shared memory
- # time.sleep(1)
- # return
+# This function is only useful when called outside of Stash.
+# Example: python changefilemonitor.py stop
+# Stops monitoring after the next file change triggers it.
+# ToDo: Add logic so it doesn't have to wait until the next file change
+def stop_library_monitor():
+ if debugTracing: logger.info("Opening shared memory map.")
+ try:
+ shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
+ except:
+ pass
+ logger.info("Could not open shared memory map. Change File Monitor must not be running.")
+ return
+ type(shm_a.buf)
+ shm_buffer = shm_a.buf
+ len(shm_buffer)
+ shm_buffer[0] = 123
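+    # Any value other than CONTINUE_RUNNING_SIG (99) tells the running monitor to exit the next time it checks this flag.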
+    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+ shm_a.close()
+ shm_a.unlink() # Call unlink only once to release the shared memory
+ time.sleep(1)
+ return
-if PLUGIN_ARGS_MODE == "start_library_monitor":
+if stopLibraryMonitoring:
+ stop_library_monitor()
+ if debugTracing: logger.info(f"stop_library_monitor EXIT................")
+elif PLUGIN_ARGS_MODE == "start_library_monitor" or not gettingCalledAsStashPlugin:
start_library_monitor()
if debugTracing: logger.info(f"start_library_monitor EXIT................")
-# elif PLUGIN_ARGS_MODE == "stop_library_monitor":
- # stop_library_monitor()
- # if debugTracing: logger.info(f"stop_library_monitor EXIT................")
else:
logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index b522cab8..3336bc47 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -11,14 +11,14 @@ settings:
displayName: Run Clean
description: Enable to run metadata clean task after file deletion.
type: BOOLEAN
+ runGenerateContent:
+ displayName: Run Generate Content
+ description: Enable to run metadata_generate (Generate Content) after metadata scan.
+ type: BOOLEAN
scanModified:
displayName: Scan Modifications
description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
type: BOOLEAN
- zgraphqlEndpoint:
- displayName: GraphQL Endpoint
- description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default.
- type: STRING
zzdebugTracing:
displayName: Debug Tracing
description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index d7c55889..a434970a 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -21,6 +21,7 @@
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
+PLUGIN_ID = Path(__file__).stem.lower()
DEFAULT_SEPERATOR = "-"
PLUGIN_ARGS = False
PLUGIN_ARGS_MODE = False
@@ -51,7 +52,7 @@
# Configure local log file for plugin within plugin folder having a limited max log file size
logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
-logger = logging.getLogger('renamefile')
+logger = logging.getLogger(PLUGIN_ID)
# **********************************************************************
# ----------------------------------------------------------------------
@@ -75,8 +76,8 @@
"zzdebugTracing": False,
"zzdryRun": False,
}
-if "renamefile" in pluginConfiguration:
- settings.update(pluginConfiguration["renamefile"])
+if PLUGIN_ID in pluginConfiguration:
+ settings.update(pluginConfiguration[PLUGIN_ID])
# ----------------------------------------------------------------------
debugTracing = settings["zzdebugTracing"]
@@ -94,6 +95,13 @@
pass
logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************")
if debugTracing: logger.info("settings: %s " % (settings,))
+# if PLUGIN_ID in pluginConfiguration:
+ # if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................")
+ # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]:
+ # if debugTracing: logger.info("Debug Tracing................")
+ # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
+ # if debugTracing: logger.info("Debug Tracing................")
+
if dry_run:
logger.info("Dry run mode is enabled.")
dry_run_prefix = "Would've "
@@ -131,6 +139,8 @@
double_separator = separator + separator
if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................")
+if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID=\"{PLUGIN_ID}\")................")
+if debugTracing: logger.info("Debug Tracing................")
# Function to make GraphQL requests
def graphql_request(query, variables=None):
From bf3bfe4a269188e076a0b4d5413669348cb99ab2 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Mon, 29 Jul 2024 03:21:39 -0400
Subject: [PATCH 13/39] Moved rarely used fields to config file
---
.../ChangeFileMonitor/changefilemonitor.py | 6 ++--
.../ChangeFileMonitor/changefilemonitor.yml | 4 ---
.../changefilemonitor_config.py | 12 +++++++
plugins/RenameFile/renamefile.py | 34 ++++++++++++-------
plugins/RenameFile/renamefile.yml | 12 -------
plugins/RenameFile/renamefile_settings.py | 4 +++
6 files changed, 40 insertions(+), 32 deletions(-)
create mode 100644 plugins/ChangeFileMonitor/changefilemonitor_config.py
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/ChangeFileMonitor/changefilemonitor.py
index 77462398..c8992c01 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.py
+++ b/plugins/ChangeFileMonitor/changefilemonitor.py
@@ -20,6 +20,7 @@
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
from threading import Lock, Condition
from multiprocessing import shared_memory
+from changefilemonitor_config import config # Import settings from changefilemonitor_config.py
# **********************************************************************
# Constant global variables --------------------------------------------
@@ -91,7 +92,7 @@
FRAGMENT_SERVER = json_input["server_connection"]
else:
runningInPluginMode = False
- FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': 9999, 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent}
+ FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent}
print("Running in non-plugin mode!", file=sys.stderr)
stash = StashInterface(FRAGMENT_SERVER)
@@ -102,7 +103,6 @@
settings = {
"recursiveDisabled": False,
"runCleanAfterDelete": False,
- "runGenerateContent": False,
"scanModified": False,
"zzdebugTracing": False,
"zzdryRun": False,
@@ -115,7 +115,7 @@
RECURSIVE = settings["recursiveDisabled"] == False
SCAN_MODIFIED = settings["scanModified"]
RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"]
-RUN_GENERATE_CONTENT = settings["runGenerateContent"]
+RUN_GENERATE_CONTENT = config['runGenerateContent']
for item in STASHPATHSCONFIG:
stashPaths.append(item["path"])
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/ChangeFileMonitor/changefilemonitor.yml
index 3336bc47..41e91fdb 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/ChangeFileMonitor/changefilemonitor.yml
@@ -11,10 +11,6 @@ settings:
displayName: Run Clean
description: Enable to run metadata clean task after file deletion.
type: BOOLEAN
- runGenerateContent:
- displayName: Run Generate Content
- description: Enable to run metadata_generate (Generate Content) after metadata scan.
- type: BOOLEAN
scanModified:
displayName: Scan Modifications
description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
diff --git a/plugins/ChangeFileMonitor/changefilemonitor_config.py b/plugins/ChangeFileMonitor/changefilemonitor_config.py
new file mode 100644
index 00000000..4c09f403
--- /dev/null
+++ b/plugins/ChangeFileMonitor/changefilemonitor_config.py
@@ -0,0 +1,12 @@
+# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developer's version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+config = {
+ # Enable to run metadata_generate (Generate Content) after metadata scan.
+ "runGenerateContent": False,
+
+ # The following fields are ONLY used when running ChangeFileMonitor in script mode
+ "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
+ "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
+ "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
+}
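In script mode, these endpoint_* values feed the FRAGMENT_SERVER dictionary shown in the changefilemonitor.py hunk above. A condensed sketch of that flow (the field names come from the diff; the printed URL is only an illustration):

```python
# Condensed sketch: script-mode server connection built from the config above.
config = {"endpoint_Scheme": "http", "endpoint_Host": "0.0.0.0", "endpoint_Port": 9999}

fragment_server = {
    "Scheme": config["endpoint_Scheme"],
    "Host": config["endpoint_Host"],
    "Port": config["endpoint_Port"],
}
print(f"{fragment_server['Scheme']}://{fragment_server['Host']}:{fragment_server['Port']}")
# -> http://0.0.0.0:9999
```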
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index a434970a..ccf7b27e 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -19,7 +19,6 @@
# Constant global variables --------------------------------------------
LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
-DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint
DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
PLUGIN_ID = Path(__file__).stem.lower()
DEFAULT_SEPERATOR = "-"
@@ -58,9 +57,10 @@
# ----------------------------------------------------------------------
# Code section to fetch variables from Plugin UI and from renamefile_settings.py
json_input = json.loads(sys.stdin.read())
-FRAGMENT_SERVER = json_input["server_connection"]
+FRAGMENT_SERVER = json_input['server_connection']
stash = StashInterface(FRAGMENT_SERVER)
pluginConfiguration = stash.get_configuration()["plugins"]
+
settings = {
"performerAppend": False,
"studioAppend": False,
@@ -68,11 +68,8 @@
"z_keyFIeldsIncludeInFileName": False,
"zafileRenameViaMove": False,
"zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
- "zgraphqlEndpoint": DEFAULT_ENDPOINT,
"zmaximumTagKeys": 12,
- "zpathToExclude": "",
"zseparators": DEFAULT_SEPERATOR,
- "ztagWhitelist": "",
"zzdebugTracing": False,
"zzdryRun": False,
}
@@ -95,11 +92,19 @@
pass
logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************")
if debugTracing: logger.info("settings: %s " % (settings,))
-# if PLUGIN_ID in pluginConfiguration:
- # if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................")
+
+if PLUGIN_ID in pluginConfiguration:
+ if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................")
# if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]:
# if debugTracing: logger.info("Debug Tracing................")
- # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
+ # try:
+ # stash.configure_plugin(PLUGIN_ID, settings)
+ # stash.configure_plugin("renamefile", {"zmaximumTagKeys": 12})
+ # except Exception as e:
+ # logger.error(f"configure_plugin failed!!! Error: {e}")
+ # logger.exception('Got exception on main handler')
+ # pass
+ # # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
# if debugTracing: logger.info("Debug Tracing................")
if dry_run:
@@ -109,18 +114,21 @@
max_tag_keys = settings["zmaximumTagKeys"] if settings["zmaximumTagKeys"] != 0 else 12 # Needed in case the user explicitly sets the value to zero in the UI
if debugTracing: logger.info("Debug Tracing................")
# ToDo: Add split logic here to split a possible string of paths into an array
-exclude_paths = settings["zpathToExclude"]
+exclude_paths = config["pathToExclude"]
exclude_paths = exclude_paths.split()
if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................")
# Extract tag whitelist from settings
-tag_whitelist = settings["ztagWhitelist"]
+tag_whitelist = config["tagWhitelist"]
if debugTracing: logger.info("Debug Tracing................")
if not tag_whitelist:
tag_whitelist = ""
if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................")
-endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint
-if not endpoint or endpoint == "":
- endpoint = DEFAULT_ENDPOINT
+
+endpointHost = json_input['server_connection']['Host']
+if endpointHost == "0.0.0.0":
+ endpointHost = "localhost"
+endpoint = f"{json_input['server_connection']['Scheme']}://{endpointHost}:{json_input['server_connection']['Port']}/graphql"
+
if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
# Extract rename_files and move_files settings from renamefile_settings.py
rename_files = config["rename_files"]
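The hunk above drops the zgraphqlEndpoint setting and instead derives the endpoint from the server_connection that Stash passes to the plugin. A standalone illustration of that derivation, with sample input values assumed:

```python
# Standalone illustration of the endpoint derivation above
# (sample server_connection values assumed).
server_connection = {"Scheme": "http", "Host": "0.0.0.0", "Port": 9999}

host = server_connection["Host"]
if host == "0.0.0.0":   # a wildcard bind address is not a reachable client target,
    host = "localhost"  # so fall back to localhost
endpoint = f"{server_connection['Scheme']}://{host}:{server_connection['Port']}/graphql"
print(endpoint)  # http://localhost:9999/graphql
```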
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 4bc81ac6..e5d2a0f0 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -27,26 +27,14 @@ settings:
displayName: Key Fields
description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma separated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of a user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".'
type: STRING
- zgraphqlEndpoint:
- displayName: GraphQL Endpoint
- description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default.
- type: STRING
zmaximumTagKeys:
displayName: Max Tag Keys
description: (Default=12) Maximum quantity of tag keys to append to file name. 0=Default(12); -1=No tags appended.
type: NUMBER
- zpathToExclude:
- displayName: Exclude Path
- description: 'Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"'
- type: STRING
zseparators:
displayName: Separator
description: '(Default=-) Define the separator to use between different parts of the filename. Example Usage: ","'
type: STRING
- ztagWhitelist:
- displayName: Tag Whitelist
- description: 'Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"'
- type: STRING
zzdebugTracing:
displayName: Debug Tracing
description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log
diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py
index c4eeab9b..24052f8a 100644
--- a/plugins/RenameFile/renamefile_settings.py
+++ b/plugins/RenameFile/renamefile_settings.py
@@ -37,6 +37,10 @@
"frame_rate": 'FR',
"date": '',
},
+ # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, separate them with spaces. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"
+ "pathToExclude": "",
+ # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"
+ "tagWhitelist": "",
# Define whether files should be renamed when moved
"rename_files": True,
# Define whether the original file name should be used if title is empty
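Per the renamefile.py hunk above, the pathToExclude value is a single whitespace-separated string that gets split into a list. A tiny illustration with assumed values:

```python
# Tiny illustration (assumed values): pathToExclude is split on whitespace.
path_to_exclude = "/path_1_to/exclude /someOtherPath2Exclude /yetAnotherPath"
exclude_paths = path_to_exclude.split()
print(exclude_paths)
# ['/path_1_to/exclude', '/someOtherPath2Exclude', '/yetAnotherPath']
```

Note that whitespace splitting implies the excluded paths themselves cannot contain spaces.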
From ff65f8ce192d06a758f8352e6fa032b14c94ff88 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Mon, 29 Jul 2024 23:18:26 -0400
Subject: [PATCH 14/39] Change plugin name from ChangeFileMonitor to
FileMonitor
---
plugins/ChangeFileMonitor/README.md | 35 -------------------
.../.gitignore | 0
plugins/FileMonitor/README.md | 35 +++++++++++++++++++
.../filemonitor.py} | 14 ++++----
.../filemonitor.yml} | 10 +++---
.../filemonitor_config.py} | 4 +--
.../manifest | 12 +++----
.../requirements.txt | 0
8 files changed, 55 insertions(+), 55 deletions(-)
delete mode 100644 plugins/ChangeFileMonitor/README.md
rename plugins/{ChangeFileMonitor => FileMonitor}/.gitignore (100%)
create mode 100644 plugins/FileMonitor/README.md
rename plugins/{ChangeFileMonitor/changefilemonitor.py => FileMonitor/filemonitor.py} (96%)
rename plugins/{ChangeFileMonitor/changefilemonitor.yml => FileMonitor/filemonitor.yml} (89%)
rename plugins/{ChangeFileMonitor/changefilemonitor_config.py => FileMonitor/filemonitor_config.py} (79%)
rename plugins/{ChangeFileMonitor => FileMonitor}/manifest (69%)
rename plugins/{ChangeFileMonitor => FileMonitor}/requirements.txt (100%)
diff --git a/plugins/ChangeFileMonitor/README.md b/plugins/ChangeFileMonitor/README.md
deleted file mode 100644
index 30cd2412..00000000
--- a/plugins/ChangeFileMonitor/README.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# ChangeFileMonitor: Ver 0.1.0 (By David Maisonave)
-ChangeFileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occurs in the Stash library paths.
-
-### Using ChangeFileMonitor as a plugin
-- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->ChangeFileMonitor**, and click on the [Start Library Monitor] button.
- - ![ChangeFileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
-- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
- - ![Kill_ChangeFileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
-
-### Using ChangeFileMonitor as a script
-**ChangeFileMonitor** can be called as a standalone script.
-- To start monitoring call the script and pass any argument.
- - python changefilemonitor.py **foofoo**
-- To stop **ChangeFileMonitor**, pass argument **stop**.
- - python changefilemonitor.py **stop**
- - After running above command line, **ChangeFileMonitor** will stop after the next file change occurs.
- - The stop command works to stop the standalone job and the Stash plugin task job.
-
-### Requirements
-`pip install stashapp-tools`
-`pip install pyYAML`
-`pip install watchdog`
-
-### Installation
-- Follow **Requirements** instructions.
-- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **ChangeFileMonitor**.
-- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\ChangeFileMonitor**).
-- Restart Stash.
-
-That's it!!!
-
-### Options
-- All options are accessible in the GUI via Settings->Plugins->Plugins->[ChangeFileMonitor].
-
-
diff --git a/plugins/ChangeFileMonitor/.gitignore b/plugins/FileMonitor/.gitignore
similarity index 100%
rename from plugins/ChangeFileMonitor/.gitignore
rename to plugins/FileMonitor/.gitignore
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
new file mode 100644
index 00000000..dcf32fdb
--- /dev/null
+++ b/plugins/FileMonitor/README.md
@@ -0,0 +1,35 @@
+# FileMonitor: Ver 0.1.0 (By David Maisonave)
+FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occur in the Stash library paths.
+
+### Using FileMonitor as a plugin
+- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor] button.
+ - ![FileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
+- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
+ - ![Kill_FileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
+
+### Using FileMonitor as a script
+**FileMonitor** can be called as a standalone script.
+- To start monitoring, call the script and pass any argument (e.g. **start**).
+ - python filemonitor.py **start**
+- To stop **FileMonitor**, pass argument **stop**.
+ - python filemonitor.py **stop**
+ - After running the above command line, **FileMonitor** will stop after the next file change occurs.
+ - The stop command works to stop the standalone job and the Stash plugin task job.
+
+### Requirements
+`pip install stashapp-tools`
+`pip install pyYAML`
+`pip install watchdog`
+
+### Installation
+- Follow **Requirements** instructions.
+- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**.
+- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\FileMonitor**).
+- Restart Stash.
+
+That's it!!!
+
+### Options
+- All options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
+
+
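A minimal sketch of the start/stop argument handling the README above describes (illustrative only; filemonitor.py's actual parsing may differ):

```python
# Minimal sketch of the start/stop argument handling described above
# (illustrative only; filemonitor.py's actual parsing may differ).
import sys

gettingCalledAsStashPlugin = len(sys.argv) == 1  # no arguments: running as a Stash plugin
stopLibraryMonitoring = len(sys.argv) > 1 and sys.argv[1].lower() == "stop"
```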
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.py b/plugins/FileMonitor/filemonitor.py
similarity index 96%
rename from plugins/ChangeFileMonitor/changefilemonitor.py
rename to plugins/FileMonitor/filemonitor.py
index c8992c01..ccfe0388 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -1,8 +1,8 @@
# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
-# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+# Get the latest developer's version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
# Note: To call this script outside of Stash, pass any argument.
-# Example: python changefilemonitor.py foofoo
+# Example: python filemonitor.py foofoo
import os
import sys
import time
@@ -20,7 +20,7 @@
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
from threading import Lock, Condition
from multiprocessing import shared_memory
-from changefilemonitor_config import config # Import settings from changefilemonitor_config.py
+from filemonitor_config import config # Import settings from filemonitor_config.py
# **********************************************************************
# Constant global variables --------------------------------------------
@@ -64,7 +64,7 @@
# **********************************************************************
# ----------------------------------------------------------------------
-# Code section to fetch variables from Plugin UI and from changefilemonitor_settings.py
+# Code section to fetch variables from Plugin UI and from filemonitor_settings.py
# Check if being called as Stash plugin
gettingCalledAsStashPlugin = True
stopLibraryMonitoring = False
@@ -147,7 +147,7 @@ def start_library_monitor():
global TargetPaths
try:
# Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
- shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=True, size=4)
+ shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4)
except:
pass
logger.info("Could not open shared memory map. Change File Monitor must be running. Can not run multiple instance of Change File Monitor.")
@@ -256,13 +256,13 @@ def on_moved(event):
if debugTracing: logger.info("Exiting function................")
# This function is only useful when called outside of Stash.
-# Example: python changefilemonitor.py stop
+# Example: python filemonitor.py stop
# Stops monitoring after triggered by the next file change.
# ToDo: Add logic so it doesn't have to wait until the next file change
def stop_library_monitor():
if debugTracing: logger.info("Opening shared memory map.")
try:
- shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_ChangeFileMonitor", create=False, size=4)
+ shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4)
except:
pass
logger.info("Could not open shared memory map. Change File Monitor must not be running.")
diff --git a/plugins/ChangeFileMonitor/changefilemonitor.yml b/plugins/FileMonitor/filemonitor.yml
similarity index 89%
rename from plugins/ChangeFileMonitor/changefilemonitor.yml
rename to plugins/FileMonitor/filemonitor.yml
index 41e91fdb..14a41783 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,7 +1,7 @@
-name: ChangeFileMonitor
+name: FileMonitor
description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.1.0
-url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+version: 0.2.0
+url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
settings:
recursiveDisabled:
displayName: No Recursive
@@ -17,7 +17,7 @@ settings:
type: BOOLEAN
zzdebugTracing:
displayName: Debug Tracing
- description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\ChangeFileMonitor\changefilemonitor.log
+ description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log
type: BOOLEAN
zzdryRun:
displayName: Dry Run
@@ -25,7 +25,7 @@ settings:
type: BOOLEAN
exec:
- python
- - "{pluginDir}/changefilemonitor.py"
+ - "{pluginDir}/filemonitor.py"
interface: raw
tasks:
- name: Start Library Monitor
diff --git a/plugins/ChangeFileMonitor/changefilemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
similarity index 79%
rename from plugins/ChangeFileMonitor/changefilemonitor_config.py
rename to plugins/FileMonitor/filemonitor_config.py
index 4c09f403..de0210b6 100644
--- a/plugins/ChangeFileMonitor/changefilemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -1,11 +1,11 @@
# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
-# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+# Get the latest developer's version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
config = {
# Enable to run metadata_generate (Generate Content) after metadata scan.
"runGenerateContent": False,
- # The following fields are ONLY used when running ChangeFileMonitor in script mode
+ # The following fields are ONLY used when running FileMonitor in script mode
"endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
"endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
"endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
diff --git a/plugins/ChangeFileMonitor/manifest b/plugins/FileMonitor/manifest
similarity index 69%
rename from plugins/ChangeFileMonitor/manifest
rename to plugins/FileMonitor/manifest
index 4a03c5f4..a6d00ee2 100644
--- a/plugins/ChangeFileMonitor/manifest
+++ b/plugins/FileMonitor/manifest
@@ -1,13 +1,13 @@
-id: changefilemonitor
-name: ChangeFileMonitor
+id: filemonitor
+name: FileMonitor
metadata:
description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.1.0
+version: 0.2.0
date: "2024-07-26 08:00:00"
requires: [pip install stashapp-tools, pip install pyYAML, pip install watchdog]
-source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/ChangeFileMonitor
+source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
files:
- README.md
-- changefilemonitor.yml
-- changefilemonitor.py
+- filemonitor.yml
+- filemonitor.py
- requirements.txt
diff --git a/plugins/ChangeFileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt
similarity index 100%
rename from plugins/ChangeFileMonitor/requirements.txt
rename to plugins/FileMonitor/requirements.txt
From 2bff74c7cf63107870513865ea9468214019d829 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Mon, 29 Jul 2024 23:32:45 -0400
Subject: [PATCH 15/39] Removed files per review
---
plugins/FileMonitor/.gitignore | 525 ---------------------------------
plugins/FileMonitor/manifest | 13 -
plugins/RenameFile/.gitignore | 525 ---------------------------------
plugins/RenameFile/manifest | 14 -
4 files changed, 1077 deletions(-)
delete mode 100644 plugins/FileMonitor/.gitignore
delete mode 100644 plugins/FileMonitor/manifest
delete mode 100644 plugins/RenameFile/.gitignore
delete mode 100644 plugins/RenameFile/manifest
diff --git a/plugins/FileMonitor/.gitignore b/plugins/FileMonitor/.gitignore
deleted file mode 100644
index dd93ef78..00000000
--- a/plugins/FileMonitor/.gitignore
+++ /dev/null
@@ -1,525 +0,0 @@
-$ cat .gitignore
-
-# Ignore these patterns
-desktop.ini
-~AutoRecover*.*
-*.aps
-*.exe
-*.idb
-*.ipch
-*.lib
-*.log
-*.log.1
-*.log.2
-*.manifest
-*.obj
-*.pch
-*.pdb
-*.sdf
-*.suo
-*.tlog
-*.user
-*.7z
-*.swp
-*.zip
-data.csv
-/boost
-/scintilla
-/bin
-/SQL
-/__pycache__
-__pycache__/
-renamefile_settings.cpython-310.pyc
-
-## Ignore Visual Studio temporary files, build results, and
-## files generated by popular Visual Studio add-ons.
-##
-## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
-
-# User-specific files
-*.rsuser
-*.suo
-*.user
-*.userosscache
-*.sln.docstates
-
-# User-specific files (MonoDevelop/Xamarin Studio)
-*.userprefs
-
-# Mono auto generated files
-mono_crash.*
-
-# Build results
-[Dd]ebug/
-[Dd]ebugPublic/
-[Rr]elease/
-[Rr]eleases/
-x64/
-x86/
-[Ww][Ii][Nn]32/
-[Aa][Rr][Mm]/
-[Aa][Rr][Mm]64/
-bld/
-[Bb]in/
-[Oo]bj/
-[Ll]og/
-[Ll]ogs/
-
-
-# Visual Studio 2015/2017 cache/options directory
-.vs/
-# Uncomment if you have tasks that create the project's static files in wwwroot
-#wwwroot/
-
-# Visual Studio 2017 auto generated files
-Generated\ Files/
-
-# MSTest test Results
-[Tt]est[Rr]esult*/
-[Bb]uild[Ll]og.*
-
-# NUnit
-*.VisualState.xml
-TestResult.xml
-nunit-*.xml
-
-# Build Results of an ATL Project
-[Dd]ebugPS/
-[Rr]eleasePS/
-dlldata.c
-
-# Benchmark Results
-BenchmarkDotNet.Artifacts/
-
-# .NET Core
-project.lock.json
-project.fragment.lock.json
-artifacts/
-
-# Tye
-.tye/
-
-# ASP.NET Scaffolding
-ScaffoldingReadMe.txt
-
-# StyleCop
-StyleCopReport.xml
-
-# Files built by Visual Studio
-*_i.c
-*_p.c
-*_h.h
-*_i.h
-*.ilk
-*.meta
-*.obj
-*.iobj
-*.pch
-*.pdb
-*.ipdb
-*.pgc
-*.pgd
-*.rsp
-*.sbr
-*.tlb
-*.tli
-*.tlh
-*.tmp
-*.tmp_proj
-*_wpftmp.csproj
-*.log
-*.vspscc
-*.vssscc
-.builds
-*.pidb
-*.svclog
-*.scc
-
-# Chutzpah Test files
-_Chutzpah*
-
-# Visual C++ cache files
-ipch/
-*.aps
-*.ncb
-*.opendb
-*.opensdf
-*.sdf
-*.cachefile
-*.VC.db
-*.VC.VC.opendb
-
-# Visual Studio profiler
-*.psess
-*.vsp
-*.vspx
-*.sap
-
-# Visual Studio Trace Files
-*.e2e
-
-# TFS 2012 Local Workspace
-$tf/
-
-# Guidance Automation Toolkit
-*.gpState
-
-# ReSharper is a .NET coding add-in
-_ReSharper*/
-*.[Rr]e[Ss]harper
-*.DotSettings.user
-# JustCode is a .NET coding add-in
-.JustCode
-
-# TeamCity is a build add-in
-_TeamCity*
-
-# DotCover is a Code Coverage Tool
-*.dotCover
-
-# AxoCover is a Code Coverage Tool
-.axoCover/*
-!.axoCover/settings.json
-
-# Coverlet is a free, cross platform Code Coverage Tool
-coverage*.json
-coverage*.xml
-coverage*.info
-
-# Visual Studio code coverage results
-*.coverage
-*.coveragexml
-
-# NCrunch
-_NCrunch_*
-.*crunch*.local.xml
-nCrunchTemp_*
-
-# MightyMoose
-*.mm.*
-AutoTest.Net/
-
-# Web workbench (sass)
-.sass-cache/
-
-# Installshield output folder
-[Ee]xpress/
-
-# DocProject is a documentation generator add-in
-DocProject/buildhelp/
-DocProject/Help/*.HxT
-DocProject/Help/*.HxC
-DocProject/Help/*.hhc
-DocProject/Help/*.hhk
-DocProject/Help/*.hhp
-DocProject/Help/Html2
-DocProject/Help/html
-
-# Click-Once directory
-publish/
-
-# Publish Web Output
-*.[Pp]ublish.xml
-*.azurePubxml
-# Note: Comment the next line if you want to checkin your web deploy settings,
-# but database connection strings (with potential passwords) will be unencrypted
-*.pubxml
-*.publishproj
-
-# Microsoft Azure Web App publish settings. Comment the next line if you want to
-# checkin your Azure Web App publish settings, but sensitive information contained
-# in these scripts will be unencrypted
-PublishScripts/
-
-# NuGet Packages
-*.nupkg
-# NuGet Symbol Packages
-*.snupkg
-# The packages folder can be ignored because of Package Restore
-**/[Pp]ackages/*
-# except build/, which is used as an MSBuild target.
-!**/[Pp]ackages/build/
-# Uncomment if necessary however generally it will be regenerated when needed
-#!**/[Pp]ackages/repositories.config
-# NuGet v3's project.json files produces more ignorable files
-*.nuget.props
-*.nuget.targets
-
-# Microsoft Azure Build Output
-csx/
-*.build.csdef
-
-# Microsoft Azure Emulator
-ecf/
-rcf/
-
-# Windows Store app package directories and files
-AppPackages/
-BundleArtifacts/
-Package.StoreAssociation.xml
-_pkginfo.txt
-*.appx
-*.appxbundle
-*.appxupload
-
-# Visual Studio cache files
-# files ending in .cache can be ignored
-*.[Cc]ache
-# but keep track of directories ending in .cache
-!?*.[Cc]ache/
-
-# Others
-ClientBin/
-~$*
-*~
-*.dbmdl
-*.dbproj.schemaview
-*.jfm
-*.pfx
-*.publishsettings
-node_modules/
-orleans.codegen.cs
-
-# Including strong name files can present a security risk
-# (https://github.com/github/gitignore/pull/2483#issue-259490424)
-#*.snk
-
-# Since there are multiple workflows, uncomment next line to ignore bower_components
-# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
-#bower_components/
-
-# RIA/Silverlight projects
-Generated_Code/
-
-# Backup & report files from converting an old project file
-# to a newer Visual Studio version. Backup files are not needed,
-# because we have git ;-)
-_UpgradeReport_Files/
-Backup*/
-UpgradeLog*.XML
-UpgradeLog*.htm
-ServiceFabricBackup/
-*.rptproj.bak
-
-# SQL Server files
-*.mdf
-*.ldf
-*.ndf
-
-# Business Intelligence projects
-*.rdl.data
-*.bim.layout
-*.bim_*.settings
-*.rptproj.rsuser
-*- [Bb]ackup.rdl
-*- [Bb]ackup ([0-9]).rdl
-*- [Bb]ackup ([0-9][0-9]).rdl
-
-# Microsoft Fakes
-FakesAssemblies/
-
-# GhostDoc plugin setting file
-*.GhostDoc.xml
-
-# Node.js Tools for Visual Studio
-.ntvs_analysis.dat
-node_modules/
-
-# Visual Studio 6 build log
-*.plg
-
-# Visual Studio 6 workspace options file
-*.opt
-
-# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
-*.vbw
-
-# Visual Studio LightSwitch build output
-**/*.HTMLClient/GeneratedArtifacts
-**/*.DesktopClient/GeneratedArtifacts
-**/*.DesktopClient/ModelManifest.xml
-**/*.Server/GeneratedArtifacts
-**/*.Server/ModelManifest.xml
-_Pvt_Extensions
-
-# Paket dependency manager
-.paket/paket.exe
-paket-files/
-
-# FAKE - F# Make
-.fake/
-
-# JetBrains Rider
-.idea/
-*.sln.iml
-
-# CodeRush
-.cr/
-
-# CodeRush personal settings
-.cr/personal
-
-# Python Tools for Visual Studio (PTVS)
-__pycache__/
-*.pyc
-
-# Cake - Uncomment if you are using it
-# tools/**
-# !tools/packages.config
-
-# Tabs Studio
-*.tss
-
-# Telerik's JustMock configuration file
-*.jmconfig
-
-# BizTalk build output
-*.btp.cs
-*.btm.cs
-*.odx.cs
-*.xsd.cs
-
-# OpenCover UI analysis results
-OpenCover/
-
-# Azure Stream Analytics local run output
-ASALocalRun/
-
-# MSBuild Binary and Structured Log
-*.binlog
-
-# NVidia Nsight GPU debugger configuration file
-*.nvuser
-
-# MFractors (Xamarin productivity tool) working folder
-.mfractor/
-
-# Local History for Visual Studio
-.localhistory/
-
-# BeatPulse healthcheck temp database
-healthchecksdb
-
-# Backup folder for Package Reference Convert tool in Visual Studio 2017
-MigrationBackup/
-
-# Ionide (cross platform F# VS Code tools) working folder
-.ionide/
-
-# Fody - auto-generated XML schema
-FodyWeavers.xsd
-
-##
-## Visual studio for Mac
-##
-
-
-# globs
-Makefile.in
-*.userprefs
-*.usertasks
-config.make
-config.status
-aclocal.m4
-install-sh
-autom4te.cache/
-*.tar.gz
-tarballs/
-test-results/
-
-# Mac bundle stuff
-*.dmg
-*.app
-
-# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
-# General
-.DS_Store
-.AppleDouble
-.LSOverride
-
-# Icon must end with two \r
-Icon
-
-
-# Thumbnails
-._*
-
-# Files that might appear in the root of a volume
-.DocumentRevisions-V100
-.fseventsd
-.Spotlight-V100
-.TemporaryItems
-.Trashes
-.VolumeIcon.icns
-.com.apple.timemachine.donotpresent
-
-# Directories potentially created on remote AFP share
-.AppleDB
-.AppleDesktop
-Network Trash Folder
-Temporary Items
-.apdisk
-
-# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
-# Windows thumbnail cache files
-Thumbs.db
-ehthumbs.db
-ehthumbs_vista.db
-
-# Dump file
-*.stackdump
-
-# Folder config file
-[Dd]esktop.ini
-
-# Recycle Bin used on file shares
-$RECYCLE.BIN/
-
-# Windows Installer files
-*.cab
-*.msi
-*.msix
-*.msm
-*.msp
-
-# Windows shortcuts
-*.lnk
-
-# JetBrains Rider
-.idea/
-*.sln.iml
-
-##
-## Visual Studio Code
-##
-.vscode/*
-!.vscode/settings.json
-!.vscode/tasks.json
-!.vscode/launch.json
-!.vscode/extensions.json
-
-# Other miscellaneous folders
-zzMiscellaneous/
-zzExcludeFromGithub/
-FromAnotherLanuageKit/
-_BadLanguages/
-
-# Exclude test data and temp files
-Test_Data/
-*__ExcludeFromRepo__*.*
-*__DoNotAddToRepo__*.*
-deleteme/
-RelatedProjects/
-obj/
-
-# Exclude temp and backup files
-*.bak
-
-# ###########################################
-# Unique to this project
-# ###########################################
-# Exclude reparsepoint files which are used to help view file using VS
-*.xaml.xml
-gitignore.txt
-
-GTranslate/obj/
diff --git a/plugins/FileMonitor/manifest b/plugins/FileMonitor/manifest
deleted file mode 100644
index a6d00ee2..00000000
--- a/plugins/FileMonitor/manifest
+++ /dev/null
@@ -1,13 +0,0 @@
-id: filemonitor
-name: FileMonitor
-metadata:
- description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths.
-version: 0.2.0
-date: "2024-07-26 08:00:00"
-requires: [pip install stashapp-tools, pip install pyYAML, pip install watchdog]
-source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
-files:
-- README.md
-- filemonitor.yml
-- filemonitor.py
-- requirements.txt
diff --git a/plugins/RenameFile/.gitignore b/plugins/RenameFile/.gitignore
deleted file mode 100644
index dd93ef78..00000000
--- a/plugins/RenameFile/.gitignore
+++ /dev/null
@@ -1,525 +0,0 @@
-$ cat .gitignore
-
-# Ignore these patterns
-desktop.ini
-~AutoRecover*.*
-*.aps
-*.exe
-*.idb
-*.ipch
-*.lib
-*.log
-*.log.1
-*.log.2
-*.manifest
-*.obj
-*.pch
-*.pdb
-*.sdf
-*.suo
-*.tlog
-*.user
-*.7z
-*.swp
-*.zip
-data.csv
-/boost
-/scintilla
-/bin
-/SQL
-/__pycache__
-__pycache__/
-renamefile_settings.cpython-310.pyc
-
-## Ignore Visual Studio temporary files, build results, and
-## files generated by popular Visual Studio add-ons.
-##
-## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
-
-# User-specific files
-*.rsuser
-*.suo
-*.user
-*.userosscache
-*.sln.docstates
-
-# User-specific files (MonoDevelop/Xamarin Studio)
-*.userprefs
-
-# Mono auto generated files
-mono_crash.*
-
-# Build results
-[Dd]ebug/
-[Dd]ebugPublic/
-[Rr]elease/
-[Rr]eleases/
-x64/
-x86/
-[Ww][Ii][Nn]32/
-[Aa][Rr][Mm]/
-[Aa][Rr][Mm]64/
-bld/
-[Bb]in/
-[Oo]bj/
-[Ll]og/
-[Ll]ogs/
-
-
-# Visual Studio 2015/2017 cache/options directory
-.vs/
-# Uncomment if you have tasks that create the project's static files in wwwroot
-#wwwroot/
-
-# Visual Studio 2017 auto generated files
-Generated\ Files/
-
-# MSTest test Results
-[Tt]est[Rr]esult*/
-[Bb]uild[Ll]og.*
-
-# NUnit
-*.VisualState.xml
-TestResult.xml
-nunit-*.xml
-
-# Build Results of an ATL Project
-[Dd]ebugPS/
-[Rr]eleasePS/
-dlldata.c
-
-# Benchmark Results
-BenchmarkDotNet.Artifacts/
-
-# .NET Core
-project.lock.json
-project.fragment.lock.json
-artifacts/
-
-# Tye
-.tye/
-
-# ASP.NET Scaffolding
-ScaffoldingReadMe.txt
-
-# StyleCop
-StyleCopReport.xml
-
-# Files built by Visual Studio
-*_i.c
-*_p.c
-*_h.h
-*_i.h
-*.ilk
-*.meta
-*.obj
-*.iobj
-*.pch
-*.pdb
-*.ipdb
-*.pgc
-*.pgd
-*.rsp
-*.sbr
-*.tlb
-*.tli
-*.tlh
-*.tmp
-*.tmp_proj
-*_wpftmp.csproj
-*.log
-*.vspscc
-*.vssscc
-.builds
-*.pidb
-*.svclog
-*.scc
-
-# Chutzpah Test files
-_Chutzpah*
-
-# Visual C++ cache files
-ipch/
-*.aps
-*.ncb
-*.opendb
-*.opensdf
-*.sdf
-*.cachefile
-*.VC.db
-*.VC.VC.opendb
-
-# Visual Studio profiler
-*.psess
-*.vsp
-*.vspx
-*.sap
-
-# Visual Studio Trace Files
-*.e2e
-
-# TFS 2012 Local Workspace
-$tf/
-
-# Guidance Automation Toolkit
-*.gpState
-
-# ReSharper is a .NET coding add-in
-_ReSharper*/
-*.[Rr]e[Ss]harper
-*.DotSettings.user
-# JustCode is a .NET coding add-in
-.JustCode
-
-# TeamCity is a build add-in
-_TeamCity*
-
-# DotCover is a Code Coverage Tool
-*.dotCover
-
-# AxoCover is a Code Coverage Tool
-.axoCover/*
-!.axoCover/settings.json
-
-# Coverlet is a free, cross platform Code Coverage Tool
-coverage*.json
-coverage*.xml
-coverage*.info
-
-# Visual Studio code coverage results
-*.coverage
-*.coveragexml
-
-# NCrunch
-_NCrunch_*
-.*crunch*.local.xml
-nCrunchTemp_*
-
-# MightyMoose
-*.mm.*
-AutoTest.Net/
-
-# Web workbench (sass)
-.sass-cache/
-
-# Installshield output folder
-[Ee]xpress/
-
-# DocProject is a documentation generator add-in
-DocProject/buildhelp/
-DocProject/Help/*.HxT
-DocProject/Help/*.HxC
-DocProject/Help/*.hhc
-DocProject/Help/*.hhk
-DocProject/Help/*.hhp
-DocProject/Help/Html2
-DocProject/Help/html
-
-# Click-Once directory
-publish/
-
-# Publish Web Output
-*.[Pp]ublish.xml
-*.azurePubxml
-# Note: Comment the next line if you want to checkin your web deploy settings,
-# but database connection strings (with potential passwords) will be unencrypted
-*.pubxml
-*.publishproj
-
-# Microsoft Azure Web App publish settings. Comment the next line if you want to
-# checkin your Azure Web App publish settings, but sensitive information contained
-# in these scripts will be unencrypted
-PublishScripts/
-
-# NuGet Packages
-*.nupkg
-# NuGet Symbol Packages
-*.snupkg
-# The packages folder can be ignored because of Package Restore
-**/[Pp]ackages/*
-# except build/, which is used as an MSBuild target.
-!**/[Pp]ackages/build/
-# Uncomment if necessary however generally it will be regenerated when needed
-#!**/[Pp]ackages/repositories.config
-# NuGet v3's project.json files produces more ignorable files
-*.nuget.props
-*.nuget.targets
-
-# Microsoft Azure Build Output
-csx/
-*.build.csdef
-
-# Microsoft Azure Emulator
-ecf/
-rcf/
-
-# Windows Store app package directories and files
-AppPackages/
-BundleArtifacts/
-Package.StoreAssociation.xml
-_pkginfo.txt
-*.appx
-*.appxbundle
-*.appxupload
-
-# Visual Studio cache files
-# files ending in .cache can be ignored
-*.[Cc]ache
-# but keep track of directories ending in .cache
-!?*.[Cc]ache/
-
-# Others
-ClientBin/
-~$*
-*~
-*.dbmdl
-*.dbproj.schemaview
-*.jfm
-*.pfx
-*.publishsettings
-node_modules/
-orleans.codegen.cs
-
-# Including strong name files can present a security risk
-# (https://github.com/github/gitignore/pull/2483#issue-259490424)
-#*.snk
-
-# Since there are multiple workflows, uncomment next line to ignore bower_components
-# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
-#bower_components/
-
-# RIA/Silverlight projects
-Generated_Code/
-
-# Backup & report files from converting an old project file
-# to a newer Visual Studio version. Backup files are not needed,
-# because we have git ;-)
-_UpgradeReport_Files/
-Backup*/
-UpgradeLog*.XML
-UpgradeLog*.htm
-ServiceFabricBackup/
-*.rptproj.bak
-
-# SQL Server files
-*.mdf
-*.ldf
-*.ndf
-
-# Business Intelligence projects
-*.rdl.data
-*.bim.layout
-*.bim_*.settings
-*.rptproj.rsuser
-*- [Bb]ackup.rdl
-*- [Bb]ackup ([0-9]).rdl
-*- [Bb]ackup ([0-9][0-9]).rdl
-
-# Microsoft Fakes
-FakesAssemblies/
-
-# GhostDoc plugin setting file
-*.GhostDoc.xml
-
-# Node.js Tools for Visual Studio
-.ntvs_analysis.dat
-node_modules/
-
-# Visual Studio 6 build log
-*.plg
-
-# Visual Studio 6 workspace options file
-*.opt
-
-# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
-*.vbw
-
-# Visual Studio LightSwitch build output
-**/*.HTMLClient/GeneratedArtifacts
-**/*.DesktopClient/GeneratedArtifacts
-**/*.DesktopClient/ModelManifest.xml
-**/*.Server/GeneratedArtifacts
-**/*.Server/ModelManifest.xml
-_Pvt_Extensions
-
-# Paket dependency manager
-.paket/paket.exe
-paket-files/
-
-# FAKE - F# Make
-.fake/
-
-# JetBrains Rider
-.idea/
-*.sln.iml
-
-# CodeRush
-.cr/
-
-# CodeRush personal settings
-.cr/personal
-
-# Python Tools for Visual Studio (PTVS)
-__pycache__/
-*.pyc
-
-# Cake - Uncomment if you are using it
-# tools/**
-# !tools/packages.config
-
-# Tabs Studio
-*.tss
-
-# Telerik's JustMock configuration file
-*.jmconfig
-
-# BizTalk build output
-*.btp.cs
-*.btm.cs
-*.odx.cs
-*.xsd.cs
-
-# OpenCover UI analysis results
-OpenCover/
-
-# Azure Stream Analytics local run output
-ASALocalRun/
-
-# MSBuild Binary and Structured Log
-*.binlog
-
-# NVidia Nsight GPU debugger configuration file
-*.nvuser
-
-# MFractors (Xamarin productivity tool) working folder
-.mfractor/
-
-# Local History for Visual Studio
-.localhistory/
-
-# BeatPulse healthcheck temp database
-healthchecksdb
-
-# Backup folder for Package Reference Convert tool in Visual Studio 2017
-MigrationBackup/
-
-# Ionide (cross platform F# VS Code tools) working folder
-.ionide/
-
-# Fody - auto-generated XML schema
-FodyWeavers.xsd
-
-##
-## Visual studio for Mac
-##
-
-
-# globs
-Makefile.in
-*.userprefs
-*.usertasks
-config.make
-config.status
-aclocal.m4
-install-sh
-autom4te.cache/
-*.tar.gz
-tarballs/
-test-results/
-
-# Mac bundle stuff
-*.dmg
-*.app
-
-# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
-# General
-.DS_Store
-.AppleDouble
-.LSOverride
-
-# Icon must end with two \r
-Icon
-
-
-# Thumbnails
-._*
-
-# Files that might appear in the root of a volume
-.DocumentRevisions-V100
-.fseventsd
-.Spotlight-V100
-.TemporaryItems
-.Trashes
-.VolumeIcon.icns
-.com.apple.timemachine.donotpresent
-
-# Directories potentially created on remote AFP share
-.AppleDB
-.AppleDesktop
-Network Trash Folder
-Temporary Items
-.apdisk
-
-# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
-# Windows thumbnail cache files
-Thumbs.db
-ehthumbs.db
-ehthumbs_vista.db
-
-# Dump file
-*.stackdump
-
-# Folder config file
-[Dd]esktop.ini
-
-# Recycle Bin used on file shares
-$RECYCLE.BIN/
-
-# Windows Installer files
-*.cab
-*.msi
-*.msix
-*.msm
-*.msp
-
-# Windows shortcuts
-*.lnk
-
-# JetBrains Rider
-.idea/
-*.sln.iml
-
-##
-## Visual Studio Code
-##
-.vscode/*
-!.vscode/settings.json
-!.vscode/tasks.json
-!.vscode/launch.json
-!.vscode/extensions.json
-
-# Other miscellaneous folders
-zzMiscellaneous/
-zzExcludeFromGithub/
-FromAnotherLanuageKit/
-_BadLanguages/
-
-# Exclude test data and temp files
-Test_Data/
-*__ExcludeFromRepo__*.*
-*__DoNotAddToRepo__*.*
-deleteme/
-RelatedProjects/
-obj/
-
-# Exclude temp and backup files
-*.bak
-
-# ###########################################
-# Unique to this project
-# ###########################################
-# Exclude reparsepoint files which are used to help view file using VS
-*.xaml.xml
-gitignore.txt
-
-GTranslate/obj/
diff --git a/plugins/RenameFile/manifest b/plugins/RenameFile/manifest
deleted file mode 100644
index a98d0dcf..00000000
--- a/plugins/RenameFile/manifest
+++ /dev/null
@@ -1,14 +0,0 @@
-id: renamefile
-name: RenameFile
-metadata:
- description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.0
-date: "2024-07-26 08:00:00"
-requires: [pip install stashapp-tools, pip install pyYAML]
-source_repository: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
-files:
-- README.md
-- renamefile.yml
-- renamefile.py
-- renamefile_settings.py
-- requirements.txt
From e5d8f82f30aa11dd6890e399022ff09d7f1d8a88 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sat, 10 Aug 2024 13:19:33 -0400
Subject: [PATCH 16/39] Added issue templates
---
.github/ISSUE_TEMPLATE/bug_report.yml | 77 ++++++++++++++++++
.github/ISSUE_TEMPLATE/bug_report_plugin.yml | 84 ++++++++++++++++++++
.github/ISSUE_TEMPLATE/discussion.yml | 42 ++++++++++
.github/ISSUE_TEMPLATE/feature_request.yml | 35 ++++++++
.github/ISSUE_TEMPLATE/help.yml | 37 +++++++++
5 files changed, 275 insertions(+)
create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml
create mode 100644 .github/ISSUE_TEMPLATE/bug_report_plugin.yml
create mode 100644 .github/ISSUE_TEMPLATE/discussion.yml
create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml
create mode 100644 .github/ISSUE_TEMPLATE/help.yml
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 00000000..061780ac
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,77 @@
+name: 🐞 Bug
+description: Create a bug report
+title: "🐞[Bug] Your_Short_title"
+labels: [Bug]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thank you for taking the time to fill out this bug report!
+ Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+ Steps to reproduce the behavior:
+ 1. Go to '...'
+ 2. Click on '....'
+ 3. Scroll down to '....'
+ 4. See error
+ - type: textarea
+ id: steps
+ attributes:
+ label: Please enter steps to reproduce the behavior.
+ validations:
+ required: true
+ - type: input
+ id: stash_ver
+ attributes:
+ label: Stash Version (from Settings -> About)
+ placeholder: e.g. v0.26.2
+ validations:
+ required: true
+ - type: input
+ id: os
+ attributes:
+ label: What Operating System (OS)?
+ placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS)
+ validations:
+ required: true
+ - type: input
+ id: device
+ attributes:
label: Phone or tablet
+ placeholder: e.g. iPhone6, Galaxy Tab A9+
+ validations:
+ required: false
+ - type: input
+ id: browser
+ attributes:
+ label: What browser and version?
+ placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
+ validations:
+ required: true
+ - type: textarea
+ id: expected-behavior
+ attributes:
+ label: Expected Behavior
+ description: What was the expected behavior?
+ validations:
+ required: false
+ - type: textarea
+ id: logging
+ attributes:
+ label: Logging
+ description: Enter Stash logging.
+ validations:
+ required: false
+ - type: textarea
+ id: snapshots
+ attributes:
+ label: Snapshot(s)
+ description: Optionally attach snapshot(s) which displays the bug.
+ validations:
+ required: false
+ - type: textarea
+ id: additional
+ attributes:
+ label: Additional context
+ description: Add any other context about the problem here.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/bug_report_plugin.yml b/.github/ISSUE_TEMPLATE/bug_report_plugin.yml
new file mode 100644
index 00000000..5c03d45b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report_plugin.yml
@@ -0,0 +1,84 @@
+name: 🪲 Plugin Bug
+description: Create a plugin bug report
+title: "🪲[EnterPluginNameHere] Your_Short_title"
+labels: [Plugin_Bug]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thank you for taking the time to fill out this bug report!
+ Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+ Steps to reproduce the behavior:
+ 1. Go to '...'
+ 2. Click on '....'
+ 3. Scroll down to '....'
+ 4. See error
+ - type: textarea
+ id: steps
+ attributes:
+ label: Please enter steps to reproduce the behavior.
+ validations:
+ required: true
+ - type: input
+ id: stash_ver
+ attributes:
+ label: Stash Version (from Settings -> About)
+ placeholder: e.g. v0.26.2
+ validations:
+ required: true
+ - type: input
+ id: os
+ attributes:
+ label: What Operating System (OS)?
+ placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS)
+ validations:
+ required: true
+ - type: input
+ id: device
+ attributes:
label: Phone or tablet
+ placeholder: e.g. iPhone6, Galaxy Tab A9+
+ validations:
+ required: false
+ - type: input
+ id: browser
+ attributes:
+ label: What browser and version?
+ placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
+ validations:
+ required: true
+ - type: textarea
+ id: install
+ attributes:
+ label: The pip install for the plugin
+ description: pip install stashapp-tools --upgrade, pip install pyYAML
+ validations:
+ required: true
+ - type: textarea
+ id: expected-behavior
+ attributes:
+ label: Expected Behavior
+ description: What was the expected behavior?
+ validations:
+ required: false
+ - type: textarea
+ id: logging
+ attributes:
+ label: Logging
+ description: Enter Stash logging and plugin logging file if applicable.
+ validations:
+ required: false
+ - type: textarea
+ id: snapshots
+ attributes:
+ label: Snapshot(s)
+ description: Optionally attach snapshot(s) which displays the bug.
+ validations:
+ required: false
+ - type: textarea
+ id: additional
+ attributes:
+ label: Additional context
+ description: Add any other context about the problem here.
+ validations:
+ required: false
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/discussion.yml b/.github/ISSUE_TEMPLATE/discussion.yml
new file mode 100644
index 00000000..177b35cd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/discussion.yml
@@ -0,0 +1,42 @@
+name: 🛗Discussion / Request for Commentary [RFC]
+description: This is for issues that will be discussed and won't necessarily result directly in commits or pull requests.
+title: "🛗[RFC]: Your_Short_title"
+labels: [RFC]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ This is for issues that will be discussed and won't necessarily result directly in commits or pull requests.
+ Please ensure that you respect people's time and attention and understand that people are volunteering their time, so concision is ideal and considerate.
+ Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+ - type: textarea
+ id: Title
+ attributes:
+ label: Optional title of the topic to discuss.
+ validations:
+ required: false
+ - type: textarea
+ id: Summary
+ attributes:
label: Describe the scope of your topic and your goals, ideally within a single paragraph or TL;DR, so that people can determine at a glance whether they can contribute.
+ validations:
+ required: true
+ - type: textarea
+ id: Details
+ attributes:
label: Only required if the summary and title don't cover everything.
+ validations:
+ required: false
+ - type: textarea
+ id: Examples
+ attributes:
label: If you have picture or video examples, post them here.
+ validations:
+ required: false
+ - type: textarea
+ id: snapshots
+ attributes:
+ label: Snapshot(s)
+ description: Optionally attach additional snapshot(s) which helps describe the discussion.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 00000000..9593dc41
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,35 @@
+name: 💡️Feature Request
+description: Suggest improvement for this project
+title: "💡️[Enhancement]: Your_Short_title"
+labels: [Enhancement]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Please fill out the following fields with as much detail as possible:
+ Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+ - type: textarea
+ id: problem
+ attributes:
label: If the feature request is related to a problem, give a clear and concise description of the problem. Ex. I'm always frustrated when [...]
+ validations:
+ required: false
+ - type: textarea
+ id: solution
+ attributes:
+ label: Describe the solution you'd like. A clear and concise description of what you want to happen.
+ validations:
+ required: true
+ - type: textarea
+ id: alternatives
+ attributes:
+ label: Describe alternatives you've considered. A clear and concise description of any alternative solutions or features you've considered.
+ validations:
+ required: false
+ - type: textarea
+ id: Snapshots
+ attributes:
+ label: Snapshots / Images
+ description: Add any other context or screenshots about the feature request here, which can help explain the feature, and a description of what to look for in the image(s).
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/help.yml b/.github/ISSUE_TEMPLATE/help.yml
new file mode 100644
index 00000000..dae58e2e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/help.yml
@@ -0,0 +1,37 @@
+name: ❓Help
+description: Post your question
+title: "❓[Help]: Your_Short_title"
+labels: [Question]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Please fill out the following fields with as much detail as possible, so that we can better answer your question.
+ Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
+ - type: textarea
+ id: question
+ attributes:
+ label: Please enter a clear and concise detailed question.
+ validations:
+ required: true
+ - type: input
+ id: os
+ attributes:
+ label: What Operating System (OS)?
+ placeholder: e.g. Windows, MacOS, Linux
+ validations:
+ required: false
+ - type: input
+ id: browser
+ attributes:
+ label: What browser and version?
+ placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
+ validations:
+ required: false
+ - type: textarea
+ id: snapshots
+ attributes:
+ label: Snapshot(s)
+ description: Optionally attach snapshot(s) which helps describe the question.
+ validations:
+ required: false
From 971d0ea408d8503acc8cdcd03f9e0cd1f105da56 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Fri, 16 Aug 2024 04:03:12 -0400
Subject: [PATCH 17/39] Added run as a service UI option and scheduler option
Can start FileMonitor from Stash UI as a service.
Stop FileMonitor from the UI.
Scheduler service for multiple UI tasks.
Scheduler service for plugins.
UI option to trim backup DB files.
Config file option to exclude paths.
Config file option to only include paths.
Config file option to only include specified file types.
Very minor changes to RenameFile plugin.
---
plugins/FileMonitor/README.md | 171 +++++-
plugins/FileMonitor/StashPluginHelper.py | 363 +++++++++++++
plugins/FileMonitor/filemonitor.py | 625 +++++++++++++++-------
plugins/FileMonitor/filemonitor.yml | 36 +-
plugins/FileMonitor/filemonitor_config.py | 126 ++++-
plugins/FileMonitor/requirements.txt | 5 +-
plugins/RenameFile/README.md | 4 +-
plugins/RenameFile/renamefile.py | 2 +-
plugins/RenameFile/requirements.txt | 2 +-
9 files changed, 1106 insertions(+), 228 deletions(-)
create mode 100644 plugins/FileMonitor/StashPluginHelper.py
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index dcf32fdb..a196509c 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,35 +1,164 @@
-# FileMonitor: Ver 0.1.0 (By David Maisonave)
-FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occurs in the Stash library paths.
+# FileMonitor: Ver 0.8.2 (By David Maisonave)
+FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
+- Updates Stash when any file changes occur in the Stash library.
+- **Task Scheduler**: Runs scheduled tasks based on the scheduler configuration in **filemonitor_config.py**.
-### Using FileMonitor as a plugin
-- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor] button.
- - ![FileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
-- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
- - ![Kill_FileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
+## Starting FileMonitor from the UI
+From the GUI, FileMonitor can be started as a service or as a plugin. The recommended method is to start it as a service. When started as a service, it will appear in the Task Queue momentarily, and then disappear as it starts running in the background.
+- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor Service] button.
+ - ![FileMonitorService](https://github.com/user-attachments/assets/b12aeca9-37a8-447f-90da-26e9440735ad)
+ - **Important Note**: At first, this will show up as a plugin in the Task Queue momentarily. It will then disappear from the Task Queue and run in the background as a service.
+- To stop FileMonitor, click on the [Stop Library Monitor] button.
+- The **[Monitor as a Plugin]** option is mainly available for backwards compatibility and for test purposes.
+
-### Using FileMonitor as a script
+## Using FileMonitor as a script
**FileMonitor** can be called as a standalone script.
-- To start monitoring call the script and pass any argument.
- - python filemonitor.py **start**
-- To stop **FileMonitor**, pass argument **stop**.
- - python filemonitor.py **stop**
- - After running above command line, **FileMonitor** will stop after the next file change occurs.
+- To start monitoring, call the script and pass **--url** followed by the Stash URL.
+ - python filemonitor.py --url http://localhost:9999
+- To stop **FileMonitor**, pass argument **--stop**.
+ - python filemonitor.py **--stop**
- The stop command works to stop the standalone job and the Stash plugin task job.
+- To restart **FileMonitor**, pass argument **--restart**.
+ - python filemonitor.py **--restart**
+ - The restart command restarts FileMonitor as a Task in Stash.
-### Requirements
-`pip install stashapp-tools`
-`pip install pyYAML`
-`pip install watchdog`
+# Task Scheduler
+To enable the scheduler, go to **Stash->Settings->Plugins->Plugins->FileMonitor** and enable the **Scheduler** option.
+![ReoccurringTaskScheduler](https://github.com/user-attachments/assets/5a7bf6a4-3bd6-4692-a6c3-e9f8f4664f14)
-### Installation
+- **Warning:** The below tasks are already preconfigured in the scheduler, and when the scheduler is enabled all of these tasks are enabled.
+ - Auto Tag -> [Auto Tag] (Daily)
+ - Maintenance -> [Clean] (every 2 days)
+ - Maintenance -> [Clean Generated Files] (every 2 days)
+ - Maintenance -> [Optimise Database] (Daily)
+ - Generated Content-> [Generate] (Every Sunday at 7AM)
+ - Library -> [Scan] (Weekly) (Every Sunday at 3AM)
+ - Backup -> [Backup] 2nd sunday of the month at 1AM
+- The example tasks are disabled by default because they have a zero frequency value.
+
+To configure the schedule or to add new tasks, edit the **task_scheduler** section in the **filemonitor_config.py** file.
+```` python
+"task_scheduler": [
+ # To create a daily task, include each day of the week for the weekday field.
+ {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
+ {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+
+ # The following tasks are scheduled for 3 days out of the week.
+ {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
+ {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+
+ # The following tasks are scheduled weekly
+ {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM)
+ {"task" : "Scan", "weekday" : "sunday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every Sunday at 3AM)
+
+ # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
+ # The monthly field value must be 1, 2, 3, or 4.
+ # 1 = 1st specified weekday of the month. Example 1st monday.
+ # 2 = 2nd specified weekday of the month. Example 2nd monday of the month.
+ # 3 = 3rd specified weekday of the month.
+ # 4 = 4th specified weekday of the month.
+ # The following task is scheduled monthly
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+
+    # The following task uses the syntax for plugins. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+    # This task requires the plugin [Path Parser], and it's disabled by default.
+ {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time.
+
+    # Example#A1: Task to call the call_GQL API with custom input
+ {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
+
+    # Example#A2: Task to call a python script. When this task is executed, the keyword <plugin_path> is replaced by filemonitor.py's current directory.
+    # The args field is NOT required.
+    {"task" : "python", "script" : "<plugin_path>test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
+
+    # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
+ {"task" : "Scan", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+ {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+ {"task" : "Clean", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate]
+
+ # Example#A4: Task which calls Migrations -> [Rename generated files]
+ {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
+
+    # The above weekday method is the more reliable way to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
+
+    # The below examples use the frequency field method, which can work with minutes and hours. A zero frequency value disables the task.
+    # Note: Both seconds and days are also supported for the frequency field.
+    #       However, seconds is mainly used for test purposes.
+    #       And days usage is discouraged, because it only works if FileMonitor is running non-stop for that many days.
+    # The below example tasks use hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
+
+    # Example#B1: Task for calling another Stash plugin, which needs the plugin name and plugin ID.
+ {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+
+ # Example#B2: Task to execute a command
+ {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+
+    # Example#B3: Task to execute a command with an optional args field, using the keyword <plugin_path>, which gets replaced with filemonitor.py's current directory.
+    {"task" : "execute", "command" : "<plugin_path>HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+],
+````
+- To add plugins to the task list, both the Plugin-ID and the plugin name are required. The plugin ID is usually the file name of the script without the extension.
+- Tasks can be scheduled to run monthly, weekly, hourly, and by minutes.
+- The scheduler list uses two types of syntax. One is **weekday** based, and the other is **frequency** based.
+ - **weekday Based**
+ - Use the weekday based syntax for daily, weekly, and monthly schedules.
+    - All the weekday based methods must have a **weekday** field and a **time** field, which specify the day(s) of the week and the time to start the task.
+ - **Daily**:
+ - A daily task populates the weekday field with all the days of the week.
+ - **Daily Example**:
+ - Starts a task daily at 6AM.
+ - `{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"},`
+ - **Weekly**:
+ - **Weekly Example**:
+      - Starts a task weekly, every Monday at 9AM.
+ - `{"task" : "Generate", "weekday" : "monday", "time" : "09:00"},`
+  - **Monthly**:
+    - The monthly syntax is similar to the weekly format, but it also includes a **"monthly"** field, which must be set to 1, 2, 3, or 4 (see the day-of-month sketch after this list).
+    - **Monthly Examples**:
+      - Starts a task once a month on the 3rd Sunday of the month at 1AM.
+      - `{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 3},`
+      - Starts a task at 2PM once a month on the 1st Saturday of the month.
+      - `{"task" : "Clean", "weekday" : "saturday", "time" : "14:00", "monthly" : 1},`
+
+ - **Frequency Based**
+ - The frequency field can be in **minutes** or **hours**.
+ - The frequency value must be a number greater than zero. A frequency value of zero will disable the task on the schedule.
+ - **Frequency Based Examples**:
+ - Starts a task every 24 hours.
+ - `{"task" : "Auto Tag", "hours" : 24},`
+ - Starts a (**plugin**) task every 30 minutes.
+ - `{"task" : "Create Tags", "pluginId" : "pathParser", "minutes" : 30},`
+    - The frequency field also supports **days** and **seconds**.
+      - **seconds** is mainly used for test purposes.
+      - The use of **days** is discouraged, because it only works if FileMonitor is running non-stop for that many days.
+        - For example, if days is used with 30 days, FileMonitor would have to be running non-stop for 30 days before the task is activated. If it's restarted at any time during the 30 days, the countdown restarts.
+        - The weekday based syntax is recommended over days, because even if FileMonitor restarts during the week or month, the task will still start as long as FileMonitor is running at the scheduled activation time.
+
+- The scheduler feature requires `pip install schedule`
+ - If the user leaves the scheduler disabled, **schedule** does NOT have to be installed.
+- For best results, use the scheduler with FileMonitor running as a service.
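+
+For reference, here is a minimal sketch (not part of the plugin) of how the **monthly** field maps to a day-of-month window, mirroring the check performed in **filemonitor.py**:
+```` python
+import datetime
+
+def monthly_window_matches(monthly, today=None):
+    # Occurrence 1 covers days 1-7, occurrence 2 covers days 8-14, and so on.
+    # The weekday itself is enforced separately by the schedule library.
+    today = today or datetime.date.today()
+    first_allowed = ((monthly - 1) * 7) + 1
+    last_allowed = monthly * 7
+    return first_allowed <= today.day <= last_allowed
+
+# A task with "monthly" : 2 only runs when its weekday lands on days 8-14.
+print(monthly_window_matches(2, datetime.date(2024, 7, 14)))  # True  (2nd Sunday)
+print(monthly_window_matches(2, datetime.date(2024, 7, 21)))  # False (3rd Sunday)
+````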
+
+## Requirements
+- pip install -r requirements.txt
+- Or manually install each requirement:
+ - `pip install stashapp-tools --upgrade`
+ - `pip install pyYAML`
+ - `pip install watchdog`
+ - `pip install schedule`
+
+## Installation
- Follow **Requirements** instructions.
- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**.
- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\FileMonitor**).
-- Restart Stash.
+- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins.
That's it!!!
-### Options
-- All options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
+## Options
+- Main options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
+  - When the UI option [Max DB Backups] is set to a value greater than 1, and the scheduler is enabled, the quantity of database backup files is trimmed down to the set [**Max DB Backups**] value after the scheduler executes the Backup task.
+  - The other options are self-explanatory from the UI.
+- Additional options are available in **filemonitor_config.py**. The options are well documented in the commented code.
diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py
new file mode 100644
index 00000000..218e055c
--- /dev/null
+++ b/plugins/FileMonitor/StashPluginHelper.py
@@ -0,0 +1,363 @@
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import inspect, sys, os, pathlib, logging, json
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+import __main__
+
+# StashPluginHelper (By David Maisonave aka Axter)
+ # See end of this file for example usage
+ # Log Features:
+ # Can optionally log out to multiple outputs for each Log or Trace call.
+ # Logging includes source code line number
+ # Sets a maximum plugin log file size
+ # Stash Interface Features:
+ # Gets STASH_URL value from command line argument and/or from STDIN_READ
+ # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
+ # Sets PLUGIN_ID based on the main script file name (in lower case)
+ # Gets PLUGIN_TASK_NAME value
+ # Sets pluginSettings (The plugin UI settings)
+ # Misc Features:
+ # Gets DRY_RUN value from command line argument and/or from UI and/or from config file
+ # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
+ # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
+ # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+class StashPluginHelper(StashInterface):
+ # Primary Members for external reference
+ PLUGIN_TASK_NAME = None
+ PLUGIN_ID = None
+ PLUGIN_CONFIGURATION = None
+ pluginSettings = None
+ pluginConfig = None
+ STASH_INTERFACE_INIT = False
+ STASH_URL = None
+ STASH_CONFIGURATION = None
+ JSON_INPUT = None
+ DEBUG_TRACING = False
+ DRY_RUN = False
+ CALLED_AS_STASH_PLUGIN = False
+ RUNNING_IN_COMMAND_LINE_MODE = False
+ FRAGMENT_SERVER = None
+ STASHPATHSCONFIG = None
+ STASH_PATHS = []
+
+ # printTo argument
+ LOG_TO_FILE = 1
+    LOG_TO_CONSOLE = 2 # Note: Output is only visible when running in command line mode. In plugin mode, this output is lost.
+ LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+ LOG_TO_STASH = 8
+ LOG_TO_WARN = 16
+ LOG_TO_ERROR = 32
+ LOG_TO_CRITICAL = 64
+ LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
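+    # Example: Log("some message", printTo = LOG_TO_FILE + LOG_TO_CONSOLE) writes to both the
+    # plugin log file and the console, because each target is tested with a bitwise AND (printTo & target).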
+
+ # Misc class variables
+ MAIN_SCRIPT_NAME = None
+ LOG_LEVEL = logging.INFO
+ LOG_FILE_DIR = None
+ LOG_FILE_NAME = None
+ STDIN_READ = None
+ pluginLog = None
+ logLinePreviousHits = []
+ thredPool = None
+
+ # Prefix message value
+ LEV_TRACE = "TRACE: "
+ LEV_DBG = "DBG: "
+ LEV_INF = "INF: "
+ LEV_WRN = "WRN: "
+ LEV_ERR = "ERR: "
+ LEV_CRITICAL = "CRITICAL: "
+
+ # Default format
+ LOG_FORMAT = "[%(asctime)s] %(message)s"
+
+ # Externally modifiable variables
+ log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set the target output for normal logging
+ # Warn message goes to both plugin log file and stash when sent to Stash log file.
+ log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+
+ def __init__(self,
+ debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
+ logFormat = LOG_FORMAT, # Plugin log line format
+ dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+ maxbytes = 2*1024*1024, # Max size of plugin log file
+ backupcount = 2, # Backup counts when log file size reaches max size
+ logToWrnSet = 0, # Customize the target output set which will get warning logging
+ logToErrSet = 0, # Customize the target output set which will get error logging
+ logToNormSet = 0, # Customize the target output set which will get normal logging
+ logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
+ mainScriptName = "", # The main plugin script file name (full path)
+ pluginID = "",
+ settings = None, # Default settings for UI fields
+ config = None, # From pluginName_config.py or pluginName_setting.py
+ fragmentServer = None,
+ stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
+ DebugTraceFieldName = "zzdebugTracing",
+ DryRunFieldName = "zzdryRun",
+ setStashLoggerAsPluginLogger = False):
+ self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+ if logToWrnSet: self.log_to_wrn_set = logToWrnSet
+ if logToErrSet: self.log_to_err_set = logToErrSet
+ if logToNormSet: self.log_to_norm = logToNormSet
+ if stash_url and len(stash_url): self.STASH_URL = stash_url
+ self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
+ self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower()
+ # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
+ self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
+ self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
+ RFH = RotatingFileHandler(
+ filename=self.LOG_FILE_NAME,
+ mode='a',
+ maxBytes=maxbytes,
+ backupCount=backupcount,
+ encoding=None,
+ delay=0
+ )
+ if fragmentServer:
+ self.FRAGMENT_SERVER = fragmentServer
+ else:
+ self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
+
+ if debugTracing: self.DEBUG_TRACING = debugTracing
+ if config:
+ self.pluginConfig = config
+ if DebugTraceFieldName in self.pluginConfig:
+ self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName]
+ if DryRunFieldName in self.pluginConfig:
+ self.DRY_RUN = self.pluginConfig[DryRunFieldName]
+
+ if len(sys.argv) > 1:
+            self.RUNNING_IN_COMMAND_LINE_MODE = True
+ if not debugTracing or not stash_url:
+ for argValue in sys.argv[1:]:
+ if argValue.lower() == "--trace":
+ self.DEBUG_TRACING = True
+ elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
+ self.DRY_RUN = True
+ elif ":" in argValue and not self.STASH_URL:
+ self.STASH_URL = argValue
+ if self.STASH_URL:
+ endpointUrlArr = self.STASH_URL.split(":")
+ if len(endpointUrlArr) == 3:
+ self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
+ self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
+ self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
+ super().__init__(self.FRAGMENT_SERVER)
+ self.STASH_INTERFACE_INIT = True
+ else:
+ try:
+ self.STDIN_READ = sys.stdin.read()
+ self.CALLED_AS_STASH_PLUGIN = True
+ except:
+ pass
+ if self.STDIN_READ:
+ self.JSON_INPUT = json.loads(self.STDIN_READ)
+ if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
+ self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
+ self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
+ self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
+ super().__init__(self.FRAGMENT_SERVER)
+ self.STASH_INTERFACE_INIT = True
+
+ if self.STASH_INTERFACE_INIT:
+ self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
+ self.STASH_CONFIGURATION = self.get_configuration()["general"]
+ self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
+ for item in self.STASHPATHSCONFIG:
+ self.STASH_PATHS.append(item["path"])
+ if settings:
+ self.pluginSettings = settings
+ if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
+ self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
+ if DebugTraceFieldName in self.pluginSettings:
+ self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName]
+ if DryRunFieldName in self.pluginSettings:
+ self.DRY_RUN = self.pluginSettings[DryRunFieldName]
+ if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+
+ logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
+ self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
+ if setStashLoggerAsPluginLogger:
+ self.log = self.pluginLog
+
+ def __del__(self):
+ self.thredPool.shutdown(wait=False)
+
+ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False):
+ if printTo == 0:
+ printTo = self.log_to_norm
+ elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+ logLevel = logging.ERROR
+ printTo = self.log_to_err_set
+ elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+ logLevel = logging.CRITICAL
+ printTo = self.log_to_err_set
+ elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+ logLevel = logging.WARN
+ printTo = self.log_to_wrn_set
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ LN_Str = f"[LN:{lineNo}]"
+ # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
+ if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
+ if levelStr == "": levelStr = self.LEV_DBG
+ if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.INFO or logLevel == logging.DEBUG:
+ if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
+ if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.WARN:
+ if levelStr == "": levelStr = self.LEV_WRN
+ if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.ERROR:
+ if levelStr == "": levelStr = self.LEV_ERR
+ if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.CRITICAL:
+ if levelStr == "": levelStr = self.LEV_CRITICAL
+ if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+
+ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1):
+ if printTo == 0: printTo = self.LOG_TO_FILE
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ logLev = logging.INFO if logAlways else logging.DEBUG
+ if self.DEBUG_TRACING or logAlways:
+ if logMsg == "":
+ logMsg = f"Line number {lineNo}..."
+ self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways)
+
+ # Log once per session. Only logs the first time called from a particular line number in the code.
+ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False):
+ lineNo = inspect.currentframe().f_back.f_lineno
+ if self.DEBUG_TRACING or logAlways:
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ return
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo)
+
+ # Log INFO on first call, then do Trace on remaining calls.
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True):
+ if printTo == 0: printTo = self.LOG_TO_FILE
+ lineNo = inspect.currentframe().f_back.f_lineno
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ if traceOnRemainingCalls:
+ self.Trace(logMsg, printTo, logAlways, lineNo)
+ else:
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Log(logMsg, printTo, logging.INFO, lineNo)
+
+ def Warn(self, logMsg, printTo = 0):
+ if printTo == 0: printTo = self.log_to_wrn_set
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(logMsg, printTo, logging.WARN, lineNo)
+
+ def Error(self, logMsg, printTo = 0):
+ if printTo == 0: printTo = self.log_to_err_set
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(logMsg, printTo, logging.ERROR, lineNo)
+
+ def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+ if printTo == 0: printTo = self.log_to_norm
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
+ printTo, logLevel, lineNo)
+
+ def ExecuteProcess(self, args, ExecDetach=False):
+ import platform, subprocess
+ is_windows = any(platform.win32_ver())
+ pid = None
+ self.Trace(f"is_windows={is_windows} args={args}")
+ if is_windows:
+ if ExecDetach:
+ self.Trace("Executing process using Windows DETACHED_PROCESS")
+ DETACHED_PROCESS = 0x00000008
+ pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
+ else:
+ pid = subprocess.Popen(args, shell=True).pid
+ else:
+ self.Trace("Executing process using normal Popen")
+ pid = subprocess.Popen(args).pid
+ self.Trace(f"pid={pid}")
+ return pid
+
+ def ExecutePythonScript(self, args, ExecDetach=True):
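+        # Prepends the current interpreter (sys.executable), so the script runs with the same Python environment as this plugin.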
+ PythonExe = f"{sys.executable}"
+ argsWithPython = [f"{PythonExe}"] + args
+ return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
+
+ # Extends class StashInterface with functions which are not yet in the class
+ def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
+ query = """
+ mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+ metadataAutoTag(input: $input)
+ }
+ """
+ metadata_autotag_input = {
+ "paths":paths,
+ "performers": performers,
+ "studios":studios,
+ "tags":tags,
+ }
+ result = self.call_GQL(query, {"input": metadata_autotag_input})
+ return result
+
+ def backup_database(self):
+ return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+ def optimise_database(self):
+ return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
+
+ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
+ query = """
+ mutation MetadataCleanGenerated($input: CleanGeneratedInput!) {
+ metadataCleanGenerated(input: $input)
+ }
+ """
+ clean_metadata_input = {
+ "blobFiles": blobFiles,
+ "dryRun": dryRun,
+ "imageThumbnails": imageThumbnails,
+ "markers": markers,
+ "screenshots": screenshots,
+ "sprites": sprites,
+ "transcodes": transcodes,
+ }
+ result = self.call_GQL(query, {"input": clean_metadata_input})
+ return result
+
+ def rename_generated_files(self):
+ return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
+ # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None):
+ # query = """
+ # query FindDuplicateScenes($distance: Int) {
+ # findDuplicateScenes(distance: $distance) {
+ # ...SceneSlim
+ # }
+ # }
+ # """
+ # if fragment:
+ # query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+ # else:
+ # query = """
+ # query FindDuplicateScenes($distance: Int) {
+ # findDuplicateScenes(distance: $distance)
+ # }
+ # """
+ # variables = {
+ # "distance": distance
+ # }
+ # result = self.call_GQL(query, variables)
+ # return result['findDuplicateScenes']
\ No newline at end of file
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index ccfe0388..6d6752d1 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -1,180 +1,352 @@
-# Description: This is a Stash plugin which updates Stash if any changes occurs in the Stash library paths.
+# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths, and it also runs a task scheduler.
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
-# Note: To call this script outside of Stash, pass any argument.
-# Example: python filemonitor.py foofoo
-import os
-import sys
-import time
-import shutil
-import fileinput
-import hashlib
-import json
-from pathlib import Path
-import requests
-import logging
-from logging.handlers import RotatingFileHandler
-import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
-from stashapi.stashapp import StashInterface
-from watchdog.observers import Observer # This is also needed for event attributes
+# Note: To call this script outside of Stash, pass the --url argument with the Stash URL.
+# Example: python filemonitor.py --url http://localhost:9999
+import os, sys, time, pathlib, argparse
+from StashPluginHelper import StashPluginHelper
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
+from watchdog.observers import Observer # This is also needed for event attributes
from threading import Lock, Condition
from multiprocessing import shared_memory
from filemonitor_config import config # Import settings from filemonitor_config.py
-# **********************************************************************
-# Constant global variables --------------------------------------------
-LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
-FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
-PLUGIN_ARGS = False
-PLUGIN_ARGS_MODE = False
-PLUGIN_ID = Path(__file__).stem.lower()
-# GraphQL query to fetch all scenes
-QUERY_ALL_SCENES = """
- query AllScenes {
- allScenes {
- id
- updated_at
- }
- }
-"""
-RFH = RotatingFileHandler(
- filename=LOG_FILE_PATH,
- mode='a',
- maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K
- backupCount=2,
- encoding=None,
- delay=0
-)
-TIMEOUT = 5
CONTINUE_RUNNING_SIG = 99
+STOP_RUNNING_SIG = 32
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
+parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
+parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop (kill) a running FileMonitor task.')
+parser.add_argument('--restart', '-r', dest='restart', action='store_true', help='Restart FileMonitor.')
+parser.add_argument('--silent', '--quiet', '-q', dest='quit', action='store_true', help='Run in silent mode. No output to console or stderr. Use this when running from pythonw.exe')
+parse_args = parser.parse_args()
+
+logToErrSet = 0
+logToNormSet = 0
+if parse_args.quit:
+ logToErrSet = 1
+ logToNormSet = 1
+
+settings = {
+ "recursiveDisabled": False,
+ "turnOnScheduler": False,
+ "zmaximumBackups": 0,
+ "zzdebugTracing": False
+}
+stash = StashPluginHelper(
+ stash_url=parse_args.stash_url,
+ debugTracing=parse_args.trace,
+ settings=settings,
+ config=config,
+ logToErrSet=logToErrSet,
+ logToNormSet=logToNormSet
+ )
+stash.Status()
+stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
+
+# stash.Log(f"{stash.find_duplicate_scenes()}")
-# **********************************************************************
-# Global variables --------------------------------------------
exitMsg = "Change success!!"
mutex = Lock()
signal = Condition(mutex)
shouldUpdate = False
-TargetPaths = []
-runningInPluginMode = False
-# Configure local log file for plugin within plugin folder having a limited max log file size
-logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
-logger = logging.getLogger(Path(__file__).stem)
-
-# **********************************************************************
-# ----------------------------------------------------------------------
-# Code section to fetch variables from Plugin UI and from filemonitor_settings.py
-# Check if being called as Stash plugin
-gettingCalledAsStashPlugin = True
-stopLibraryMonitoring = False
-StdInRead = None
-try:
- if len(sys.argv) == 1:
- print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
- StdInRead = sys.stdin.read()
- # for line in fileinput.input():
- # StdInRead = line
- # break
- else:
- if len(sys.argv) > 1 and sys.argv[1].lower() == "stop":
- stopLibraryMonitoring = True
- raise Exception("Not called in plugin mode.")
-except:
- gettingCalledAsStashPlugin = False
- print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! (stopLibraryMonitoring={stopLibraryMonitoring}) (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
- pass
-
-if gettingCalledAsStashPlugin and StdInRead:
- print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
- runningInPluginMode = True
- json_input = json.loads(StdInRead)
- FRAGMENT_SERVER = json_input["server_connection"]
-else:
- runningInPluginMode = False
- FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent}
- print("Running in non-plugin mode!", file=sys.stderr)
+SHAREDMEMORY_NAME = "DavidMaisonaveAxter_FileMonitor" # Unique name for shared memory
+RECURSIVE = stash.pluginSettings["recursiveDisabled"] == False
+SCAN_MODIFIED = stash.pluginConfig["scanModified"]
+RUN_CLEAN_AFTER_DELETE = stash.pluginConfig["runCleanAfterDelete"]
+RUN_GENERATE_CONTENT = stash.pluginConfig['runGenerateContent']
+SCAN_ON_ANY_EVENT = stash.pluginConfig['onAnyEvent']
+SIGNAL_TIMEOUT = stash.pluginConfig['timeOut'] if stash.pluginConfig['timeOut'] > 0 else 1
-stash = StashInterface(FRAGMENT_SERVER)
-PLUGINCONFIGURATION = stash.get_configuration()["plugins"]
-STASHCONFIGURATION = stash.get_configuration()["general"]
-STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
-stashPaths = []
-settings = {
- "recursiveDisabled": False,
- "runCleanAfterDelete": False,
- "scanModified": False,
- "zzdebugTracing": False,
- "zzdryRun": False,
-}
+CREATE_SPECIAL_FILE_TO_EXIT = stash.pluginConfig['createSpecFileToExit']
+DELETE_SPECIAL_FILE_ON_STOP = stash.pluginConfig['deleteSpecFileInStop']
+SPECIAL_FILE_DIR = f"{stash.LOG_FILE_DIR}{os.sep}working"
+if CREATE_SPECIAL_FILE_TO_EXIT and not os.path.exists(SPECIAL_FILE_DIR):
+ os.makedirs(SPECIAL_FILE_DIR)
+# Unique name to trigger shutting down FileMonitor
+SPECIAL_FILE_NAME = f"{SPECIAL_FILE_DIR}{os.sep}trigger_to_kill_filemonitor_by_david_maisonave.txt"
+if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME):
+ os.remove(SPECIAL_FILE_NAME)
+
+fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else []
+includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
+excludePathChanges = stash.pluginConfig['excludePathChanges']
-if PLUGIN_ID in PLUGINCONFIGURATION:
- settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
-# ----------------------------------------------------------------------
-debugTracing = settings["zzdebugTracing"]
-RECURSIVE = settings["recursiveDisabled"] == False
-SCAN_MODIFIED = settings["scanModified"]
-RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"]
-RUN_GENERATE_CONTENT = config['runGenerateContent']
+stash.Trace(f"(includePathChanges={includePathChanges})")
-for item in STASHPATHSCONFIG:
- stashPaths.append(item["path"])
+if stash.DRY_RUN:
+ stash.Log("Dry run mode is enabled.")
+stash.Trace(f"(SCAN_MODIFIED={SCAN_MODIFIED}) (SCAN_ON_ANY_EVENT={SCAN_ON_ANY_EVENT}) (RECURSIVE={RECURSIVE})")
-# Extract dry_run setting from settings
-DRY_RUN = settings["zzdryRun"]
-dry_run_prefix = ''
-try:
- PLUGIN_ARGS = json_input['args']
- PLUGIN_ARGS_MODE = json_input['args']["mode"]
-except:
- pass
-logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************")
-if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
-if debugTracing: logger.info("settings: %s " % (settings,))
-if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
-if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")
+StartFileMonitorAsAPluginTaskName = "Monitor as a Plugin"
+StartFileMonitorAsAServiceTaskName = "Start Library Monitor Service"
+StartFileMonitorAsAPluginTaskID = "start_library_monitor"
+StartFileMonitorAsAServiceTaskID = "start_library_monitor_service"
-if DRY_RUN:
- logger.info("Dry run mode is enabled.")
- dry_run_prefix = "Would've "
-if debugTracing: logger.info("Debug Tracing................")
-# ----------------------------------------------------------------------
-# **********************************************************************
-if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................")
+FileMonitorPluginIsOnTaskQue = stash.CALLED_AS_STASH_PLUGIN
+StopLibraryMonitorWaitingInTaskQueue = False
+JobIdInTheQue = 0
+def isJobWaitingToRun():
+ global StopLibraryMonitorWaitingInTaskQueue
+ global JobIdInTheQue
+ global FileMonitorPluginIsOnTaskQue
+ FileMonitorPluginIsOnTaskQue = False
+ jobIsWaiting = False
+ taskQue = stash.job_queue()
+ for jobDetails in taskQue:
+ stash.Trace(f"(Job ID({jobDetails['id']})={jobDetails})")
+ if jobDetails['status'] == "READY":
+ if jobDetails['description'] == "Running plugin task: Stop Library Monitor":
+ StopLibraryMonitorWaitingInTaskQueue = True
+ JobIdInTheQue = jobDetails['id']
+ jobIsWaiting = True
+ elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAPluginTaskName) > -1:
+ FileMonitorPluginIsOnTaskQue = True
+ JobIdInTheQue = 0
+ return jobIsWaiting
+
+if stash.CALLED_AS_STASH_PLUGIN and stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
+ stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})")
+
+class StashScheduler: # Stash Scheduler
+ def __init__(self):
+ import schedule # pip install schedule # https://github.com/dbader/schedule
+ global SIGNAL_TIMEOUT
+ for task in stash.pluginConfig['task_scheduler']:
+ if 'task' not in task:
+ stash.Error(f"Task is missing required task field. task={task}")
+ elif 'hours' in task:
+ if task['hours'] > 0:
+ stash.Log(f"Adding to scheduler task '{task['task']}' at {task['hours']} hours interval")
+ schedule.every(task['hours']).hours.do(self.runTask, task)
+ if task['hours'] > 167: # Warn when using a week or more of hours
+ stash.Warn(f"Using {task['hours']} hours in task '{task['task']}'. Should use the weekday syntax instead.")
+ elif 'minutes' in task:
+ if task['minutes'] > 0:
+ stash.Log(f"Adding to scheduler task '{task['task']}' at {task['minutes']} minutes interval")
+ schedule.every(task['minutes']).minutes.do(self.runTask, task)
+ if task['minutes'] > 10079: # Warn when using a week or more of minutes
+ stash.Warn(f"Using {task['minutes']} minutes in task '{task['task']}'. Should use the weekday syntax instead.")
+ elif 'days' in task: # Left here for backward compatibility, but should use weekday logic instead.
+ if task['days'] > 0:
+ stash.Log(f"Adding to scheduler task '{task['task']}' at {task['days']} days interval")
+ schedule.every(task['days']).days.do(self.runTask, task)
+ if task['days'] > 6: # Warn when using a week or more of days
+ stash.Warn(f"Using {task['days']} days in task '{task['task']}'. Should use the weekday syntax instead.")
+ elif 'seconds' in task: # This is mainly here for test purposes only
+ if task['seconds'] > 0:
+ if SIGNAL_TIMEOUT > task['seconds']:
+ stash.Log(f"Changing SIGNAL_TIMEOUT from value {SIGNAL_TIMEOUT} to {task['seconds']} to allow '{task['task']}' to get triggered timely")
+ SIGNAL_TIMEOUT = task['seconds']
+ stash.Log(f"Adding to scheduler task '{task['task']}' at {task['seconds']} seconds interval")
+ schedule.every(task['seconds']).seconds.do(self.runTask, task)
+ elif 'weekday' in task and 'time' in task:
+ if task['time'].upper() == "DISABLED":
+ stash.Trace(f"Skipping task '{task['task']}', because it's disabled. To enable this task, change the time field to a valid time. Example: '07:00'")
+ elif len(task['time']) != 5 or task['time'][2] != ":":
+ stash.Error(f"Skipping task '{task['task']}', because time ({task['time']}) is invalid. Change the time field to a valid time. Example: '07:00'")
+ else:
+ weekDays = task['weekday'].lower()
+ if 'monthly' in task:
+ stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
+ else:
+ stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}")
+
+ hasValidDay = False
+ if "monday" in weekDays:
+ schedule.every().monday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+ if "tuesday" in weekDays:
+ schedule.every().tuesday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+ if "wednesday" in weekDays:
+ schedule.every().wednesday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+ if "thursday" in weekDays:
+ schedule.every().thursday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+ if "friday" in weekDays:
+ schedule.every().friday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+ if "saturday" in weekDays:
+ schedule.every().saturday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+ if "sunday" in weekDays:
+ schedule.every().sunday.at(task['time']).do(self.runTask, task)
+ hasValidDay = True
+
+ if not hasValidDay:
+ stash.Error(f"Task '{task['task']}' is missing valid day(s) in weekday field. weekday = '{task['weekday']}'")
+ else:
+ stash.Error(f"Task '{task['task']}' is missing fields.")
+ self.checkSchedulePending()
+
+ # ToDo: Add asynchronous threading logic to running task.
+ def runTask(self, task):
+ import datetime
+ stash.Trace(f"Running task {task}")
+ if 'monthly' in task:
+ dayOfTheMonth = datetime.datetime.today().day
+ FirstAllowedDate = ((task['monthly'] - 1) * 7) + 1
+ LastAllowedDate = task['monthly'] * 7
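+            # Occurrence N covers days ((N-1)*7)+1 through N*7 of the month; e.g. "monthly" : 2 covers days 8 through 14.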
+ if dayOfTheMonth < FirstAllowedDate or dayOfTheMonth > LastAllowedDate:
+ stash.Log(f"Skipping task {task['task']} because today is not the right {task['weekday']} of the month. Target range is between {FirstAllowedDate} and {LastAllowedDate}.")
+ return
+
+ targetPaths = includePathChanges
+ if 'paths' in task:
+ targetPaths = task['paths']
+
+ result = None
+ if task['task'] == "Clean":
+ result = stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN)
+ elif task['task'] == "Clean Generated Files":
+ result = stash.metadata_clean_generated()
+ elif task['task'] == "Generate":
+ result = stash.metadata_generate()
+ elif task['task'] == "Backup":
+ stash.LogOnce("Note: Backup task does not get listed in the Task Queue, but user can verify that it started by looking in the Stash log file as an INFO level log line.")
+ result = stash.backup_database()
+ maximumBackup = stash.pluginSettings['zmaximumBackups']
+ if "maxBackups" in task:
+ maximumBackup = task['maxBackups']
+ if maximumBackup < 2:
+ stash.TraceOnce(f"Skipping DB backup file trim because zmaximumBackups={maximumBackup}. Value has to be greater than 1.")
+ elif 'backupDirectoryPath' in stash.STASH_CONFIGURATION:
+ if len(stash.STASH_CONFIGURATION['backupDirectoryPath']) < 5:
+ stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath length is to short. Len={len(stash.STASH_CONFIGURATION['backupDirectoryPath'])}. Only support length greater than 4 characters.")
+ elif os.path.exists(stash.STASH_CONFIGURATION['backupDirectoryPath']):
+ stash.LogOnce(f"Checking quantity of DB backups if path {stash.STASH_CONFIGURATION['backupDirectoryPath']} exceeds {maximumBackup} backup files.")
+ self.trimDbFiles(stash.STASH_CONFIGURATION['backupDirectoryPath'], maximumBackup)
+ else:
+ stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath does NOT exist. backupDirectoryPath={stash.STASH_CONFIGURATION['backupDirectoryPath']}")
+ elif task['task'] == "Scan":
+ result = stash.metadata_scan(paths=targetPaths)
+ elif task['task'] == "Auto Tag":
+ result = stash.metadata_autotag(paths=targetPaths)
+ elif task['task'] == "Optimise Database":
+ result = stash.optimise_database()
+ elif task['task'] == "RenameGeneratedFiles":
+ result = stash.rename_generated_files()
+ elif task['task'] == "GQL":
+ result = stash.call_GQL(task['input'])
+ elif task['task'] == "python":
+ if 'script' in task and task['script'] != "":
+            script = task['script'].replace("<plugin_path>", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
+ stash.Log(f"Executing python script {script}.")
+ args = [script]
+ if 'args' in task and len(task['args']) > 0:
+ args = args + [task['args']]
+ detached = True
+ if 'detach' in task:
+ detached = task['detach']
+ result = f"Python process PID = {stash.ExecutePythonScript(args, ExecDetach=detached)}"
+ else:
+ stash.Error(f"Can not run task '{task['task']}', because it's missing 'script' field.")
+ elif task['task'] == "execute":
+ if 'command' in task and task['command'] != "":
+            cmd = task['command'].replace("<plugin_path>", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
+ args = [cmd]
+ if 'args' in task and len(task['args']) > 0:
+ args = args + [task['args']]
+ stash.Log(f"Executing command arguments {args}.")
+ result = f"Execute process PID = {stash.ExecuteProcess(args)}"
+ else:
+ stash.Error(f"Can not run task '{task['task']}', because it's missing 'command' field.")
+ else:
+ # ToDo: Add code to check if plugin is installed.
+ try:
+ if 'pluginId' in task and task['pluginId'] != "":
+ stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}")
+ stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
+ else:
+ stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.")
+ stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.")
+ stash.LogOnce(f"If task '{task['task']}' is supposed to be a plugin, make sure to include the pluginId field in the task. task={task}")
+ except Exception as e:
+ stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}")
+ pass
+
+ if result:
+ stash.Trace(f"Task '{task['task']}' result={result}")
+
+ def trimDbFiles(self, dbPath, maxFiles):
+ if not os.path.exists(dbPath):
+ stash.LogOnce(f"Exiting trimDbFiles, because path {dbPath} does not exists.")
+ return
+        if len(dbPath) < 5: # For safety and security, short paths are not supported.
+            stash.Warn(f"Exiting trimDbFiles, because path {dbPath} is too short. Len={len(dbPath)}. Path string must be at least 5 characters in length.")
+ return
+ stashPrefixSqlDbFileName = "stash-go.sqlite."
+ dbFiles = sorted(os.listdir(dbPath))
+ n = len(dbFiles)
+ for i in range(0, n-maxFiles):
+ dbFilePath = f"{dbPath}{os.sep}{dbFiles[i]}"
+ if dbFiles[i].startswith(stashPrefixSqlDbFileName):
+ stash.Warn(f"Deleting DB file {dbFilePath}")
+ os.remove(dbFilePath)
+ else:
+ stash.LogOnce(f"Skipping deleting file '{dbFiles[i]}', because the file doesn't start with string '{stashPrefixSqlDbFileName}'.")
+
+ def checkSchedulePending(self):
+ import schedule # pip install schedule # https://github.com/dbader/schedule
+ stash.TraceOnce("Checking if task pending.")
+ schedule.run_pending()
+ stash.TraceOnce("Pending check complete.")
+
+TargetPaths = []
def start_library_monitor():
global shouldUpdate
- global TargetPaths
+ global TargetPaths
try:
# Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
- shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4)
+ shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=True, size=4)
except:
- pass
- logger.info("Could not open shared memory map. Change File Monitor must be running. Can not run multiple instance of Change File Monitor.")
+ stash.Error(f"Could not open shared memory map ({SHAREDMEMORY_NAME}). Change File Monitor must be running. Can not run multiple instance of Change File Monitor. Stop FileMonitor before trying to start it again.")
return
type(shm_a.buf)
shm_buffer = shm_a.buf
len(shm_buffer)
shm_buffer[0] = CONTINUE_RUNNING_SIG
- if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}")
+ stash.Trace(f"Shared memory map opended, and flag set to {shm_buffer[0]}")
RunCleanMetadata = False
-
+ stashScheduler = StashScheduler() if stash.pluginSettings['turnOnScheduler'] else None
event_handler = watchdog.events.FileSystemEventHandler()
+ def doIgnoreFileExt(chng_path, addToTargetPaths = False):
+ global TargetPaths
+ chng_path_lwr = chng_path.lower()
+ if len(fileExtTypes) > 0:
+ suffix = pathlib.Path(chng_path_lwr).suffix.lstrip(".")
+ if suffix not in fileExtTypes:
+ stash.TraceOnce(f"Ignoring file change because not a monitored type ({suffix}).")
+ return True
+ if len(excludePathChanges) > 0:
+ for path in excludePathChanges:
+ if chng_path_lwr.startswith(path.lower()):
+ stash.TraceOnce(f"Ignoring file change because is excluded path ({chng_path_lwr}) per entery '{path}'.")
+ return True
+ if addToTargetPaths:
+ TargetPaths.append(chng_path)
+ return False
+
def on_created(event):
global shouldUpdate
- global TargetPaths
- TargetPaths.append(event.src_path)
- logger.info(f"CREATE *** '{event.src_path}'")
+ if doIgnoreFileExt(event.src_path, True):
+ return
+ stash.Log(f"CREATE *** '{event.src_path}'")
with mutex:
shouldUpdate = True
signal.notify()
def on_deleted(event):
global shouldUpdate
- global TargetPaths
nonlocal RunCleanMetadata
- TargetPaths.append(event.src_path)
- logger.info(f"DELETE *** '{event.src_path}'")
+ if doIgnoreFileExt(event.src_path, True):
+ return
+ stash.Log(f"DELETE *** '{event.src_path}'")
with mutex:
shouldUpdate = True
RunCleanMetadata = True
@@ -183,107 +355,194 @@ def on_deleted(event):
def on_modified(event):
global shouldUpdate
global TargetPaths
+ if doIgnoreFileExt(event.src_path):
+ return
if SCAN_MODIFIED:
TargetPaths.append(event.src_path)
- logger.info(f"MODIFIED *** '{event.src_path}'")
+ stash.Log(f"MODIFIED *** '{event.src_path}'")
with mutex:
shouldUpdate = True
signal.notify()
else:
- if debugTracing: logger.info(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'")
+ stash.TraceOnce(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'")
def on_moved(event):
global shouldUpdate
global TargetPaths
- TargetPaths.append(event.src_path)
+ if doIgnoreFileExt(event.src_path, True):
+ return
TargetPaths.append(event.dest_path)
- logger.info(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
+ stash.Log(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
with mutex:
shouldUpdate = True
signal.notify()
-
- if debugTracing: logger.info("Debug Trace........")
+
+ def on_any_event(event):
+ global shouldUpdate
+ global TargetPaths
+ if doIgnoreFileExt(event.src_path):
+ return
+ if SCAN_ON_ANY_EVENT or event.src_path == SPECIAL_FILE_DIR:
+ stash.Log(f"Any-Event *** '{event.src_path}'")
+ TargetPaths.append(event.src_path)
+ with mutex:
+ shouldUpdate = True
+ signal.notify()
+ else:
+ stash.TraceOnce("Ignoring on_any_event trigger.")
+
event_handler.on_created = on_created
event_handler.on_deleted = on_deleted
event_handler.on_modified = on_modified
event_handler.on_moved = on_moved
+ event_handler.on_any_event = on_any_event
observer = Observer()
- # Iterate through stashPaths
- for path in stashPaths:
+ # Iterate through includePathChanges
+ for path in includePathChanges:
observer.schedule(event_handler, path, recursive=RECURSIVE)
- if debugTracing: logger.info(f"Observing {path}")
+ stash.Log(f"Observing {path}")
+ observer.schedule(event_handler, SPECIAL_FILE_DIR, recursive=RECURSIVE)
+ stash.Trace(f"Observing FileMonitor path {SPECIAL_FILE_DIR}")
observer.start()
- if debugTracing: logger.info("Starting loop................")
+ JobIsRunning = False
+ PutPluginBackOnTaskQueAndExit = False
+ stash.Trace("Starting loop")
try:
while True:
TmpTargetPaths = []
with mutex:
while not shouldUpdate:
- if debugTracing: logger.info("Wait start................")
- signal.wait()
- if debugTracing: logger.info("Wait end................")
+ stash.TraceOnce("While not shouldUpdate")
+ if stash.CALLED_AS_STASH_PLUGIN and isJobWaitingToRun():
+ if FileMonitorPluginIsOnTaskQue:
+ stash.Log(f"Another task (JobID={JobIdInTheQue}) is waiting on the queue. Will restart FileMonitor to allow other task to run.")
+ JobIsRunning = True
+ break
+ else:
+ stash.Warn("Not restarting because FileMonitor is no longer on Task Queue")
+ if shm_buffer[0] != CONTINUE_RUNNING_SIG:
+ stash.Log(f"Breaking out of loop. (shm_buffer[0]={shm_buffer[0]})")
+ break
+ if stash.pluginSettings['turnOnScheduler']:
+ stashScheduler.checkSchedulePending()
+ stash.LogOnce("Waiting for a file change-trigger.")
+ signal.wait(timeout=SIGNAL_TIMEOUT)
+ if stash.pluginSettings['turnOnScheduler'] and not shouldUpdate:
+ stash.TraceOnce("Checking the scheduler.")
+ elif shouldUpdate:
+ stash.LogOnce("File change trigger occurred.")
+ else:
+ stash.TraceOnce("Wait timeout occurred.")
shouldUpdate = False
TmpTargetPaths = []
for TargetPath in TargetPaths:
TmpTargetPaths.append(os.path.dirname(TargetPath))
+ stash.Trace(f"Added Path {os.path.dirname(TargetPath)}")
+ if TargetPath == SPECIAL_FILE_NAME:
+ if os.path.isfile(SPECIAL_FILE_NAME):
+ shm_buffer[0] = STOP_RUNNING_SIG
+ stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH)
+ else:
+ stash.Trace(f"[SpFl]Did not find file {SPECIAL_FILE_NAME}.")
+
+            # Make sure the special file does not exist, in case the change was missed.
+ if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME) and shm_buffer[0] == CONTINUE_RUNNING_SIG:
+ shm_buffer[0] = STOP_RUNNING_SIG
+ stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH)
TargetPaths = []
TmpTargetPaths = list(set(TmpTargetPaths))
if TmpTargetPaths != []:
- logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}")
- if not DRY_RUN:
- stash.metadata_scan(paths=TmpTargetPaths)
- if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
- stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN)
- if RUN_GENERATE_CONTENT:
- stash.metadata_generate()
- if gettingCalledAsStashPlugin and shm_buffer[0] == CONTINUE_RUNNING_SIG:
- stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
- if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
- return
+ stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}")
+ if len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
+ if not stash.DRY_RUN:
+ # ToDo: Consider using create_scene, update_scene, and destroy_scene over general method metadata_scan
+ stash.metadata_scan(paths=TmpTargetPaths)
+ if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
+ stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN)
+ if RUN_GENERATE_CONTENT:
+ stash.metadata_generate()
+ if stash.CALLED_AS_STASH_PLUGIN and shm_buffer[0] == CONTINUE_RUNNING_SIG and FileMonitorPluginIsOnTaskQue:
+ PutPluginBackOnTaskQueAndExit = True
else:
- if debugTracing: logger.info("Nothing to scan.")
- if shm_buffer[0] != CONTINUE_RUNNING_SIG:
- logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
+ stash.Trace("Nothing to scan.")
+
+ if shm_buffer[0] != CONTINUE_RUNNING_SIG or StopLibraryMonitorWaitingInTaskQueue:
+ stash.Log(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]}) (StopLibraryMonitorWaitingInTaskQueue={StopLibraryMonitorWaitingInTaskQueue})")
shm_a.close()
shm_a.unlink() # Call unlink only once to release the shared memory
raise KeyboardInterrupt
+ elif JobIsRunning or PutPluginBackOnTaskQueAndExit:
+ stash.run_plugin_task(plugin_id=stash.PLUGIN_ID, task_name=StartFileMonitorAsAPluginTaskName)
+ stash.Trace(f"Exiting plugin so that other task can run. (JobIsRunning={JobIsRunning}) (PutPluginBackOnTaskQueAndExit={PutPluginBackOnTaskQueAndExit})")
+ return
except KeyboardInterrupt:
observer.stop()
- if debugTracing: logger.info("Stopping observer................")
+ stash.Trace("Stopping observer")
+ if os.path.isfile(SPECIAL_FILE_NAME):
+ os.remove(SPECIAL_FILE_NAME)
observer.join()
- if debugTracing: logger.info("Exiting function................")
+ stash.Trace("Exiting function")
-# This function is only useful when called outside of Stash.
-# Example: python filemonitor.py stop
-# Stops monitoring after triggered by the next file change.
-# ToDo: Add logic so it doesn't have to wait until the next file change
+# Example: python filemonitor.py --stop
def stop_library_monitor():
- if debugTracing: logger.info("Opening shared memory map.")
+ if CREATE_SPECIAL_FILE_TO_EXIT:
+ if os.path.isfile(SPECIAL_FILE_NAME):
+ os.remove(SPECIAL_FILE_NAME)
+ pathlib.Path(SPECIAL_FILE_NAME).touch()
+ if DELETE_SPECIAL_FILE_ON_STOP:
+ os.remove(SPECIAL_FILE_NAME)
+ stash.Trace("Opening shared memory map.")
try:
- shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4)
+ shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=False, size=4)
except:
- pass
- logger.info("Could not open shared memory map. Change File Monitor must not be running.")
+        # If FileMonitor is running as a plugin, it's expected behavior that SharedMemory will not be available.
+ stash.Trace(f"Could not open shared memory map ({SHAREDMEMORY_NAME}). Change File Monitor must not be running.")
return
type(shm_a.buf)
shm_buffer = shm_a.buf
len(shm_buffer)
- shm_buffer[0] = 123
- if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}")
+ shm_buffer[0] = STOP_RUNNING_SIG
+ stash.Trace(f"Shared memory map opended, and flag set to {shm_buffer[0]}")
shm_a.close()
shm_a.unlink() # Call unlink only once to release the shared memory
- time.sleep(1)
- return
+
+def start_library_monitor_service():
+ # First check if FileMonitor is already running
+ try:
+ shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=False, size=4)
+ shm_a.close()
+ shm_a.unlink()
+ stash.Error("FileMonitor is already running. Need to stop FileMonitor before trying to start it again.")
+ return
+ except:
+ pass
+ stash.Trace("FileMonitor is not running, so it's safe to start it as a service.")
+ args = [f"{pathlib.Path(__file__).resolve().parent}{os.sep}filemonitor.py", '--url', f"{stash.STASH_URL}"]
+ stash.ExecutePythonScript(args)
-if stopLibraryMonitoring:
+if parse_args.stop or parse_args.restart or stash.PLUGIN_TASK_NAME == "stop_library_monitor":
stop_library_monitor()
- if debugTracing: logger.info(f"stop_library_monitor EXIT................")
-elif PLUGIN_ARGS_MODE == "start_library_monitor" or not gettingCalledAsStashPlugin:
+ if parse_args.restart:
+ time.sleep(5)
+ stash.run_plugin_task(plugin_id=stash.PLUGIN_ID, task_name=StartFileMonitorAsAPluginTaskName)
+ stash.Trace(f"Restart FileMonitor EXIT")
+ else:
+ stash.Trace(f"Stop FileMonitor EXIT")
+elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
+ start_library_monitor_service()
+ stash.Trace(f"{StartFileMonitorAsAServiceTaskID} EXIT")
+elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
start_library_monitor()
- if debugTracing: logger.info(f"start_library_monitor EXIT................")
+ stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT")
+elif not stash.CALLED_AS_STASH_PLUGIN:
+ try:
+ start_library_monitor()
+ stash.Trace(f"Command line FileMonitor EXIT")
+ except Exception as e:
+ stash.Error(f"Exception while running FileMonitor from the command line. Error: {e}")
else:
- logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
+ stash.Log(f"Nothing to do!!! (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})")
-if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
+stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index 14a41783..4d2adff4 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,34 +1,38 @@
name: FileMonitor
description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.2.0
+version: 0.8.2
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
settings:
recursiveDisabled:
displayName: No Recursive
- description: Enable stop monitoring paths recursively.
+ description: Enable to STOP monitoring paths recursively.
type: BOOLEAN
- runCleanAfterDelete:
- displayName: Run Clean
- description: Enable to run metadata clean task after file deletion.
- type: BOOLEAN
- scanModified:
- displayName: Scan Modifications
- description: Enable to monitor changes in file system for modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
+ turnOnScheduler:
+ displayName: Scheduler
+ description: Enable to turn on the scheduler. See filemonitor_config.py for more details.
type: BOOLEAN
+ zmaximumBackups:
+ displayName: Max DB Backups
+    description: When the value is greater than 1, trims the number of database backup files down to the set value. Requires [Scheduler] enabled and backupDirectoryPath populated with a path longer than 4 characters.
+ type: NUMBER
zzdebugTracing:
displayName: Debug Tracing
- description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log
- type: BOOLEAN
- zzdryRun:
- displayName: Dry Run
- description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call meta_scan, and only logs the action it would have taken.
+ description: Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log
type: BOOLEAN
exec:
- python
- "{pluginDir}/filemonitor.py"
interface: raw
tasks:
- - name: Start Library Monitor
- description: Monitors paths in Stash library for media file changes, and updates Stash.
+ - name: Start Library Monitor Service
+ description: Run [Library Monitor] as a SERVICE to update Stash with any media file changes.
+ defaultArgs:
+ mode: start_library_monitor_service
+ - name: Stop Library Monitor
+    description: Stops library monitoring within 2 minutes.
+ defaultArgs:
+ mode: stop_library_monitor
+ - name: Monitor as a Plugin
+    description: Run [Library Monitor] as a plugin (*not the recommended method*)
defaultArgs:
mode: start_library_monitor
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index de0210b6..a5f6f00a 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -2,11 +2,135 @@
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
config = {
+ # The task scheduler list.
+    # Tasks can be scheduled to run monthly, weekly, daily, hourly, and by minutes. For best results, use the scheduler with FileMonitor running as a service.
+    # For daily, weekly, and monthly tasks, use the weekday syntax.
+ # The [Auto Tag] task is an example of a daily scheduled task.
+ # The [Generate] task is an example of a weekly scheduled task.
+ # The [Backup] task is an example of a monthly scheduled task.
+    # Note: The hour section in time MUST be a two-digit number in military (24-hour) time format. Example: 1PM = "13:00" and 1AM = "01:00"
+ "task_scheduler": [
+ # To create a daily task, include each day of the week for the weekday field.
+ {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
+ {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+
+ # The following tasks are scheduled for 3 days out of the week.
+ {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
+ {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+
+ # The following tasks are scheduled weekly
+ {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM)
+ {"task" : "Scan", "weekday" : "sunday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every Sunday at 3AM)
+
+ # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
+ # The monthly field value must be 1, 2, 3, or 4.
+ # 1 = 1st specified weekday of the month. Example 1st monday.
+ # 2 = 2nd specified weekday of the month. Example 2nd monday of the month.
+ # 3 = 3rd specified weekday of the month.
+ # 4 = 4th specified weekday of the month.
+ # The following task is scheduled monthly
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+
+    # The following task shows the syntax used for a plugin task. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+ # This task requires plugin [Path Parser], and it's disabled by default.
+ {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time.
+
+    # Example#A1: Task to call the call_GQL API with custom input
+ {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
+
+    # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by filemonitor.py's current directory.
+ # The args field is NOT required.
+ {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
+
+    # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
+ {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+ {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+    {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Maintenance -> [Clean]
+
+ # Example#A4: Task which calls Migrations -> [Rename generated files]
+ {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
+
+ # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value
+ {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files.
+    {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # A zero value ensures no file trimming occurs, regardless of the UI [Max DB Backups] value
+
+    # The above weekday method is the more reliable method to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
+
+    # The below examples use the frequency field method, which works with minutes and hours. A zero frequency value disables the task.
+    # Note: Both seconds and days are also supported for the frequency field.
+    #       However, seconds is mainly used for test purposes.
+    #       And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
+    # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
+
+ # Example#B1: Task for calling another Stash plugin, which needs plugin name and plugin ID.
+ {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+
+ # Example#B2: Task to execute a command
+ {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+
+    # Example#B3: Task to execute a command with an optional args field, using the keyword , which gets replaced with filemonitor.py's current directory.
+ {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+
+    # Commented-out **test** tasks.
+    # To run tests, enable all tasks, and start FileMonitor as a service.
+    # When executed, these tasks should be seen in the Task Queue unless otherwise stated in comments.
+ # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
+ # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
+    # MUST ToDo: Always comment out the below test tasks before checking in this code!!!
+ # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
+ # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
+ # {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
+ # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
+ # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
+ # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
+ # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
+ # {"task" : "Generate", "weekday" : "friday", "time" : "00:00"},
+ # {"task" : "Clean", "weekday" : "friday", "time" : "00:00"},
+ # {"task" : "Auto Tag", "weekday" : "friday", "time" : "00:00"},
+ # {"task" : "Optimise Database", "weekday" : "friday", "time" : "00:00"},
+ # {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Running plugin task: Create Tags
+ # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "00:00"},
+ # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Optimising database...
+ # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "00:00"},
+ # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Migrating scene hashes...
+ # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+ # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ ],
+
+ # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
+ "timeOut": 60,
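+    # Example (illustration only, not a shipped value): "timeOut": 30 would check the scheduler,
+    # and in plugin mode the task queue, every 30 seconds; shorter intervals react faster at the
+    # cost of more frequent checks.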
+ # Enable to run metadata clean task after file deletion.
+ "runCleanAfterDelete": False,
# Enable to run metadata_generate (Generate Content) after metadata scan.
"runGenerateContent": False,
+    # When populated (comma-separated list [lower-case]), only scan for changes for the specified file extensions
+ "fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t"
+ # When populated, only include file changes in specified paths.
+ "includePathChanges" :[], # Example: ["C:\\MyVideos", "C:\\MyImages"]
+ # When populated, exclude file changes in paths that start with specified entries.
+ "excludePathChanges" :[], # Example: ["C:\\MyVideos\\SomeSubFolder\\", "C:\\MyImages\\folder\\Sub\\"]
- # The following fields are ONLY used when running FileMonitor in script mode
+ # The following fields are ONLY used when running FileMonitor in script mode.
"endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
"endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
"endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
+
+ # The following are advanced user options.
+ # Enable to run scan when triggered by on_any_event.
+    "onAnyEvent": False, # If enabled, may cause excessive triggers.
+    # Enable to monitor file system changes for the modification flag. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE flags. Other OS may differ.
+    "scanModified": False, # Warning: Enabling this in Windows OS may cause excessive triggers when the user is only viewing directory content.
+    # Enable to allow exiting FileMonitor by creating a special file in the plugin's working folder
+    "createSpecFileToExit": True,
+    # Enable to delete the special file immediately after it's created in the stop process.
+    "deleteSpecFileInStop": False,
+
+    # Below are placeholders for **possible** future features.
+ # !!! Not yet implemented !!!
+    # When enabled and the CREATE flag is triggered, the DupFileManager task is called if that plugin is installed.
+ "onCreateCallDupFileManager": False, # Not yet implemented!!!!
+ # !!! Not yet implemented !!!
}
diff --git a/plugins/FileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt
index aa553701..e7825b02 100644
--- a/plugins/FileMonitor/requirements.txt
+++ b/plugins/FileMonitor/requirements.txt
@@ -1,4 +1,3 @@
-stashapp-tools
+stashapp-tools >= 0.2.49
pyYAML
-watchdog
-requests
\ No newline at end of file
+watchdog
\ No newline at end of file
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index 7ea05101..1474ed6f 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -39,9 +39,9 @@ Error: [WinError 32] The process cannot access the file because it is being used
To avoid this error, refresh the URL before changing the Title field.
### Requirements
-pip install -r requirements.txt
+- pip install -r requirements.txt
- Or manually install each requirement:
- - `pip install stashapp-tools`
+ - `pip install stashapp-tools --upgrade`
- `pip install pyYAML`
- `pip install requests`
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index ccf7b27e..884eaa86 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -17,7 +17,7 @@
# **********************************************************************
# Constant global variables --------------------------------------------
-LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
+LOG_FILE_PATH = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
PLUGIN_ID = Path(__file__).stem.lower()
diff --git a/plugins/RenameFile/requirements.txt b/plugins/RenameFile/requirements.txt
index 2b546d99..a3649895 100644
--- a/plugins/RenameFile/requirements.txt
+++ b/plugins/RenameFile/requirements.txt
@@ -1,3 +1,3 @@
-stashapp-tools
+stashapp-tools >= 0.2.49
pyYAML
requests
\ No newline at end of file
From e847bdc1077237bc67cff647f3ba2ab4cb4ead52 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Fri, 16 Aug 2024 04:10:39 -0400
Subject: [PATCH 18/39] Removing template changes to let plugin changes go
through first.
---
.github/ISSUE_TEMPLATE/bug_report.yml | 77 ------------------
.github/ISSUE_TEMPLATE/bug_report_plugin.yml | 84 --------------------
.github/ISSUE_TEMPLATE/discussion.yml | 42 ----------
.github/ISSUE_TEMPLATE/feature_request.yml | 35 --------
.github/ISSUE_TEMPLATE/help.yml | 37 ---------
5 files changed, 275 deletions(-)
delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml
delete mode 100644 .github/ISSUE_TEMPLATE/bug_report_plugin.yml
delete mode 100644 .github/ISSUE_TEMPLATE/discussion.yml
delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml
delete mode 100644 .github/ISSUE_TEMPLATE/help.yml
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
deleted file mode 100644
index 061780ac..00000000
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-name: 🐞 Bug
-description: Create a bug report
-title: "🐞[Bug] Your_Short_title"
-labels: [Bug]
-body:
- - type: markdown
- attributes:
- value: |
- Thank you for taking the time to fill out this bug report!
- Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
- Steps to reproduce the behavior:
- 1. Go to '...'
- 2. Click on '....'
- 3. Scroll down to '....'
- 4. See error
- - type: textarea
- id: steps
- attributes:
- label: Please enter steps to reproduce the behavior.
- validations:
- required: true
- - type: input
- id: stash_ver
- attributes:
- label: Stash Version (from Settings -> About)
- placeholder: e.g. v0.26.2
- validations:
- required: true
- - type: input
- id: os
- attributes:
- label: What Operating System (OS)?
- placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS)
- validations:
- required: true
- - type: input
- id: device
- attributes:
- label: Phone or tablets
- placeholder: e.g. iPhone6, Galaxy Tab A9+
- validations:
- required: false
- - type: input
- id: browser
- attributes:
- label: What browser and version?
- placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
- validations:
- required: true
- - type: textarea
- id: expected-behavior
- attributes:
- label: Expected Behavior
- description: What was the expected behavior?
- validations:
- required: false
- - type: textarea
- id: logging
- attributes:
- label: Logging
- description: Enter Stash logging.
- validations:
- required: false
- - type: textarea
- id: snapshots
- attributes:
- label: Snapshot(s)
- description: Optionally attach snapshot(s) which displays the bug.
- validations:
- required: false
- - type: textarea
- id: additional
- attributes:
- label: Additional context
- description: Add any other context about the problem here.
- validations:
- required: false
diff --git a/.github/ISSUE_TEMPLATE/bug_report_plugin.yml b/.github/ISSUE_TEMPLATE/bug_report_plugin.yml
deleted file mode 100644
index 5c03d45b..00000000
--- a/.github/ISSUE_TEMPLATE/bug_report_plugin.yml
+++ /dev/null
@@ -1,84 +0,0 @@
-name: 🪲 Plugin Bug
-description: Create a plugin bug report
-title: "🪲[EnterPluginNameHere] Your_Short_title"
-labels: [Plugin_Bug]
-body:
- - type: markdown
- attributes:
- value: |
- Thank you for taking the time to fill out this bug report!
- Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
- Steps to reproduce the behavior:
- 1. Go to '...'
- 2. Click on '....'
- 3. Scroll down to '....'
- 4. See error
- - type: textarea
- id: steps
- attributes:
- label: Please enter steps to reproduce the behavior.
- validations:
- required: true
- - type: input
- id: stash_ver
- attributes:
- label: Stash Version (from Settings -> About)
- placeholder: e.g. v0.26.2
- validations:
- required: true
- - type: input
- id: os
- attributes:
- label: What Operating System (OS)?
- placeholder: e.g. Windows, MacOS, Linux, iOS8.1 (mobile OS)
- validations:
- required: true
- - type: input
- id: device
- attributes:
- label: Phone or tablets
- placeholder: e.g. iPhone6, Galaxy Tab A9+
- validations:
- required: false
- - type: input
- id: browser
- attributes:
- label: What browser and version?
- placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
- validations:
- required: true
- - type: textarea
- id: install
- attributes:
- label: The pip install for the plugin
- description: pip install stashapp-tools --upgrade, pip install pyYAML
- validations:
- required: true
- - type: textarea
- id: expected-behavior
- attributes:
- label: Expected Behavior
- description: What was the expected behavior?
- validations:
- required: false
- - type: textarea
- id: logging
- attributes:
- label: Logging
- description: Enter Stash logging and plugin logging file if applicable.
- validations:
- required: false
- - type: textarea
- id: snapshots
- attributes:
- label: Snapshot(s)
- description: Optionally attach snapshot(s) which displays the bug.
- validations:
- required: false
- - type: textarea
- id: additional
- attributes:
- label: Additional context
- description: Add any other context about the problem here.
- validations:
- required: false
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/discussion.yml b/.github/ISSUE_TEMPLATE/discussion.yml
deleted file mode 100644
index 177b35cd..00000000
--- a/.github/ISSUE_TEMPLATE/discussion.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-name: 🛗Discussion / Request for Commentary [RFC]
-description: This is for issues that will be discussed and won't necessarily result directly in commits or pull requests.
-title: "🛗[RFC]: Your_Short_title"
-labels: [RFC]
-body:
- - type: markdown
- attributes:
- value: |
- This is for issues that will be discussed and won't necessarily result directly in commits or pull requests.
- Please ensure that you respect people's time and attention and understand that people are volunteering their time, so concision is ideal and considerate.
- Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
- - type: textarea
- id: Title
- attributes:
- label: Optional title of the topic to discuss.
- validations:
- required: false
- - type: textarea
- id: Summary
- attributes:
- label: Describe the scope of your topic and your goals ideally within a single paragraph or TL. A summary that makes it easier for people to determine if they can contribute at a glance.
- validations:
- required: true
- - type: textarea
- id: Details
- attributes:
- label: Only required if summary and title doesn't cover everything.
- validations:
- required: false
- - type: textarea
- id: Examples
- attributes:
- label: If you can show a picture or video examples post them here.
- validations:
- required: false
- - type: textarea
- id: snapshots
- attributes:
- label: Snapshot(s)
- description: Optionally attach additional snapshot(s) which helps describe the discussion.
- validations:
- required: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
deleted file mode 100644
index 9593dc41..00000000
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: 💡️Feature Request
-description: Suggest improvement for this project
-title: "💡️[Enhancement]: Your_Short_title"
-labels: [Enhancement]
-body:
- - type: markdown
- attributes:
- value: |
- Please fill out the following fields with as much detail as possible:
- Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
- - type: textarea
- id: problem
- attributes:
- label: If feature request is related to a problem, give a clear and concise description of what is the problem. Ex. I'm always frustrated when [...]
- validations:
- required: false
- - type: textarea
- id: solution
- attributes:
- label: Describe the solution you'd like. A clear and concise description of what you want to happen.
- validations:
- required: true
- - type: textarea
- id: alternatives
- attributes:
- label: Describe alternatives you've considered. A clear and concise description of any alternative solutions or features you've considered.
- validations:
- required: false
- - type: textarea
- id: Snapshots
- attributes:
- label: Snapshots / Images
- description: Add any other context or screenshots about the feature request here, which can help explain the feature, and a description of what to look for in the image(s).
- validations:
- required: false
diff --git a/.github/ISSUE_TEMPLATE/help.yml b/.github/ISSUE_TEMPLATE/help.yml
deleted file mode 100644
index dae58e2e..00000000
--- a/.github/ISSUE_TEMPLATE/help.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: ❓Help
-description: Post your question
-title: "❓[Help]: Your_Short_title"
-labels: [Question]
-body:
- - type: markdown
- attributes:
- value: |
- Please fill out the following fields with as much detail as possible, so that we can better answer your question.
- Fields marked with an asterisk (*) are required, and remaining fields are optional, but we encourage their usage.
- - type: textarea
- id: question
- attributes:
- label: Please enter a clear and concise detailed question.
- validations:
- required: true
- - type: input
- id: os
- attributes:
- label: What Operating System (OS)?
- placeholder: e.g. Windows, MacOS, Linux
- validations:
- required: false
- - type: input
- id: browser
- attributes:
- label: What browser and version?
- placeholder: e.g. Chrome Ver:127, Firefox Ver:129, Safari Ver:1.3.2, Any
- validations:
- required: false
- - type: textarea
- id: snapshots
- attributes:
- label: Snapshot(s)
- description: Optionally attach snapshot(s) which helps describe the question.
- validations:
- required: false
From bdc12bd66ae3678e9b38be4a6733cadca0255f64 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Fri, 16 Aug 2024 04:14:05 -0400
Subject: [PATCH 19/39] ver change
---
plugins/RenameFile/README.md | 2 +-
plugins/RenameFile/renamefile.yml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index 1474ed6f..acf06b9a 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,4 +1,4 @@
-# RenameFile: Ver 0.4.0 (By David Maisonave)
+# RenameFile: Ver 0.4.1 (By David Maisonave)
RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
- **Rename Scene File Name** (On-The-Fly)
- **Append tag names** to file name
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index e5d2a0f0..20778b34 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.0
+version: 0.4.1
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
performerAppend:
From 1d2f57582f4e70e32156969969888b3bf634ecb8 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Fri, 16 Aug 2024 12:14:31 -0400
Subject: [PATCH 20/39] Added validation check for scheduled plugins
---
plugins/FileMonitor/README.md | 33 ++++++------
plugins/FileMonitor/StashPluginHelper.py | 3 ++
plugins/FileMonitor/filemonitor.py | 21 ++++++--
plugins/FileMonitor/filemonitor.yml | 2 +-
plugins/FileMonitor/filemonitor_config.py | 66 +++++++++++------------
5 files changed, 72 insertions(+), 53 deletions(-)
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index a196509c..63f5c9c4 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.8.2 (By David Maisonave)
+# FileMonitor: Ver 0.8.3 (By David Maisonave)
FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
- Updates Stash when any file changes occur in the Stash library.
- **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**.
@@ -41,12 +41,13 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
```` python
"task_scheduler": [
# To create a daily task, include each day of the week for the weekday field.
- {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
- {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+ {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
+ {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+   {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] -> [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
# The following tasks are scheduled for 3 days out of the week.
- {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
- {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+ {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
+ {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
# The following tasks are scheduled weekly
{"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM)
@@ -59,11 +60,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
# 3 = 3rd specified weekday of the month.
# 4 = 4th specified weekday of the month.
# The following task is scheduled monthly
- {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
-
- # The following task is the syntax used for a plugins. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
- # This task requires plugin [Path Parser], and it's disabled by default.
- {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time.
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
# Example#A1: Task to call call_GQL API with custom input
{"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
@@ -73,13 +70,17 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
{"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
   # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
- {"task" : "Scan", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+ {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
{"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
- {"task" : "Clean", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate]
+   {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Maintenance -> [Clean]
# Example#A4: Task which calls Migrations -> [Rename generated files]
{"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
+ # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value
+ {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files.
+ {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups]
+
   # The above weekday method is the more reliable method to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
# The below examples use frequency field method which can work with minutes and hours. A zero frequency value disables the task.
@@ -88,13 +89,15 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
   #       And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
   # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
- # Example#B1: Task for calling another Stash plugin, which needs plugin name and plugin ID.
+   # Example#B1: The following task shows the syntax used for a plugin task. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
{"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+   # Example#B2: Optionally, the validateDir field can be included, which is used to validate that the plugin is installed either under the plugins folder or under the plugins community folder.
+ {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
- # Example#B2: Task to execute a command
+ # Example#B3: Task to execute a command
{"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
- # Example#B3: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
+   # Example#B4: Task to execute a command with an optional args field, using the keyword , which gets replaced with filemonitor.py's current directory.
{"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
],
````
diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py
index 218e055c..c32fe779 100644
--- a/plugins/FileMonitor/StashPluginHelper.py
+++ b/plugins/FileMonitor/StashPluginHelper.py
@@ -27,6 +27,7 @@ class StashPluginHelper(StashInterface):
PLUGIN_TASK_NAME = None
PLUGIN_ID = None
PLUGIN_CONFIGURATION = None
+ PLUGINS_PATH = None
pluginSettings = None
pluginConfig = None
STASH_INTERFACE_INIT = False
@@ -165,6 +166,8 @@ def __init__(self,
self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
self.STASH_CONFIGURATION = self.get_configuration()["general"]
self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
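+        # Cache the Stash plugins directory (general configuration key 'pluginsPath'), when present,
+        # so callers such as the scheduler's validateDir check can verify a plugin folder exists.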
+ if 'pluginsPath' in self.STASH_CONFIGURATION:
+ self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath']
for item in self.STASHPATHSCONFIG:
self.STASH_PATHS.append(item["path"])
if settings:
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index 6d6752d1..e23fbd47 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -45,8 +45,6 @@
stash.Status()
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
-# stash.Log(f"{stash.find_duplicate_scenes()}")
-
exitMsg = "Change success!!"
mutex = Lock()
signal = Condition(mutex)
@@ -260,8 +258,23 @@ def runTask(self, task):
# ToDo: Add code to check if plugin is installed.
try:
if 'pluginId' in task and task['pluginId'] != "":
- stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}")
- stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
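+            # Optional safety check: when the task supplies a 'validateDir' folder name, confirm it
+            # exists under the plugins path (or its community subfolder) before queueing the task.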
+ invalidDir = False
+ validDirMsg = ""
+ if 'validateDir' in task and task['validateDir'] != "":
+ invalidDir = True
+ communityPluginPath = f"{stash.PLUGINS_PATH}{os.sep}community{os.sep}{task['validateDir']}"
+ basePluginPath = f"{stash.PLUGINS_PATH}{os.sep}{task['validateDir']}"
+ if os.path.exists(communityPluginPath):
+ invalidDir = False
+ validDirMsg = f"Valid path in {communityPluginPath}"
+ elif os.path.exists(basePluginPath):
+ invalidDir = False
+ validDirMsg = f"Valid path in {basePluginPath}"
+ if invalidDir:
+ stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
+ else:
+ stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
+ stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
else:
stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.")
stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.")
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index 4d2adff4..b30f935b 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,6 +1,6 @@
name: FileMonitor
description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.8.2
+version: 0.8.3
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
settings:
recursiveDisabled:
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index a5f6f00a..add1c3d3 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -11,12 +11,13 @@
# Note: The hour section in time MUST be a two digit number, and use military time format. Example: 1PM = "13:00" and 1AM = "01:00"
"task_scheduler": [
# To create a daily task, include each day of the week for the weekday field.
- {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
- {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+ {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
+ {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
+    {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] -> [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
# The following tasks are scheduled for 3 days out of the week.
- {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
- {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+ {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
+ {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
# The following tasks are scheduled weekly
{"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM)
@@ -29,11 +30,7 @@
# 3 = 3rd specified weekday of the month.
# 4 = 4th specified weekday of the month.
# The following task is scheduled monthly
- {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
-
- # The following task is the syntax used for a plugins. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
- # This task requires plugin [Path Parser], and it's disabled by default.
- {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task change time "DISABLED" to a valid time.
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
# Example#A1: Task to call call_GQL API with custom input
{"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
@@ -62,13 +59,15 @@
    #       And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
    # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
- # Example#B1: Task for calling another Stash plugin, which needs plugin name and plugin ID.
+    # Example#B1: The following task shows the syntax used for a plugin task. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
{"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+    # Example#B2: Optionally, the validateDir field can be included, which is used to validate that the plugin is installed either under the plugins folder or under the plugins community folder.
+ {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
- # Example#B2: Task to execute a command
+ # Example#B3: Task to execute a command
{"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
- # Example#B3: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
+    # Example#B4: Task to execute a command with an optional args field, using the keyword , which gets replaced with filemonitor.py's current directory.
{"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
# Comment out **test** tasks.
@@ -77,27 +76,28 @@
# These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
# These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
# MUST ToDo: Always comment out below test task before checking in this code!!!
- # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
- # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
- # {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
- # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
- # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
- # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
- # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
- # {"task" : "Generate", "weekday" : "friday", "time" : "00:00"},
- # {"task" : "Clean", "weekday" : "friday", "time" : "00:00"},
- # {"task" : "Auto Tag", "weekday" : "friday", "time" : "00:00"},
- # {"task" : "Optimise Database", "weekday" : "friday", "time" : "00:00"},
- # {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Running plugin task: Create Tags
- # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "00:00"},
- # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Optimising database...
- # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "00:00"},
- # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Migrating scene hashes...
- # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
- # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
- # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
- # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
- # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
+ # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
+ # {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
+ # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
+ # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
+ # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
+ # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
+ # {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory)
+ # {"task" : "Generate", "weekday" : "friday", "time" : "12:03"},
+ # {"task" : "Clean", "weekday" : "friday", "time" : "12:03"},
+ # {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"},
+ # {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"},
+ # {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags
+ # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"},
+ # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database...
+ # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"},
+ # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes...
+ # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+ # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
],
# Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
From 44e794871fea8510006fa08bf0e03cf843b749e9 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 20:46:26 -0400
Subject: [PATCH 21/39] Update requirements.txt
---
plugins/FileMonitor/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/plugins/FileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt
index 7a38cdfb..19a1174d 100644
--- a/plugins/FileMonitor/requirements.txt
+++ b/plugins/FileMonitor/requirements.txt
@@ -1,3 +1,3 @@
stashapp-tools >= 0.2.50
pyYAML
-watchdog
+watchdog
\ No newline at end of file
From 90b30a4970aa479799e30ba80279fba89b58c595 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 20:57:18 -0400
Subject: [PATCH 22/39] Fixed bug that was adding duplicate resolution
1. Fixed a bug that added duplicate resolution, width, height, scene_date, video_codec, and frame_rate when the title is not populated.
2. Added an excludeTags field, pre-populated with DuplicateMarkForDeletion and DuplicateWhitelistFile to avoid conflicts with the upcoming DupFileManager plugin.
---
plugins/RenameFile/README.md | 10 +++++++-
plugins/RenameFile/renamefile.py | 29 +++++++++++++----------
plugins/RenameFile/renamefile.yml | 2 +-
plugins/RenameFile/renamefile_settings.py | 2 ++
plugins/RenameFile/requirements.txt | 3 ++-
5 files changed, 30 insertions(+), 16 deletions(-)
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index acf06b9a..bab3fca7 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,4 +1,4 @@
-# RenameFile: Ver 0.4.1 (By David Maisonave)
+# RenameFile: Ver 0.4.2 (By David Maisonave)
RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
- **Rename Scene File Name** (On-The-Fly)
- **Append tag names** to file name
@@ -57,3 +57,11 @@ That's it!!!
- Main options are accessible in the GUI via Settings->Plugins->Plugins->[RenameFile].
 - Advanced options are available in the **renamefile_settings.py** file. After making changes, go to http://localhost:9999/settings?tab=plugins, and click [Reload Plugins].
+## Bugs and Feature Request
+Please use the following link to report RenameFile bugs:
+[RenameFile Bug Report](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Plugin_Bug&projects=&template=bug_report_plugin.yml&title=%F0%9F%AA%B2%5BRenameFile%5D+Your_Short_title)
+
+Please use the following link to submit a RenameFile feature request: [RenameFile Feature Request](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Enhancement&projects=&template=feature_request_plugin.yml&title=%F0%9F%92%A1%EF%B8%8F%5BEnhancement%5D%3A%5BRenameFile%5D+Your_Short_title)
+
+Please do **NOT** use a feature request to report problems associated with errors. Use the bug report for error issues instead.
+
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index 884eaa86..00e112a7 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -117,6 +117,7 @@
exclude_paths = config["pathToExclude"]
exclude_paths = exclude_paths.split()
if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................")
+excluded_tags = config["excludeTags"]
# Extract tag whitelist from settings
tag_whitelist = config["tagWhitelist"]
if debugTracing: logger.info("Debug Tracing................")
@@ -203,7 +204,9 @@ def add_tag(tag_name):
if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)):
return # Skip adding more tags if the maximum limit is reached
-
+ if tag_name in excluded_tags:
+ if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})")
+ return
# Check if the tag name is in the whitelist
if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist):
if WRAPPER_STYLES.get('tag'):
@@ -261,8 +264,8 @@ def add_tag(tag_name):
scene_date += POSTFIX_STYLES.get('date')
if debugTracing: logger.info("Debug Tracing................")
if WRAPPER_STYLES.get('date'):
- filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}")
- else:
+ scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}"
+ if scene_date not in title:
filename_parts.append(scene_date)
elif key == 'resolution':
width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string
@@ -270,40 +273,40 @@ def add_tag(tag_name):
if width and height:
resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution')
if WRAPPER_STYLES.get('resolution'):
- filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}")
- else:
+ resolution = f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}"
+ if resolution not in title:
filename_parts.append(resolution)
elif key == 'width':
width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string
if width:
width += POSTFIX_STYLES.get('width')
if WRAPPER_STYLES.get('width'):
- filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}")
- else:
+ width = f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}"
+ if width not in title:
filename_parts.append(width)
elif key == 'height':
height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string
if height:
height += POSTFIX_STYLES.get('height')
if WRAPPER_STYLES.get('height'):
- filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}")
- else:
+ height = f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}"
+ if height not in title:
filename_parts.append(height)
elif key == 'video_codec':
video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase
if video_codec:
video_codec += POSTFIX_STYLES.get('video_codec')
if WRAPPER_STYLES.get('video_codec'):
- filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}")
- else:
+ video_codec = f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}"
+ if video_codec not in title:
filename_parts.append(video_codec)
elif key == 'frame_rate':
frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS'
if frame_rate:
frame_rate += POSTFIX_STYLES.get('frame_rate')
if WRAPPER_STYLES.get('frame_rate'):
- filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}")
- else:
+ frame_rate = f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}"
+ if frame_rate not in title:
filename_parts.append(frame_rate)
elif key == 'galleries':
galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])]
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 20778b34..1c9d5ad9 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.1
+version: 0.4.2
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
performerAppend:
diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py
index 24052f8a..6a4445db 100644
--- a/plugins/RenameFile/renamefile_settings.py
+++ b/plugins/RenameFile/renamefile_settings.py
@@ -37,6 +37,8 @@
"frame_rate": 'FR',
"date": '',
},
+ # Add tags to exclude from RenameFile.
+ "excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"],
# Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"
"pathToExclude": "",
# Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"
diff --git a/plugins/RenameFile/requirements.txt b/plugins/RenameFile/requirements.txt
index 14af1d68..d4e029a9 100644
--- a/plugins/RenameFile/requirements.txt
+++ b/plugins/RenameFile/requirements.txt
@@ -1,2 +1,3 @@
-stashapp-tools >= 0.2.49
+stashapp-tools >= 0.2.50
+pyYAML
requests
\ No newline at end of file
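For orientation, the filtering pattern this patch introduces in renamefile.py boils down to two checks: drop tags that appear in the new excludeTags list, and only append a filename part (date, resolution, codec, frame rate) when it is not already present in the title. The following is a minimal standalone sketch of that pattern, using illustrative data rather than the plugin's real settings:

```` python
# Minimal sketch of the filtering pattern added in this patch (illustrative data).
def should_add_tag(tag_name, excluded_tags, tag_whitelist):
    if tag_name in excluded_tags:
        return False  # excludeTags entries never appear in the file name
    return not tag_whitelist or tag_name in tag_whitelist  # empty whitelist allows all

def append_if_absent(filename_parts, part, title):
    # Mirrors the new 'if <part> not in title' checks for date, resolution, etc.
    if part and part not in title:
        filename_parts.append(part)

parts = []
append_if_absent(parts, "1920x1080", "My Scene 1920x1080")  # skipped: already in title
append_if_absent(parts, "60FPS", "My Scene 1920x1080")      # appended
print(parts)  # ['60FPS']
````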
From 131f3ebbb3fb786fe296e44ef6f5fcaaea24e111 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 21:04:34 -0400
Subject: [PATCH 23/39] Added API Key option
Added API Key logic to allow FileMonitor to work in service mode when the user has configured a Stash username and password.
To slim down the main config, moved self_unit_test and task_examples to their own configuration files.
Added more self unit tests, and a selfUnitTest activation field that is normally false.
When the field is true, it turns on the tasks for unit testing.
---
.../FileMonitor/filemonitor_self_unit_test.py | 42 ++++++++++++++++
.../FileMonitor/filemonitor_task_examples.py | 49 +++++++++++++++++++
2 files changed, 91 insertions(+)
create mode 100644 plugins/FileMonitor/filemonitor_self_unit_test.py
create mode 100644 plugins/FileMonitor/filemonitor_task_examples.py
diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py
new file mode 100644
index 00000000..c30311fc
--- /dev/null
+++ b/plugins/FileMonitor/filemonitor_self_unit_test.py
@@ -0,0 +1,42 @@
+# **test** tasks which are disabled by default. To enable test tasks, set selfUnitTest to True.
+# To run tests, enable all tasks, and start FileMonitor as a service.
+# When executed, these tasks should be seen in the Task Queue unless otherwise stated in the comments.
+# These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
+# These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
+self_unit_test = {
+ "task_scheduler": [
+ {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
+ {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
+ {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
+ {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
+ {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
+ {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
+ {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
+ {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory)
+ {"task" : "Log", "msg" : "Testing Scheduled Log", "minutes" : 1}, # Test plugin log file
+ {"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
+ {"task" : "LogOnce", "seconds" :15}, # Test LogOnce
+ {"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce
+ # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
+ {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter
+ {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
+ {"task" : "Generate", "weekday" : "friday", "time" : "12:03"},
+ {"task" : "Clean", "weekday" : "friday", "time" : "12:03"},
+ {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"},
+ {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"},
+ {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags
+ {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"},
+ {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database...
+ {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"},
+ {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes...
+ {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+ {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ ],
+
+ # MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
+ # Enable to turn on self unit test.
+ "selfUnitTest": False,
+}
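As context, filemonitor.py only merges these test tasks into the live schedule when the activation flag is set. A condensed sketch of that gating, matching the plugin's own import pattern:

```` python
# Condensed sketch of how filemonitor.py gates the test tasks above.
from filemonitor_config import config
from filemonitor_self_unit_test import self_unit_test

if self_unit_test["selfUnitTest"]:
    # Test tasks join the schedule only when the flag is explicitly True.
    config["task_scheduler"] = config["task_scheduler"] + self_unit_test["task_scheduler"]
````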
diff --git a/plugins/FileMonitor/filemonitor_task_examples.py b/plugins/FileMonitor/filemonitor_task_examples.py
new file mode 100644
index 00000000..3cbfad23
--- /dev/null
+++ b/plugins/FileMonitor/filemonitor_task_examples.py
@@ -0,0 +1,49 @@
+# Below are example tasks.
+# They are all disabled by default, either by a zero value for the time frequency, or by "DISABLED" set for the time field.
+# To enable these tasks, set the frequency or the time value to a valid frequency or timestamp.
+task_examples = {
+ "task_scheduler": [
+ # Example#A1: Task to call the call_GQL API with custom input
+ {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
+
+ # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by filemonitor.py's current directory.
+ # The args field is NOT required.
+ {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
+
+ # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
+ {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+ {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+ {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate]
+
+ # Example#A4: Task which calls Migrations -> [Rename generated files]
+ {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
+
+ # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value
+ {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files.
+ {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups]
+
+ # The above weekday method is the more reliable way to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
+
+ # The below examples use the frequency field method, which can work with minutes and hours. A zero frequency value disables the task.
+ # Note: Both seconds and days are also supported for the frequency field.
+ # However, seconds is mainly used for test purposes.
+ # Days usage is discouraged, because it only works if FileMonitor is running for that many days non-stop.
+ # The below example tasks use hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
+
+ # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+ {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+ # Example#B2: Optionally, the validateDir field can be included, which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder.
+ {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
+
+ # Example#B3: Task to execute a command
+ {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+
+ # Example#B4: Task to execute a command with an optional args field, and using a keyword which gets replaced with filemonitor.py's current directory.
+ {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+
+ # Example#C1 Some OSes may need the "command" field, which specifies the binary path.
+ {"task" : "CheckStashIsRunning", "command" : "stash-linux-arm64v8", "minutes" :0},
+ # Example#C2 The RunAfter field can be used to specify tasks to run after starting Stash
+ {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0},
+ ],
+}
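For readers unfamiliar with how these dictionaries become actual timers: FileMonitor drives the `schedule` package (visible via `schedule.run_pending()` in filemonitor.py), and the two syntaxes map onto it roughly as below. This is a simplified sketch, not the plugin's scheduler class; `run_task` is a placeholder:

```` python
import time
import schedule  # pip install schedule; the package FileMonitor's scheduler is built on

def run_task(name):
    print(f"Running task: {name}")  # Placeholder; FileMonitor queues real Stash tasks

# Weekday-based entry: {"task": "Scan", "weekday": "sunday", "time": "03:00"}
schedule.every().sunday.at("03:00").do(run_task, "Scan")
# Frequency-based entry: {"task": "CheckStashIsRunning", "minutes": 5}
schedule.every(5).minutes.do(run_task, "CheckStashIsRunning")

while True:  # Simplified monitor loop; FileMonitor runs pending jobs between file events
    schedule.run_pending()
    time.sleep(1)
````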
From f02e66899bb59914d33a85a102675b57c1fe6e20 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Tue, 20 Aug 2024 23:04:46 -0400
Subject: [PATCH 24/39] Change default rename using Move
Changed the default rename to use Move, in order to avoid access issues when the scene is being played.
---
plugins/RenameFile/renamefile.py | 4 ++--
plugins/RenameFile/renamefile.yml | 8 ++++----
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index 00e112a7..a8ab1fd4 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -66,7 +66,7 @@
"studioAppend": False,
"tagAppend": False,
"z_keyFIeldsIncludeInFileName": False,
- "zafileRenameViaMove": False,
+ "zafileRenameViaRaname": False,
"zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
"zmaximumTagKeys": 12,
"zseparators": DEFAULT_SEPERATOR,
@@ -133,7 +133,7 @@
if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
# Extract rename_files and move_files settings from renamefile_settings.py
rename_files = config["rename_files"]
-move_files = settings["zafileRenameViaMove"]
+move_files = not settings["zafileRenameViaRaname"]
if debugTracing: logger.info("Debug Tracing................")
fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order
if not fieldKeyList or fieldKeyList == "":
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index 1c9d5ad9..ca2c8f53 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.2
+version: 0.4.3
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
performerAppend:
@@ -19,9 +19,9 @@ settings:
displayName: Include Existing Key Field
description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name.
type: BOOLEAN
- zafileRenameViaMove:
- displayName: Rename Using Move
- description: Enable to have file moved when renaming file.
+ zafileRenameViaRaname:
+ displayName: Rename Instead of Move
+ description: Enable to rename the file instead of moving it. (Not recommended for Windows OS)
type: BOOLEAN
zfieldKeyList:
displayName: Key Fields
From 444a569a32ef529e61ed8cbfcba0fcd86e0fd868 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Thu, 22 Aug 2024 02:40:38 -0400
Subject: [PATCH 25/39] Added logic to stop running multiple scan jobs.
Hundreds of file changes at the same time caused FileMonitor to run dozens of scan jobs.
Added logic to have FileMonitor delay new scan jobs while the last scan job is still running.
---
plugins/FileMonitor/filemonitor.py | 62 ++++++++++++++++++++---
plugins/FileMonitor/filemonitor_config.py | 5 ++
2 files changed, 59 insertions(+), 8 deletions(-)
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index ecc2d5d5..948088c2 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -65,6 +65,8 @@
RUN_GENERATE_CONTENT = stash.pluginConfig['runGenerateContent']
SCAN_ON_ANY_EVENT = stash.pluginConfig['onAnyEvent']
SIGNAL_TIMEOUT = stash.pluginConfig['timeOut'] if stash.pluginConfig['timeOut'] > 0 else 1
+MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS = stash.pluginConfig['timeOutDelayProcess']
+MAX_SECONDS_WAIT_SCANJOB_COMPLETE = stash.pluginConfig['maxWaitTimeJobFinish']
CREATE_SPECIAL_FILE_TO_EXIT = stash.pluginConfig['createSpecFileToExit']
DELETE_SPECIAL_FILE_ON_STOP = stash.pluginConfig['deleteSpecFileInStop']
@@ -399,10 +401,19 @@ def checkSchedulePending(self):
schedule.run_pending()
stash.TraceOnce("Pending check complete.")
-TargetPaths = []
+TargetPaths = []
+lastScanJob = {
+ "id": -1,
+ "TargetPaths": [],
+ "DelayedProcessTargetPaths": [],
+ "timeAddedToTaskQueue": None,
+ "lastStatus" : ""
+}
+
def start_library_monitor():
global shouldUpdate
global TargetPaths
+ global lastScanJob
try:
# Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=True, size=4)
@@ -529,9 +540,17 @@ def on_any_event(event):
break
if stash.pluginSettings['turnOnScheduler']:
stashScheduler.checkSchedulePending()
- stash.LogOnce("Waiting for a file change-trigger.")
- signal.wait(timeout=SIGNAL_TIMEOUT)
- if stash.pluginSettings['turnOnScheduler'] and not shouldUpdate:
+ timeOutInSeconds = SIGNAL_TIMEOUT
+ if lastScanJob['DelayedProcessTargetPaths'] != [] and timeOutInSeconds > MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS:
+ timeOutInSeconds = MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS
+ stash.LogOnce(f"Awaiting file change-trigger, with a short timeout ({timeOutInSeconds} seconds), because of active delay path processing.")
+ else:
+ stash.LogOnce(f"Waiting for a file change-trigger. Timeout = {timeOutInSeconds} seconds.")
+ signal.wait(timeout=timeOutInSeconds)
+ if lastScanJob['DelayedProcessTargetPaths'] != []:
+ stash.TraceOnce(f"Processing delay scan for path(s) {lastScanJob['DelayedProcessTargetPaths']}")
+ break
+ elif stash.pluginSettings['turnOnScheduler'] and not shouldUpdate:
stash.TraceOnce("Checking the scheduler.")
elif shouldUpdate:
stash.LogOnce("File change trigger occurred.")
@@ -555,12 +574,39 @@ def on_any_event(event):
stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH)
TargetPaths = []
TmpTargetPaths = list(set(TmpTargetPaths))
- if TmpTargetPaths != []:
+ if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []:
stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}")
- if len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
+ if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
if not stash.DRY_RUN:
- # ToDo: Consider using create_scene, update_scene, and destroy_scene over general method metadata_scan
- stash.metadata_scan(paths=TmpTargetPaths)
+ if lastScanJob['id'] > -1:
+ lastScanJob['lastStatus'] = stash.find_job(lastScanJob['id'])
+ stash.Trace(f"Last Scan Job ({lastScanJob['id']}); result = {lastScanJob['lastStatus']}")
+ elapsedTime = time.time() - lastScanJob['timeAddedToTaskQueue']
+ if ('status' in lastScanJob['lastStatus'] and lastScanJob['lastStatus']['status'] == "FINISHED") or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
+ if elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
+ stash.Warn(f"Timeout occurred waiting for scan job {lastScanJob['id']} to complete. Elapse-Time = {elapsedTime}; Max-Time={MAX_SECONDS_WAIT_SCANJOB_COMPLETE}; Scan-Path(s) = {lastScanJob['TargetPaths']}")
+ lastScanJob['id'] = -1
+ if len(lastScanJob['DelayedProcessTargetPaths']) > 0:
+ stash.Trace(f"Adding {lastScanJob['DelayedProcessTargetPaths']} to {TmpTargetPaths}")
+ for path in lastScanJob['DelayedProcessTargetPaths']:
+ if path not in TmpTargetPaths:
+ TmpTargetPaths.append(path)
+ # TmpTargetPaths += [lastScanJob['DelayedProcessTargetPaths']]
+ stash.Trace(f"TmpTargetPaths = {TmpTargetPaths}")
+ lastScanJob['DelayedProcessTargetPaths'] = []
+ else:
+ if TmpTargetPaths != []:
+ stash.Trace(f"Adding {TmpTargetPaths} to {lastScanJob['DelayedProcessTargetPaths']}")
+ for path in TmpTargetPaths:
+ if path not in lastScanJob['DelayedProcessTargetPaths']:
+ lastScanJob['DelayedProcessTargetPaths'].append(path)
+ stash.Trace(f"lastScanJob['DelayedProcessTargetPaths'] = {lastScanJob['DelayedProcessTargetPaths']}")
+ if lastScanJob['id'] == -1:
+ stash.Trace(f"Calling metadata_scan for paths '{TmpTargetPaths}'")
+ lastScanJob['id'] = int(stash.metadata_scan(paths=TmpTargetPaths))
+ lastScanJob['TargetPaths'] = TmpTargetPaths
+ lastScanJob['timeAddedToTaskQueue'] = time.time()
+ stash.Trace(f"metadata_scan JobId = {lastScanJob['id']}, Start-Time = {lastScanJob['timeAddedToTaskQueue']}, paths = {lastScanJob['TargetPaths']}")
if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN)
if RUN_GENERATE_CONTENT:
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index dbda6312..e1aaceac 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -46,6 +46,11 @@
# Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
"timeOut": 60,
+ # Timeout in seconds for delayed processing of path scan jobs. This value should always be smaller than timeOut.
+ "timeOutDelayProcess": 3,
+ # Maximum time in seconds to wait for a scan job to complete. This is needed in case Stash gets restarted in the middle of a scan job.
+ "maxWaitTimeJobFinish": 30 * 60, # Wait 30 minutes max
+
# ApiKey only needed when Stash credentials are set and while calling FileMonitor via command line.
"apiKey" : "", # Example: "eyJabccideJIUfg1NigRInD345I6dfpXVCfd.eyJ1abcDEfGheHRlHJiJklMonPQ32FsVewtsfSIsImlhdCI6MTcyMzg2NzkwOH0.5bkHU6sfs3532dsryu1ki3iFBwnd_4AHs325yHljsPw"
# Enable to run metadata clean task after file deletion.
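The core of this change is a small state machine: at most one scan job is in flight, and paths arriving in the meantime are parked in `DelayedProcessTargetPaths` until the job finishes or times out. A condensed sketch of that logic, where `stash.find_job` and `stash.metadata_scan` are the same stashapp-tools calls used in the patch:

```` python
import time

MAX_WAIT = 30 * 60  # mirrors maxWaitTimeJobFinish

def maybe_scan(stash, last, paths):
    # Condensed sketch of the one-scan-job-at-a-time logic in filemonitor.py.
    if last["id"] > -1:
        status = stash.find_job(last["id"])
        elapsed = time.time() - last["timeAddedToTaskQueue"]
        if (status and status.get("status") == "FINISHED") or elapsed > MAX_WAIT:
            last["id"] = -1  # Previous job finished (or we gave up waiting)
            paths += [p for p in last["DelayedProcessTargetPaths"] if p not in paths]
            last["DelayedProcessTargetPaths"] = []
        else:
            # Park new paths; they are merged into the next scan once the job ends.
            for p in paths:
                if p not in last["DelayedProcessTargetPaths"]:
                    last["DelayedProcessTargetPaths"].append(p)
            return
    last["id"] = int(stash.metadata_scan(paths=paths))
    last["TargetPaths"] = paths
    last["timeAddedToTaskQueue"] = time.time()
````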
From 43acfe18c4363f29473d61051e9aff5da29324d5 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Thu, 22 Aug 2024 03:20:35 -0400
Subject: [PATCH 26/39] Tweaked delay scan path logic
---
plugins/FileMonitor/README.md | 2 +-
plugins/FileMonitor/filemonitor.py | 18 ++++++++++++++----
plugins/FileMonitor/filemonitor.yml | 2 +-
plugins/FileMonitor/filemonitor_config.py | 2 +-
4 files changed, 17 insertions(+), 7 deletions(-)
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index c639631c..5339c9ee 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.8.6 (By David Maisonave)
+# FileMonitor: Ver 0.8.7 (By David Maisonave)
FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
- Updates Stash when any file changes occurs in the Stash library.
- **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**.
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index 948088c2..f0d69106 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -407,8 +407,10 @@ def checkSchedulePending(self):
"TargetPaths": [],
"DelayedProcessTargetPaths": [],
"timeAddedToTaskQueue": None,
+ "timeOutDelayProcess": 1,
"lastStatus" : ""
}
+JOB_ENDED_STATUSES = ["FINISHED", "CANCELLED"]
def start_library_monitor():
global shouldUpdate
@@ -541,8 +543,12 @@ def on_any_event(event):
if stash.pluginSettings['turnOnScheduler']:
stashScheduler.checkSchedulePending()
timeOutInSeconds = SIGNAL_TIMEOUT
- if lastScanJob['DelayedProcessTargetPaths'] != [] and timeOutInSeconds > MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS:
- timeOutInSeconds = MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS
+ if lastScanJob['DelayedProcessTargetPaths'] != [] and timeOutInSeconds > lastScanJob['timeOutDelayProcess']:
+ if lastScanJob['timeOutDelayProcess'] < MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS:
+ lastScanJob['timeOutDelayProcess'] = lastScanJob['timeOutDelayProcess'] * 2
+ if lastScanJob['timeOutDelayProcess'] > MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS:
+ lastScanJob['timeOutDelayProcess'] = MAX_TIMEOUT_FOR_DELAY_PATH_PROCESS
+ timeOutInSeconds = lastScanJob['timeOutDelayProcess']
stash.LogOnce(f"Awaiting file change-trigger, with a short timeout ({timeOutInSeconds} seconds), because of active delay path processing.")
else:
stash.LogOnce(f"Waiting for a file change-trigger. Timeout = {timeOutInSeconds} seconds.")
@@ -580,12 +586,16 @@ def on_any_event(event):
if not stash.DRY_RUN:
if lastScanJob['id'] > -1:
lastScanJob['lastStatus'] = stash.find_job(lastScanJob['id'])
- stash.Trace(f"Last Scan Job ({lastScanJob['id']}); result = {lastScanJob['lastStatus']}")
elapsedTime = time.time() - lastScanJob['timeAddedToTaskQueue']
- if ('status' in lastScanJob['lastStatus'] and lastScanJob['lastStatus']['status'] == "FINISHED") or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
+ if 'status' not in lastScanJob['lastStatus']:
+ stash.Warn(f"Could not get a status from scan job {lastScanJob['id']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}")
+ else:
+ stash.Trace(f"Last Scan Job ({lastScanJob['id']}); Status = {lastScanJob['lastStatus']['status']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}")
+ if 'status' not in lastScanJob['lastStatus'] or lastScanJob['lastStatus']['status'] in JOB_ENDED_STATUSES or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
if elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
stash.Warn(f"Timeout occurred waiting for scan job {lastScanJob['id']} to complete. Elapse-Time = {elapsedTime}; Max-Time={MAX_SECONDS_WAIT_SCANJOB_COMPLETE}; Scan-Path(s) = {lastScanJob['TargetPaths']}")
lastScanJob['id'] = -1
+ lastScanJob['timeOutDelayProcess'] = 1
if len(lastScanJob['DelayedProcessTargetPaths']) > 0:
stash.Trace(f"Adding {lastScanJob['DelayedProcessTargetPaths']} to {TmpTargetPaths}")
for path in lastScanJob['DelayedProcessTargetPaths']:
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index 98326791..f96bf70e 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,6 +1,6 @@
name: FileMonitor
description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths.
-version: 0.8.6
+version: 0.8.7
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
settings:
recursiveDisabled:
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index e1aaceac..38ed73a0 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -47,7 +47,7 @@
# Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
"timeOut": 60,
# Timeout in seconds for delayed processing of path scan jobs. This value should always be smaller than timeOut.
- "timeOutDelayProcess": 3,
+ "timeOutDelayProcess": 32,
# Maximum time in seconds to wait for a scan job to complete. This is needed in case Stash gets restarted in the middle of a scan job.
"maxWaitTimeJobFinish": 30 * 60, # Wait 30 minutes max
From af2be30a043497835e9953e1e0e64dbdf5b9cc59 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Thu, 22 Aug 2024 03:54:34 -0400
Subject: [PATCH 27/39] Tweaked delay scan path logic
---
plugins/FileMonitor/filemonitor.py | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index f0d69106..75398d3f 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -313,7 +313,7 @@ def runPluginTask(self, task):
pass
return None
- def checkStashIsRunning(self, task):
+ def checkStashIsRunning(self, task = {}, sleepAfterStart = 10):
try:
result = stash.stash_version()
except:
@@ -349,7 +349,7 @@ def checkStashIsRunning(self, task):
stash.Error("Could not start Stash, because could not find executable Stash file '{execPath}'")
return None
result = f"Execute process PID = {stash.ExecuteProcess(args)}"
- time.sleep(10)
+ time.sleep(sleepAfterStart)
if "RunAfter" in task and len(task['RunAfter']) > 0:
for runAfterTask in task['RunAfter']:
self.runTask(runAfterTask)
@@ -585,13 +585,15 @@ def on_any_event(event):
if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
if not stash.DRY_RUN:
if lastScanJob['id'] > -1:
+ if stashScheduler:
+ stashScheduler.checkStashIsRunning()
lastScanJob['lastStatus'] = stash.find_job(lastScanJob['id'])
elapsedTime = time.time() - lastScanJob['timeAddedToTaskQueue']
- if 'status' not in lastScanJob['lastStatus']:
+ if lastScanJob['lastStatus'] == None or lastScanJob['lastStatus'] == "" or 'status' not in lastScanJob['lastStatus']:
stash.Warn(f"Could not get a status from scan job {lastScanJob['id']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}")
else:
stash.Trace(f"Last Scan Job ({lastScanJob['id']}); Status = {lastScanJob['lastStatus']['status']}; result = {lastScanJob['lastStatus']}; Elapse-Time = {elapsedTime}")
- if 'status' not in lastScanJob['lastStatus'] or lastScanJob['lastStatus']['status'] in JOB_ENDED_STATUSES or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
+ if lastScanJob['lastStatus'] == None or lastScanJob['lastStatus'] == "" or 'status' not in lastScanJob['lastStatus'] or lastScanJob['lastStatus']['status'] in JOB_ENDED_STATUSES or elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
if elapsedTime > MAX_SECONDS_WAIT_SCANJOB_COMPLETE:
stash.Warn(f"Timeout occurred waiting for scan job {lastScanJob['id']} to complete. Elapse-Time = {elapsedTime}; Max-Time={MAX_SECONDS_WAIT_SCANJOB_COMPLETE}; Scan-Path(s) = {lastScanJob['TargetPaths']}")
lastScanJob['id'] = -1
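In outline, `checkStashIsRunning` now probes Stash, relaunches it when the probe fails, sleeps for the configurable `sleepAfterStart`, and then fires any `RunAfter` tasks. A hedged sketch of that flow, where `start_stash` and `run_task` are placeholders rather than the plugin's helpers:

```` python
import time, subprocess

def start_stash(exec_path="stash-win.exe"):
    # Placeholder launcher; the plugin resolves the Stash executable path itself.
    return subprocess.Popen([exec_path])

def run_task(task):
    print(f"RunAfter task: {task}")  # Placeholder for the scheduler's runTask()

def check_stash_is_running(stash, task=None, sleep_after_start=10):
    task = task or {}
    try:
        stash.stash_version()  # Cheap probe: raises if Stash is not responding
        return
    except Exception:
        start_stash()
        time.sleep(sleep_after_start)  # Give Stash time to come up
        for run_after in task.get("RunAfter", []):
            run_task(run_after)        # e.g. Scan, Backup, Clean
````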
From 688f8c07d1f4abadf6c4d04b2b8b1e8f66471028 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 04:36:29 -0400
Subject: [PATCH 28/39] FileMonitor updates
Separated unit testing into two parts.
Increased the max log file size.
Set status logging to debug level.
Added a UI option to enable the DupFileManager delete duplicate task in the scheduler.
Added an "every" option for every day of the week in the scheduler.
Added a scheduler option to start a plugin without the Task Scheduler.
Changed the format for plugins in the scheduler.
---
plugins/FileMonitor/README.md | 90 +++----
plugins/FileMonitor/StashPluginHelper.py | 240 ++++++++++++++----
plugins/FileMonitor/filemonitor.py | 123 +++++----
plugins/FileMonitor/filemonitor.yml | 12 +-
plugins/FileMonitor/filemonitor_config.py | 14 +-
.../FileMonitor/filemonitor_self_unit_test.py | 49 ++--
.../FileMonitor/filemonitor_task_examples.py | 18 +-
7 files changed, 366 insertions(+), 180 deletions(-)
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index 5339c9ee..cca15a93 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.8.7 (By David Maisonave)
+# FileMonitor: Ver 0.9.0 (By David Maisonave)
FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
- Updates Stash when any file changes occurs in the Stash library.
- **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**.
@@ -40,18 +40,25 @@ To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor**
To configure the schedule or to add new task, edit the **task_scheduler** section in the **filemonitor_config.py** file.
```` python
"task_scheduler": [
- # To create a daily task, include each day of the week for the weekday field.
- {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
- {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Daily at 7AM)
- {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
-
- # The following tasks are scheduled for 3 days out of the week.
- {"task" : "Clean", "weekday" : "monday,wednesday,friday", "time" : "08:00"}, # Maintenance -> [Clean] (3 days per week at 8AM)
- {"task" : "Clean Generated Files", "weekday" : "tuesday,thursday,saturday", "time" : "08:00"}, # Maintenance -> [Clean Generated Files] (3 days per week at 8AM)
+ # To create a daily task, include each day of the week in the weekday field, or use "every".
+ # Optional field for task "Auto Tag" is 'paths'. For detailed usage, see example #A3 in filemonitor_task_examples.py
+ {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 5AM)
+ # Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir field. For detail usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
+ {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
+ "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
+ # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False
+ {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
+ "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM)
+ {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
# The following tasks are scheduled weekly
- {"task" : "Generate", "weekday" : "sunday", "time" : "07:00"}, # Generated Content-> [Generate] (Every Sunday at 7AM)
- {"task" : "Scan", "weekday" : "sunday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every Sunday at 3AM)
+ # Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. For detail usage, see examples #A3: in filemonitor_task_examples.py
+ {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM)
+ {"task" : "Auto Tag", "weekday" : "saturday", "time" : "03:30"}, # Auto Tag -> [Auto Tag] (Weekly) (Every saturday at 3:30AM)
+ {"task" : "Generate", "weekday" : "saturday", "time" : "04:00"}, # Generated Content-> [Generate] (Every saturday at 4AM)
+ {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
+ {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
+ {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
# To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
# The monthly field value must be 1, 2, 3, or 4.
@@ -59,60 +66,37 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
# 2 = 2nd specified weekday of the month. Example 2nd monday of the month.
# 3 = 3rd specified weekday of the month.
# 4 = 4th specified weekday of the month.
- # The following task is scheduled monthly
- {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
-
- # Example#A1: Task to call call_GQL API with custom input
- {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to valid time
-
- # Example#A2: Task to call a python script. When this task is executed, the keyword is replaced by filemonitor.py current directory.
- # The args field is NOT required.
- {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time
-
- # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exists, the paths in the Stash library is used.
- {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
- {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
- {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Generated Content-> [Generate]
-
- # Example#A4: Task which calls Migrations -> [Rename generated files]
- {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
-
- # Example#A5: The Backup task using optional field maxBackup, which overrides the UI [Max DB Backups] value
- {"task" : "Backup", "maxBackup" : 12, "weekday" : "sunday", "time" : "DISABLED"}, # Trim the DB backup files down to 12 backup files.
- {"task" : "Backup", "maxBackup" : 0, "weekday" : "sunday", "time" : "DISABLED"}, # When used with a zero value, it will make sure no file trimming will occur no matter the value of the UI [Max DB Backups]
-
- # The above weekday method is the more reliable method to schedule task, because it doesn't rely on FileMonitor running continuously (non-stop).
-
- # The below examples use frequency field method which can work with minutes and hours. A zero frequency value disables the task.
- # Note: Both seconds and days are also supported for the frequency field.
- # However, seconds is mainly used for test purposes.
- # And days usage is discourage, because it only works if FileMonitor is running for X many days non-stop.
- # The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax.
-
- # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
- {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
- # Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder.
- {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
-
- # Example#B3: Task to execute a command
- {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
+ # The Backup task is scheduled monthly
+ # Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+ # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
+ {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
+ "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
- # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
- {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
+ # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up Stash.
+ # This task only works if FileMonitor is started as a service or in command line mode.
+ # Optional fields are 'command' and 'RunAfter'. For detailed usage, see examples #C1 and #C2 in filemonitor_task_examples.py
+ {"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes
],
````
-- To add plugins to the task list, both the Plugin-ID and the plugin name is required. The plugin ID is usually the file name of the script without the extension.
- - For plugin task, optionally **validateDir** field can be included that defines the plugin sub directory, which is checked to see if it exist before running the task.
+- To add plugins to the task list, use the Plugin-ID in the "task" field. The plugin ID is usually the file name of the script without the extension.
+ - Plugin tasks have the following optional fields: taskName, taskMode, validateDir, and taskQue.
+ - The **validateDir** field can be used to define the plugin subdirectory, which is checked to see if it exists before running the task.
+ - The **taskName** field names the task to call for the associated plugin. It cannot be used with "taskQue":False.
+ - The **taskQue** field is used to call the plugin without using the Task Queue, i.e. "taskQue":False. When this field is set to False, the taskName field can NOT be used. Instead, use taskMode to identify the task to call.
+ - The **taskMode** field is used to run the plugin without using the Task Queue, so the plugin runs immediately. Be careful not to confuse taskMode with taskName. Look in the plugin's *.yml file under the **tasks** section, where both the task-name and the task-mode are defined. (See the dispatch sketch following this README diff.)
- Task can be scheduled to run monthly, weekly, hourly, and by minutes.
- The scheduler list uses two types of syntax. One is **weekday** based, and the other is **frequency** based.
- **weekday Based**
- Use the weekday based syntax for daily, weekly, and monthly schedules.
- All the weekday based methods must have a **weekday** field and a **time** field, which specifies the day(s) of the week and the time to start the task.
- **Daily**:
- - A daily task populates the weekday field with all the days of the week.
+ - A daily task populates the weekday field with all the days of the week, or with the keyword **every**.
- **Daily Example**:
- Starts a task daily at 6AM.
- `{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"},`
+ - Starts a task daily at 2PM.
+ - `{"task" : "Optimise Database", "weekday" : "every", "time" : "14:00"},`
- **Weekly**:
- **Weekly Example**:
- Starts a task weekly every monday and 9AM.
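The new plugin-task fields decide between two dispatch paths: queued (`taskName`, through Stash's Task Queue) or immediate (`taskMode`, through the runPluginOperation mutation that this patch adds to StashPluginHelper as `run_plugin`). A hedged sketch of that decision; `dispatch_plugin_task` is an illustrative name, and the queued path assumes stashapp-tools' `run_plugin_task`:

```` python
def dispatch_plugin_task(stash, task):
    # Illustrative dispatcher (not the plugin's code) for taskName/taskMode/taskQue.
    plugin_id = task["task"]
    if task.get("taskQue", True):
        # Queued path: taskName runs through Stash's Task Queue
        # (assumes stashapp-tools' run_plugin_task).
        return stash.run_plugin_task(plugin_id, task_name=task["taskName"])
    # Immediate path: taskMode bypasses the Task Queue via run_plugin (added below).
    return stash.run_plugin(plugin_id, task_mode=task["taskMode"])

# Example entry from the schedule above:
# dispatch_plugin_task(stash, {"task": "DupFileManager",
#                              "taskMode": "tag_duplicates_task", "taskQue": False})
````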
diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py
index eec93b61..6f0d3d15 100644
--- a/plugins/FileMonitor/StashPluginHelper.py
+++ b/plugins/FileMonitor/StashPluginHelper.py
@@ -5,6 +5,8 @@
from stashapi.stash_types import PhashDistance
import __main__
+_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
+
# StashPluginHelper (By David Maisonave aka Axter)
# See end of this file for example usage
# Log Features:
@@ -41,6 +43,7 @@ class StashPluginHelper(StashInterface):
STASHPATHSCONFIG = None
STASH_PATHS = []
API_KEY = None
+ excludeMergeTags = None
# printTo argument
LOG_TO_FILE = 1
@@ -62,6 +65,9 @@ class StashPluginHelper(StashInterface):
logLinePreviousHits = []
thredPool = None
STASH_INTERFACE_INIT = False
+ _mergeMetadata = None
+ encodeToUtf8 = False
+ convertToAscii = False # If set True, it takes precedence over encodeToUtf8
# Prefix message value
LEV_TRACE = "TRACE: "
@@ -84,7 +90,7 @@ def __init__(self,
debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
logFormat = LOG_FORMAT, # Plugin log line format
dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
- maxbytes = 2*1024*1024, # Max size of plugin log file
+ maxbytes = 8*1024*1024, # Max size of plugin log file
backupcount = 2, # Backup counts when log file size reaches max size
logToWrnSet = 0, # Customize the target output set which will get warning logging
logToErrSet = 0, # Customize the target output set which will get error logging
@@ -126,12 +132,9 @@ def __init__(self,
if debugTracing: self.DEBUG_TRACING = debugTracing
if config:
self.pluginConfig = config
- if 'apiKey' in self.pluginConfig and self.pluginConfig['apiKey'] != "":
- self.FRAGMENT_SERVER['ApiKey'] = self.pluginConfig['apiKey']
- if DebugTraceFieldName in self.pluginConfig:
- self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName]
- if DryRunFieldName in self.pluginConfig:
- self.DRY_RUN = self.pluginConfig[DryRunFieldName]
+ if self.Setting('apiKey', "") != "":
+ self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey')
+
if apiKey and apiKey != "":
self.FRAGMENT_SERVER['ApiKey'] = apiKey
@@ -169,6 +172,9 @@ def __init__(self,
super().__init__(self.FRAGMENT_SERVER)
self.STASH_INTERFACE_INIT = True
+ if self.STASH_URL.startswith("http://0.0.0.0:"):
+ self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
+
if self.STASH_INTERFACE_INIT:
self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
self.STASH_CONFIGURATION = self.get_configuration()["general"]
@@ -181,12 +187,11 @@ def __init__(self,
self.pluginSettings = settings
if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
- if DebugTraceFieldName in self.pluginSettings:
- self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName]
- if DryRunFieldName in self.pluginSettings:
- self.DRY_RUN = self.pluginSettings[DryRunFieldName]
if 'apiKey' in self.STASH_CONFIGURATION:
self.API_KEY = self.STASH_CONFIGURATION['apiKey']
+
+ self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
+ self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
@@ -197,7 +202,22 @@ def __init__(self,
def __del__(self):
self.thredPool.shutdown(wait=False)
- def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False):
+ def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+ if self.pluginSettings != None and name in self.pluginSettings:
+ if notEmpty == False or self.pluginSettings[name] != "":
+ return self.pluginSettings[name]
+ if self.pluginConfig != None and name in self.pluginConfig:
+ if notEmpty == False or self.pluginConfig[name] != "":
+ return self.pluginConfig[name]
+ if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+ raise Exception(f"Missing {name} from both UI settings and config file settings.")
+ return default
+
+ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
+ if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
+ logMsg = self.asc2(logMsg)
if printTo == 0:
printTo = self.log_to_norm
elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
@@ -238,7 +258,7 @@ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelSt
if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
- def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1):
+ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
if printTo == 0: printTo = self.LOG_TO_FILE
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
@@ -246,40 +266,40 @@ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1):
if self.DEBUG_TRACING or logAlways:
if logMsg == "":
logMsg = f"Line number {lineNo}..."
- self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways)
+ self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
# Log once per session. Only logs the first time called from a particular line number in the code.
- def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False):
+ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
lineNo = inspect.currentframe().f_back.f_lineno
if self.DEBUG_TRACING or logAlways:
FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
if FuncAndLineNo in self.logLinePreviousHits:
return
self.logLinePreviousHits.append(FuncAndLineNo)
- self.Trace(logMsg, printTo, logAlways, lineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
# Log INFO on first call, then do Trace on remaining calls.
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True):
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
if printTo == 0: printTo = self.LOG_TO_FILE
lineNo = inspect.currentframe().f_back.f_lineno
FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
if FuncAndLineNo in self.logLinePreviousHits:
if traceOnRemainingCalls:
- self.Trace(logMsg, printTo, logAlways, lineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
else:
self.logLinePreviousHits.append(FuncAndLineNo)
- self.Log(logMsg, printTo, logging.INFO, lineNo)
+ self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
- def Warn(self, logMsg, printTo = 0):
+ def Warn(self, logMsg, printTo = 0, toAscii = None):
if printTo == 0: printTo = self.log_to_wrn_set
lineNo = inspect.currentframe().f_back.f_lineno
- self.Log(logMsg, printTo, logging.WARN, lineNo)
+ self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
- def Error(self, logMsg, printTo = 0):
+ def Error(self, logMsg, printTo = 0, toAscii = None):
if printTo == 0: printTo = self.log_to_err_set
lineNo = inspect.currentframe().f_back.f_lineno
- self.Log(logMsg, printTo, logging.ERROR, lineNo)
-
+ self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+
def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
if printTo == 0: printTo = self.log_to_norm
if lineNo == -1:
@@ -310,10 +330,86 @@ def ExecutePythonScript(self, args, ExecDetach=True):
argsWithPython = [f"{PythonExe}"] + args
return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
- def Submit(*args, **kwargs):
- thredPool.submit(*args, **kwargs)
+ def Submit(self, *args, **kwargs):
+ return self.thredPool.submit(*args, **kwargs)
+
+ def asc2(self, data, convertToAscii=None):
+ if convertToAscii or (convertToAscii == None and self.convertToAscii):
+ return ascii(data)
+ return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than ascii function
+ # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
+ # return str(data)[2:-1] # strip out b'str'
+
+ def init_mergeMetadata(self, excludeMergeTags=None):
+ self.excludeMergeTags = excludeMergeTags
+ self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
+
+ # Must call init_mergeMetadata before calling merge_metadata
+ def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
+ if type(SrcData) is int:
+ SrcData = self.find_scene(SrcData)
+ DestData = self.find_scene(DestData)
+ return self._mergeMetadata.merge(SrcData, DestData)
+
+ def Progress(self, currentIndex, maxCount):
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ self.log.progress(progress)
+
+ def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
+ """Runs a plugin operation.
+ The operation is run immediately and does not use the job queue.
+ Args:
+ plugin_id (ID): plugin_id
+ task_mode (str, optional): Plugin task mode to perform
+ args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
+ Returns:
+ A map of the result.
+ """
+ query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
+ runPluginOperation(plugin_id: $plugin_id, args: $args)
+ }"""
+ if task_mode != None:
+ args.update({"mode" : task_mode})
+ variables = {
+ "plugin_id": plugin_id,
+ "args": args,
+ }
+ if asyn:
+ self.Submit(self.call_GQL, query, variables)
+ return f"Made asynchronous call for plugin {plugin_id}"
+ else:
+ return self.call_GQL(query, variables)
+
+ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
+ query = """
+ query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
+ findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+ ...SceneSlim
+ }
+ }
+ """
+ if fragment:
+ query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+ else:
+ query += "fragment SceneSlim on Scene { id }"
+
+ variables = { "distance": distance, "duration_diff": duration_diff }
+ result = self.call_GQL(query, variables)
+ return result['findDuplicateScenes']
+
+ # #################################################################################################
+ # The below functions extend class StashInterface with functions which are not yet in the class
+ def get_all_scenes(self):
+ query_all_scenes = """
+ query AllScenes {
+ allScenes {
+ id
+ updated_at
+ }
+ }
+ """
+ return self.call_GQL(query_all_scenes)
- # Extends class StashInterface with functions which are not yet in the class
def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
query = """
mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
@@ -355,20 +451,76 @@ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails
def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
-
- def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
- query = """
- query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
- findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
- ...SceneSlim
- }
- }
- """
- if fragment:
- query = re.sub(r'\.\.\.SceneSlim', fragment, query)
- else:
- query += "fragment SceneSlim on Scene { id }"
-
- variables = { "distance": distance, "duration_diff": duration_diff }
- result = self.call_GQL(query, variables)
- return result['findDuplicateScenes']
+
+class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
+ srcData = None
+ destData = None
+ stash = None
+ excludeMergeTags = None
+ dataDict = None
+ result = "Nothing To Merge"
+ def __init__(self, stash, excludeMergeTags=None):
+ self.stash = stash
+ self.excludeMergeTags = excludeMergeTags
+
+ def merge(self, SrcData, DestData):
+ self.srcData = SrcData
+ self.destData = DestData
+ ORG_DATA_DICT = {'id' : self.destData['id']}
+ self.dataDict = ORG_DATA_DICT.copy()
+ self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags)
+ self.mergeItems('performers', 'performer_ids', [])
+ self.mergeItems('galleries', 'gallery_ids', [])
+ self.mergeItems('movies', 'movies', [])
+ self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL)
+ self.mergeItem('studio', 'studio_id', 'id')
+ self.mergeItem('title')
+ self.mergeItem('director')
+ self.mergeItem('date')
+ self.mergeItem('details')
+ self.mergeItem('rating100')
+ self.mergeItem('code')
+ if self.dataDict != ORG_DATA_DICT:
+ self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True)
+ self.result = self.stash.update_scene(self.dataDict)
+ return self.result
+
+ def Nothing(self, Data):
+ if not Data or Data == "" or (type(Data) is str and Data.strip() == ""):
+ return True
+ return False
+
+ def mergeItem(self,fieldName, updateFieldName=None, subField=None):
+ if updateFieldName == None:
+ updateFieldName = fieldName
+ if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]):
+ if subField == None:
+ self.dataDict.update({ updateFieldName : self.srcData[fieldName]})
+ else:
+ self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]})
+ def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None):
+ dataAdded = ""
+ for item in self.srcData[fieldName]:
+ if item not in self.destData[fieldName]:
+ if NotStartWith == None or not item.startswith(NotStartWith):
+ if excludeName == None or item['name'] not in excludeName:
+ if fieldName == 'movies':
+ listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
+ dataAdded += f"{item['movie']['id']} "
+ elif updateFieldName == None:
+ listToAdd += [item]
+ dataAdded += f"{item} "
+ else:
+ listToAdd += [item['id']]
+ dataAdded += f"{item['id']} "
+ if dataAdded != "":
+ if updateFieldName == None:
+ updateFieldName = fieldName
+ else:
+ for item in self.destData[fieldName]:
+ if fieldName == 'movies':
+ listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
+ else:
+ listToAdd += [item['id']]
+ self.dataDict.update({ updateFieldName : listToAdd})
+ # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index 75398d3f..29aea88b 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -3,7 +3,7 @@
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
# Note: To call this script outside of Stash, pass argument --url and the Stash URL.
# Example: python filemonitor.py --url http://localhost:9999
-import os, sys, time, pathlib, argparse, platform, traceback
+import os, sys, time, pathlib, argparse, platform, traceback, logging
from StashPluginHelper import StashPluginHelper
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
from watchdog.observers import Observer # This is also needed for event attributes
@@ -14,8 +14,10 @@
from filemonitor_self_unit_test import self_unit_test
config['task_scheduler'] = config['task_scheduler'] + task_examples['task_scheduler']
-if self_unit_test['selfUnitTest']:
- config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler']
+if self_unit_test['selfUnitTest_repeat']:
+ config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_repeat']
+if self_unit_test['selfUnitTest_set_time']:
+ config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_set_time']
CONTINUE_RUNNING_SIG = 99
STOP_RUNNING_SIG = 32
@@ -38,6 +40,7 @@
settings = {
"recursiveDisabled": False,
"turnOnScheduler": False,
+ "turnOnSchedulerDeleteDup": False,
"zmaximumBackups": 1,
"zzdebugTracing": False
}
@@ -48,9 +51,10 @@
config=config,
logToErrSet=logToErrSet,
logToNormSet=logToNormSet,
+ maxbytes=5*1024*1024,
apiKey=parse_args.apikey
)
-stash.Status()
+stash.Status(logLevel=logging.DEBUG)
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
exitMsg = "Change success!!"
@@ -81,6 +85,7 @@
fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else []
includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
excludePathChanges = stash.pluginConfig['excludePathChanges']
+turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup']
if stash.DRY_RUN:
stash.Log("Dry run mode is enabled.")
@@ -157,29 +162,31 @@ def __init__(self):
weekDays = task['weekday'].lower()
if 'monthly' in task:
stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
+ elif task['weekday'] == "every":
+ stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every day at {task['time']}")
else:
stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}")
hasValidDay = False
- if "monday" in weekDays:
+ if "monday" in weekDays or "every" in weekDays:
schedule.every().monday.at(task['time']).do(self.runTask, task)
hasValidDay = True
- if "tuesday" in weekDays:
+ if "tuesday" in weekDays or "every" in weekDays:
schedule.every().tuesday.at(task['time']).do(self.runTask, task)
hasValidDay = True
- if "wednesday" in weekDays:
+ if "wednesday" in weekDays or "every" in weekDays:
schedule.every().wednesday.at(task['time']).do(self.runTask, task)
hasValidDay = True
- if "thursday" in weekDays:
+ if "thursday" in weekDays or "every" in weekDays:
schedule.every().thursday.at(task['time']).do(self.runTask, task)
hasValidDay = True
- if "friday" in weekDays:
+ if "friday" in weekDays or "every" in weekDays:
schedule.every().friday.at(task['time']).do(self.runTask, task)
hasValidDay = True
- if "saturday" in weekDays:
+ if "saturday" in weekDays or "every" in weekDays or "weekend" in weekDays:
schedule.every().saturday.at(task['time']).do(self.runTask, task)
hasValidDay = True
- if "sunday" in weekDays:
+ if "sunday" in weekDays or "every" in weekDays or "weekend" in weekDays:
schedule.every().sunday.at(task['time']).do(self.runTask, task)
hasValidDay = True
@@ -207,23 +214,23 @@ def runTask(self, task):
result = None
if task['task'] == "Clean":
- result = stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN)
+ result = self.jobIdOutput(stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN))
elif task['task'] == "Clean Generated Files":
- result = stash.metadata_clean_generated()
+ result = self.jobIdOutput(stash.metadata_clean_generated())
elif task['task'] == "Generate":
- result = stash.metadata_generate()
+ result = self.jobIdOutput(stash.metadata_generate())
elif task['task'] == "Backup":
- result = self.runBackupTask(task)
+ result = self.jobIdOutput(self.runBackupTask(task))
elif task['task'] == "Scan":
- result = stash.metadata_scan(paths=targetPaths)
+ result = self.jobIdOutput(stash.metadata_scan(paths=targetPaths))
elif task['task'] == "Auto Tag":
- result = stash.metadata_autotag(paths=targetPaths)
+ result = self.jobIdOutput(stash.metadata_autotag(paths=targetPaths))
elif task['task'] == "Optimise Database":
- result = stash.optimise_database()
+ result = self.jobIdOutput(stash.optimise_database())
elif task['task'] == "RenameGeneratedFiles":
- result = stash.rename_generated_files()
+ result = self.jobIdOutput(stash.rename_generated_files())
elif task['task'] == "GQL":
- result = stash.call_GQL(task['input'])
+ result = self.jobIdOutput(stash.call_GQL(task['input']))
elif task['task'] == "Log":
Msg = "Scheduled Logging (INFO)."
if 'msg' in task and task['msg'] != "":
@@ -251,11 +258,23 @@ def runTask(self, task):
elif task['task'] == "execute":
result = self.runExecuteProcessTask(task)
else:
- result = self.runPluginTask(task)
+ result = self.jobIdOutput(self.runPluginTask(task))
if result:
stash.Trace(f"Task '{task['task']}' result={result}")
+ def jobIdOutput(self, result):
+ if result == None or result == "":
+ return result
+ jobId = None
+ if type(result) is int:
+ jobId = result
+ elif str(result).isnumeric():
+ jobId = int(result)
+ else:
+ return result
+ return f"Task started with Job-ID#({jobId})"
+
def runExecuteProcessTask(self, task):
if 'command' in task and task['command'] != "":
cmd = task['command'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
@@ -284,30 +303,44 @@ def runPythonScript(self, task):
return None
def runPluginTask(self, task):
- # ToDo: Add code to check if plugin is installed.
try:
- if 'pluginId' in task and task['pluginId'] != "":
- invalidDir = False
- validDirMsg = ""
- if 'validateDir' in task and task['validateDir'] != "":
- invalidDir = True
- communityPluginPath = f"{stash.PLUGINS_PATH}{os.sep}community{os.sep}{task['validateDir']}"
- basePluginPath = f"{stash.PLUGINS_PATH}{os.sep}{task['validateDir']}"
- if os.path.exists(communityPluginPath):
- invalidDir = False
- validDirMsg = f"Valid path in {communityPluginPath}"
- elif os.path.exists(basePluginPath):
- invalidDir = False
- validDirMsg = f"Valid path in {basePluginPath}"
- if invalidDir:
- stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
- else:
- stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
- return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
+ invalidDir = False
+ validDirMsg = ""
+ if 'validateDir' in task and task['validateDir'] != "":
+ invalidDir = True
+ communityPluginPath = f"{stash.PLUGINS_PATH}{os.sep}community{os.sep}{task['validateDir']}"
+ basePluginPath = f"{stash.PLUGINS_PATH}{os.sep}{task['validateDir']}"
+ if os.path.exists(communityPluginPath):
+ invalidDir = False
+ validDirMsg = f"Valid path in {communityPluginPath}"
+ elif os.path.exists(basePluginPath):
+ invalidDir = False
+ validDirMsg = f"Valid path in {basePluginPath}"
+ if invalidDir:
+ stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
+ return None
+ if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and task['taskName'] == "Delete Duplicates") or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")):
+ stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]")
+ return None
+ # The pluginId field is only here for backward compatibility, and should not be used in future scheduler configurations
+ if 'pluginId' in task and task['pluginId'] != "": # Obsolete method
+ stash.Trace(f"Adding to Task Queue plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
+ return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
else:
- stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.")
- stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.")
- stash.LogOnce(f"If task '{task['task']}' is supposed to be a plugin, make sure to include the pluginId field in the task. task={task}")
+ taskName = None
+ taskMode = None
+ if 'taskName' in task:
+ taskName = task['taskName']
+ if 'taskMode' in task:
+ taskMode = task['taskMode']
+ if ('taskQue' in task and task['taskQue'] == False) or taskName == None:
+ stash.Log(f"Running plugin task pluginID={task['task']}, task mode = {taskMode}. {validDirMsg}")
+ # Asynchronous threading logic to call run_plugin, because it's a blocking call.
+ stash.run_plugin(plugin_id=task['task'], task_mode=taskMode, asyn=True)
+ return None
+ else:
+ stash.Trace(f"Adding to Task Queue plugin task pluginID={task['task']}, task name = {taskName}. {validDirMsg}")
+ return stash.run_plugin_task(plugin_id=task['task'], task_name=taskName)
except Exception as e:
stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}")
pass
@@ -683,7 +716,7 @@ def start_library_monitor_service():
if stash.API_KEY:
args = args + ["-a", stash.API_KEY]
stash.ExecutePythonScript(args)
-
+
if parse_args.stop or parse_args.restart or stash.PLUGIN_TASK_NAME == "stop_library_monitor":
stop_library_monitor()
if parse_args.restart:
@@ -701,7 +734,7 @@ def start_library_monitor_service():
elif not stash.CALLED_AS_STASH_PLUGIN:
try:
start_library_monitor()
- stash.Trace(f"Command line FileMonitor EXIT")
+ stash.Trace("Command line FileMonitor EXIT")
except Exception as e:
tb = traceback.format_exc()
stash.Error(f"Exception while running FileMonitor from the command line. Error: {e}\nTraceBack={tb}")
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index f96bf70e..5637ee3b 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,6 +1,6 @@
name: FileMonitor
description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.8.7
+version: 0.9.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
settings:
recursiveDisabled:
@@ -11,9 +11,13 @@ settings:
displayName: Scheduler
description: Enable to turn on the scheduler. See filemonitor_config.py for more details.
type: BOOLEAN
+ turnOnSchedulerDeleteDup:
+ displayName: Delete Duplicate Scheduler
+ description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
+ type: BOOLEAN
zmaximumBackups:
displayName: Max DB Backups
- description: When value greater than 1, will trim the number of database backup files to set value. Requires [Scheduler] enabled and backupDirectoryPath populated with path length longer than 4.
+ description: Trims the number of database backup files down to this value. Requires [Scheduler] enabled and a backupDirectoryPath longer than 4 characters.
type: NUMBER
zzdebugTracing:
displayName: Debug Tracing
@@ -29,10 +33,10 @@ tasks:
defaultArgs:
mode: start_library_monitor_service
- name: Stop Library Monitor
- description: Stops library monitoring within 2 minute.
+ description: Stops library monitoring within 2 minutes.
defaultArgs:
mode: stop_library_monitor
- name: Monitor as a Plugin
- description: Run [Library Monitor] as a plugin (*not recommended method*)
+ description: Run [Library Monitor] as a plugin (*Not recommended*)
defaultArgs:
mode: start_library_monitor
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index 38ed73a0..60824fd6 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -11,12 +11,15 @@
# The hour section in time MUST be a two digit number, and use military time format. Example: 1PM = "13:00" and 1AM = "01:00"
# Note: Look at filemonitor_task_examples.py for many example tasks with more detailed usage.
"task_scheduler": [
- # To create a daily task, include each day of the week for the weekday field.
+ # To create a daily task, list every day of the week in the weekday field, or use "every".
# Optional field for task "Auto Tag" is 'paths'. For detail usage, see example #A3: in filemonitor_task_examples.py
{"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
- # Task "Create Tags" is a plugin task. All plugin task have a REQUIRED pluginId field and an optional validateDir field. For detail usage, see examples #B1 and #B2 in filemonitor_task_examples.py
- {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser",
- "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
+ # Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir field. For detail usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
+ {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
+ "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
+ # The following task runs the DupFileManager plugin (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False.
+ {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
+ "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM)
{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
# The following tasks are scheduled weekly
@@ -37,6 +40,9 @@
# The Backup task is scheduled monthly
# Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+ # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
+ {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
+ "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
# The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
# This task only works if FileMonitor is started as a service or in command line mode.
diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py
index c30311fc..83942f46 100644
--- a/plugins/FileMonitor/filemonitor_self_unit_test.py
+++ b/plugins/FileMonitor/filemonitor_self_unit_test.py
@@ -4,39 +4,42 @@
# These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
# These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
self_unit_test = {
- "task_scheduler": [
+ "task_scheduler_repeat": [
{"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
{"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
{"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
- {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
{"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
{"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
- {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
- {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory)
+ {"task" : "Foo","taskName":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory)
{"task" : "Log", "msg" : "Testing Scheduled Log", "minutes" : 1}, # Test plugin log file
{"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
{"task" : "LogOnce", "seconds" :15}, # Test LogOnce
{"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce
- # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
{"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter
- {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
- {"task" : "Generate", "weekday" : "friday", "time" : "12:03"},
- {"task" : "Clean", "weekday" : "friday", "time" : "12:03"},
- {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"},
- {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"},
- {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags
- {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"},
- {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database...
- {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"},
- {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes...
- {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
- {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
- {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
- {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
- {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
+ # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
],
-
+ "task_scheduler_set_time": [
+ # Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled.
+ {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
+ {"task" : "Generate", "weekday" : "every", "time" : "04:01"},
+ {"task" : "Clean", "weekday" : "every", "time" : "04:01"},
+ {"task" : "Auto Tag", "weekday" : "every", "time" : "04:01"},
+ {"task" : "Optimise Database", "weekday" : "every", "time" : "04:01"},
+ {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Running plugin task: Create Tags
+ {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "04:01"}, # Does NOT run in the task queue
+ {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
+ {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "04:01"},
+ {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Optimising database...
+ {"task" : "Clean Generated Files", "weekday" : "every", "time" : "04:01"},
+ {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Migrating scene hashes...
+ {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+ {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ ],
# MUST ToDo: Always set the selfUnitTest flags to False before checking in this code!!!
- # Enable to turn on self unit test.
- "selfUnitTest": False,
+ "selfUnitTest_repeat" : False , # Enable to turn on self unit test.
+ "selfUnitTest_set_time" : False , # Enable to turn on self unit test.
}
diff --git a/plugins/FileMonitor/filemonitor_task_examples.py b/plugins/FileMonitor/filemonitor_task_examples.py
index 3cbfad23..2bd58126 100644
--- a/plugins/FileMonitor/filemonitor_task_examples.py
+++ b/plugins/FileMonitor/filemonitor_task_examples.py
@@ -30,20 +30,24 @@
# And days usage is discouraged, because it only works if FileMonitor runs non-stop for that many days.
# The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax.
- # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
- {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
+ # Example#B1: The following task shows the syntax used for a plugin. A plugin task requires the plugin-ID for the [task] field. Optional fields are taskName, taskMode, validateDir, and taskQue.
+ {"task" : "PluginId_Here", "taskName" : "Task Name or Plugin Button Name Here", "hours" : 0}, # The zero frequency value makes this task disabled.
# Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder.
- {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
+ {"task" : "PluginId_Here", "taskName" : "Task Name or Plugin Button Name Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0},
+ # Example#B3: To run a plugin WITHOUT using the Task Queue, use taskMode instead of taskName and/or add field "taskQue":False. The plugin will run immediately.
+ {"task" : "PluginId_Here", "taskMode" : "Plugin_Task_MODE", "taskQue" : False, "hours" : 0}, # Do NOT use taskName when including "taskQue":False
+ # Example#B4: When the taskName field is missing, the task always runs without using the Task Queue. The plugin will run immediately.
+ {"task" : "PluginId_Here", "hours" : 0},
- # Example#B3: Task to execute a command
+ # Example#C1: Task to execute a command
{"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
- # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
+ # Example#C2: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
{"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
- # Example#C1 Some OS may need the "command" field, which specifies the binary path.
+ # Example#D1 Some OS may need the "command" field, which specifies the binary path.
{"task" : "CheckStashIsRunning", "command" : "stash-linux-arm64v8", "minutes" :0},
- # Example#C2 RunAfter field can be used to specify task to run after starting Stash
+ # Example#D2 RunAfter field can be used to specify task to run after starting Stash
{"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0},
],
}
From a667d8786c4979138461c39c212af1777e011cbb Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 04:43:08 -0400
Subject: [PATCH 29/39] RenameFile Updates
Fixed issue where the Studio field triggered folder creation on rename.
Integrated changes to use the StashPluginHelper class, and streamlined the code.
Restored rename as the default method over move.
Removed functions which are already included in stashapi and StashPluginHelper.
---
plugins/RenameFile/README.md | 2 +-
plugins/RenameFile/StashPluginHelper.py | 526 ++++++++++++++++++++++
plugins/RenameFile/renamefile.py | 407 ++++-------------
plugins/RenameFile/renamefile.yml | 8 +-
plugins/RenameFile/renamefile_settings.py | 4 +-
5 files changed, 627 insertions(+), 320 deletions(-)
create mode 100644 plugins/RenameFile/StashPluginHelper.py
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index bab3fca7..c2f7041e 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,4 +1,4 @@
-# RenameFile: Ver 0.4.2 (By David Maisonave)
+# RenameFile: Ver 0.4.6 (By David Maisonave)
RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
- **Rename Scene File Name** (On-The-Fly)
- **Append tag names** to file name
diff --git a/plugins/RenameFile/StashPluginHelper.py b/plugins/RenameFile/StashPluginHelper.py
new file mode 100644
index 00000000..6f0d3d15
--- /dev/null
+++ b/plugins/RenameFile/StashPluginHelper.py
@@ -0,0 +1,526 @@
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+import __main__
+
+_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
+
+# StashPluginHelper (By David Maisonave aka Axter)
+ # See end of this file for example usage
+ # Log Features:
+ # Can optionally log out to multiple outputs for each Log or Trace call.
+ # Logging includes source code line number
+ # Sets a maximum plugin log file size
+ # Stash Interface Features:
+ # Gets STASH_URL value from command line argument and/or from STDIN_READ
+ # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
+ # Sets PLUGIN_ID based on the main script file name (in lower case)
+ # Gets PLUGIN_TASK_NAME value
+ # Sets pluginSettings (The plugin UI settings)
+ # Misc Features:
+ # Gets DRY_RUN value from command line argument and/or from UI and/or from config file
+ # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
+ # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
+ # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+class StashPluginHelper(StashInterface):
+ # Primary Members for external reference
+ PLUGIN_TASK_NAME = None
+ PLUGIN_ID = None
+ PLUGIN_CONFIGURATION = None
+ PLUGINS_PATH = None
+ pluginSettings = None
+ pluginConfig = None
+ STASH_URL = None
+ STASH_CONFIGURATION = None
+ JSON_INPUT = None
+ DEBUG_TRACING = False
+ DRY_RUN = False
+ CALLED_AS_STASH_PLUGIN = False
+ RUNNING_IN_COMMAND_LINE_MODE = False
+ FRAGMENT_SERVER = None
+ STASHPATHSCONFIG = None
+ STASH_PATHS = []
+ API_KEY = None
+ excludeMergeTags = None
+
+ # printTo argument
+ LOG_TO_FILE = 1
+ LOG_TO_CONSOLE = 2 # Note: Output is only visible when running in command line mode. In plugin mode, this output is lost.
+ LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+ LOG_TO_STASH = 8
+ LOG_TO_WARN = 16
+ LOG_TO_ERROR = 32
+ LOG_TO_CRITICAL = 64
+ LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+
+ # Misc class variables
+ MAIN_SCRIPT_NAME = None
+ LOG_LEVEL = logging.INFO
+ LOG_FILE_DIR = None
+ LOG_FILE_NAME = None
+ STDIN_READ = None
+ pluginLog = None
+ logLinePreviousHits = []
+ threadPool = None
+ STASH_INTERFACE_INIT = False
+ _mergeMetadata = None
+ encodeToUtf8 = False
+ convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+
+ # Prefix message value
+ LEV_TRACE = "TRACE: "
+ LEV_DBG = "DBG: "
+ LEV_INF = "INF: "
+ LEV_WRN = "WRN: "
+ LEV_ERR = "ERR: "
+ LEV_CRITICAL = "CRITICAL: "
+
+ # Default format
+ LOG_FORMAT = "[%(asctime)s] %(message)s"
+
+ # Externally modifiable variables
+ log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+ log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set the target output for normal logging
+ # By default, warning messages are sent to the Stash log.
+ log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+
+ def __init__(self,
+ debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
+ logFormat = LOG_FORMAT, # Plugin log line format
+ dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+ maxbytes = 8*1024*1024, # Max size of plugin log file
+ backupcount = 2, # Backup counts when log file size reaches max size
+ logToWrnSet = 0, # Customize the target output set which will get warning logging
+ logToErrSet = 0, # Customize the target output set which will get error logging
+ logToNormSet = 0, # Customize the target output set which will get normal logging
+ logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
+ mainScriptName = "", # The main plugin script file name (full path)
+ pluginID = "",
+ settings = None, # Default settings for UI fields
+ config = None, # From pluginName_config.py or pluginName_setting.py
+ fragmentServer = None,
+ stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
+ apiKey = None, # API Key is only needed when a username and password are set while running the script via command line
+ DebugTraceFieldName = "zzdebugTracing",
+ DryRunFieldName = "zzdryRun",
+ setStashLoggerAsPluginLogger = False):
+ self.threadPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+ if logToWrnSet: self.log_to_wrn_set = logToWrnSet
+ if logToErrSet: self.log_to_err_set = logToErrSet
+ if logToNormSet: self.log_to_norm = logToNormSet
+ if stash_url and len(stash_url): self.STASH_URL = stash_url
+ self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
+ self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
+ # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
+ self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
+ self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
+ RFH = RotatingFileHandler(
+ filename=self.LOG_FILE_NAME,
+ mode='a',
+ maxBytes=maxbytes,
+ backupCount=backupcount,
+ encoding=None,
+ delay=0
+ )
+ if fragmentServer:
+ self.FRAGMENT_SERVER = fragmentServer
+ else:
+ self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
+
+ if debugTracing: self.DEBUG_TRACING = debugTracing
+ if config:
+ self.pluginConfig = config
+ if self.Setting('apiKey', "") != "":
+ self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey')
+
+ if apiKey and apiKey != "":
+ self.FRAGMENT_SERVER['ApiKey'] = apiKey
+
+ if len(sys.argv) > 1:
+ self.RUNNING_IN_COMMAND_LINE_MODE = True
+ if not debugTracing or not stash_url:
+ for argValue in sys.argv[1:]:
+ if argValue.lower() == "--trace":
+ self.DEBUG_TRACING = True
+ elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
+ self.DRY_RUN = True
+ elif ":" in argValue and not self.STASH_URL:
+ self.STASH_URL = argValue
+ if self.STASH_URL:
+ endpointUrlArr = self.STASH_URL.split(":")
+ if len(endpointUrlArr) == 3:
+ self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
+ self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
+ self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
+ super().__init__(self.FRAGMENT_SERVER)
+ self.STASH_INTERFACE_INIT = True
+ else:
+ try:
+ self.STDIN_READ = sys.stdin.read()
+ self.CALLED_AS_STASH_PLUGIN = True
+ except:
+ pass
+ if self.STDIN_READ:
+ self.JSON_INPUT = json.loads(self.STDIN_READ)
+ if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
+ self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
+ self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
+ self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
+ super().__init__(self.FRAGMENT_SERVER)
+ self.STASH_INTERFACE_INIT = True
+
+ if self.STASH_URL.startswith("http://0.0.0.0:"):
+ self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
+
+ if self.STASH_INTERFACE_INIT:
+ self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
+ self.STASH_CONFIGURATION = self.get_configuration()["general"]
+ self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
+ if 'pluginsPath' in self.STASH_CONFIGURATION:
+ self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath']
+ for item in self.STASHPATHSCONFIG:
+ self.STASH_PATHS.append(item["path"])
+ if settings:
+ self.pluginSettings = settings
+ if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
+ self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
+ if 'apiKey' in self.STASH_CONFIGURATION:
+ self.API_KEY = self.STASH_CONFIGURATION['apiKey']
+
+ self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
+ self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
+ if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+
+ logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
+ self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
+ if setStashLoggerAsPluginLogger:
+ self.log = self.pluginLog
+
+ def __del__(self):
+ self.threadPool.shutdown(wait=False)
+
+ def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+ if self.pluginSettings != None and name in self.pluginSettings:
+ if notEmpty == False or self.pluginSettings[name] != "":
+ return self.pluginSettings[name]
+ if self.pluginConfig != None and name in self.pluginConfig:
+ if notEmpty == False or self.pluginConfig[name] != "":
+ return self.pluginConfig[name]
+ if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+ raise Exception(f"Missing {name} from both UI settings and config file settings.")
+ return default
+
+ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
+ if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
+ logMsg = self.asc2(logMsg)
+ if printTo == 0:
+ printTo = self.log_to_norm
+ elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+ logLevel = logging.ERROR
+ printTo = self.log_to_err_set
+ elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+ logLevel = logging.CRITICAL
+ printTo = self.log_to_err_set
+ elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+ logLevel = logging.WARN
+ printTo = self.log_to_wrn_set
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ LN_Str = f"[LN:{lineNo}]"
+ # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
+ if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
+ if levelStr == "": levelStr = self.LEV_DBG
+ if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.INFO or logLevel == logging.DEBUG:
+ if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
+ if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.WARN:
+ if levelStr == "": levelStr = self.LEV_WRN
+ if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.ERROR:
+ if levelStr == "": levelStr = self.LEV_ERR
+ if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.CRITICAL:
+ if levelStr == "": levelStr = self.LEV_CRITICAL
+ if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+
+ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+ if printTo == 0: printTo = self.LOG_TO_FILE
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ logLev = logging.INFO if logAlways else logging.DEBUG
+ if self.DEBUG_TRACING or logAlways:
+ if logMsg == "":
+ logMsg = f"Line number {lineNo}..."
+ self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+
+ # Log once per session. Only logs the first time called from a particular line number in the code.
+ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+ lineNo = inspect.currentframe().f_back.f_lineno
+ if self.DEBUG_TRACING or logAlways:
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ return
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+ # Log INFO on first call, then do Trace on remaining calls.
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
+ if printTo == 0: printTo = self.LOG_TO_FILE
+ lineNo = inspect.currentframe().f_back.f_lineno
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ if traceOnRemainingCalls:
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+ else:
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+
+ def Warn(self, logMsg, printTo = 0, toAscii = None):
+ if printTo == 0: printTo = self.log_to_wrn_set
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+
+ def Error(self, logMsg, printTo = 0, toAscii = None):
+ if printTo == 0: printTo = self.log_to_err_set
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+
+ def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+ if printTo == 0: printTo = self.log_to_norm
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
+ printTo, logLevel, lineNo)
+
+ def ExecuteProcess(self, args, ExecDetach=False):
+ import platform, subprocess
+ is_windows = any(platform.win32_ver())
+ pid = None
+ self.Trace(f"is_windows={is_windows} args={args}")
+ if is_windows:
+ if ExecDetach:
+ self.Trace("Executing process using Windows DETACHED_PROCESS")
+ DETACHED_PROCESS = 0x00000008
+ pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
+ else:
+ pid = subprocess.Popen(args, shell=True).pid
+ else:
+ self.Trace("Executing process using normal Popen")
+ pid = subprocess.Popen(args).pid
+ self.Trace(f"pid={pid}")
+ return pid
+
+ def ExecutePythonScript(self, args, ExecDetach=True):
+ PythonExe = f"{sys.executable}"
+ argsWithPython = [f"{PythonExe}"] + args
+ return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
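+ # Usage sketch (script name and arguments are hypothetical):
+ #   pid = stash.ExecutePythonScript(["myscript.py", "--url", "http://localhost:9999"]) # Detached by default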
+
+ def Submit(self, *args, **kwargs):
+ return self.threadPool.submit(*args, **kwargs)
+
+ def asc2(self, data, convertToAscii=None):
+ if convertToAscii or (convertToAscii == None and self.convertToAscii):
+ return ascii(data)
+ return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than ascii function
+ # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
+ # return str(data)[2:-1] # strip out b'str'
+
+ def init_mergeMetadata(self, excludeMergeTags=None):
+ self.excludeMergeTags = excludeMergeTags
+ self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
+
+ # Must call init_mergeMetadata, before calling merge_metadata
+ def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
+ if type(SrcData) is int:
+ SrcData = self.find_scene(SrcData)
+ DestData = self.find_scene(DestData)
+ return self._mergeMetadata.merge(SrcData, DestData)
+
+ def Progress(self, currentIndex, maxCount):
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ self.log.progress(progress)
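+ # For example, Progress(50, 200) reports 0.25 to the Stash progress bar; an index past maxCount is inverted so the value stays <= 1.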
+
+ def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
+ """Runs a plugin operation.
+ The operation is run immediately and does not use the job queue.
+ Args:
+ plugin_id (ID): plugin_id
+ task_mode (str, optional): Plugin task mode to perform
+ args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
+ asyn (bool, optional): When True, the call is submitted to the internal thread pool and returns immediately
+ Returns:
+ A map of the result.
+ """
+ query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
+ runPluginOperation(plugin_id: $plugin_id, args: $args)
+ }"""
+ if task_mode != None:
+ args.update({"mode" : task_mode})
+ variables = {
+ "plugin_id": plugin_id,
+ "args": args,
+ }
+ if asyn:
+ self.Submit(self.call_GQL, query, variables)
+ return f"Made asynchronous call for plugin {plugin_id}"
+ else:
+ return self.call_GQL(query, variables)
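+ # run_plugin usage sketch (plugin ID and task mode are taken from tasks elsewhere in this patch set):
+ #   stash.run_plugin("DupFileManager", task_mode="tag_duplicates_task", asyn=True) # Returns immediately
+ #   result = stash.run_plugin("DupFileManager", task_mode="tag_duplicates_task")   # Blocks until the plugin finishes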
+
+ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
+ query = """
+ query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
+ findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+ ...SceneSlim
+ }
+ }
+ """
+ if fragment:
+ query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+ else:
+ query += "fragment SceneSlim on Scene { id }"
+
+ variables = { "distance": distance, "duration_diff": duration_diff }
+ result = self.call_GQL(query, variables)
+ return result['findDuplicateScenes']
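+ # Usage sketch (the fragment fields assume the Stash Scene schema):
+ #   dups = stash.find_duplicate_scenes_diff(PhashDistance.EXACT, fragment='id files {path}')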
+
+ # #################################################################################################
+ # The functions below extend class StashInterface with functions which are not yet in the class
+ def get_all_scenes(self):
+ query_all_scenes = """
+ query AllScenes {
+ allScenes {
+ id
+ updated_at
+ }
+ }
+ """
+ return self.call_GQL(query_all_scenes)
+
+ def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
+ query = """
+ mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+ metadataAutoTag(input: $input)
+ }
+ """
+ metadata_autotag_input = {
+ "paths":paths,
+ "performers": performers,
+ "studios":studios,
+ "tags":tags,
+ }
+ result = self.call_GQL(query, {"input": metadata_autotag_input})
+ return result
+
+ def backup_database(self):
+ return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+ def optimise_database(self):
+ return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
+
+ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
+ query = """
+ mutation MetadataCleanGenerated($input: CleanGeneratedInput!) {
+ metadataCleanGenerated(input: $input)
+ }
+ """
+ clean_metadata_input = {
+ "blobFiles": blobFiles,
+ "dryRun": dryRun,
+ "imageThumbnails": imageThumbnails,
+ "markers": markers,
+ "screenshots": screenshots,
+ "sprites": sprites,
+ "transcodes": transcodes,
+ }
+ result = self.call_GQL(query, {"input": clean_metadata_input})
+ return result
+
+ def rename_generated_files(self):
+ return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
+
+class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
+ srcData = None
+ destData = None
+ stash = None
+ excludeMergeTags = None
+ dataDict = None
+ result = "Nothing To Merge"
+ def __init__(self, stash, excludeMergeTags=None):
+ self.stash = stash
+ self.excludeMergeTags = excludeMergeTags
+
+ def merge(self, SrcData, DestData):
+ self.srcData = SrcData
+ self.destData = DestData
+ ORG_DATA_DICT = {'id' : self.destData['id']}
+ self.dataDict = ORG_DATA_DICT.copy()
+ self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags)
+ self.mergeItems('performers', 'performer_ids', [])
+ self.mergeItems('galleries', 'gallery_ids', [])
+ self.mergeItems('movies', 'movies', [])
+ self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL)
+ self.mergeItem('studio', 'studio_id', 'id')
+ self.mergeItem('title')
+ self.mergeItem('director')
+ self.mergeItem('date')
+ self.mergeItem('details')
+ self.mergeItem('rating100')
+ self.mergeItem('code')
+ if self.dataDict != ORG_DATA_DICT:
+ self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True)
+ self.result = self.stash.update_scene(self.dataDict)
+ return self.result
+
+ def Nothing(self, Data):
+ if not Data or (type(Data) is str and Data.strip() == ""):
+ return True
+ return False
+
+ def mergeItem(self,fieldName, updateFieldName=None, subField=None):
+ if updateFieldName == None:
+ updateFieldName = fieldName
+ if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]):
+ if subField == None:
+ self.dataDict.update({ updateFieldName : self.srcData[fieldName]})
+ else:
+ self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]})
+ def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None):
+ dataAdded = ""
+ for item in self.srcData[fieldName]:
+ if item not in self.destData[fieldName]:
+ if NotStartWith == None or not item.startswith(NotStartWith):
+ if excludeName == None or item['name'] not in excludeName:
+ if fieldName == 'movies':
+ listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
+ dataAdded += f"{item['movie']['id']} "
+ elif updateFieldName == None:
+ listToAdd += [item]
+ dataAdded += f"{item} "
+ else:
+ listToAdd += [item['id']]
+ dataAdded += f"{item['id']} "
+ if dataAdded != "":
+ if updateFieldName == None:
+ updateFieldName = fieldName
+ else:
+ for item in self.destData[fieldName]:
+ if fieldName == 'movies':
+ listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
+ else:
+ listToAdd += [item['id']]
+ self.dataDict.update({ updateFieldName : listToAdd})
+ # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
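+
+# Example usage (a minimal sketch; the settings keys shown are the defaults used elsewhere in this patch set):
+# from StashPluginHelper import StashPluginHelper
+# settings = {"zzdebugTracing": False, "zzdryRun": False}
+# stash = StashPluginHelper(settings=settings)
+# stash.Status()
+# stash.Log("Hello from StashPluginHelper")
+# stash.Trace("Sent to the plugin log file only when zzdebugTracing is enabled")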
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index a8ab1fd4..4a00d84c 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -2,30 +2,17 @@
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
# Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer
-import os
-import sys
-import shutil
-import hashlib
-import json
+import os, sys, shutil, json, requests, hashlib, pathlib, logging
from pathlib import Path
-import requests
-import logging
-from logging.handlers import RotatingFileHandler
import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
from stashapi.stashapp import StashInterface
+from StashPluginHelper import StashPluginHelper
from renamefile_settings import config # Import settings from renamefile_settings.py
# **********************************************************************
# Constant global variables --------------------------------------------
-LOG_FILE_PATH = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
-FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
-PLUGIN_ID = Path(__file__).stem.lower()
DEFAULT_SEPERATOR = "-"
-PLUGIN_ARGS = False
-PLUGIN_ARGS_MODE = False
-WRAPPER_STYLES = config["wrapper_styles"]
-POSTFIX_STYLES = config["postfix_styles"]
# GraphQL query to fetch all scenes
QUERY_ALL_SCENES = """
query AllScenes {
@@ -35,133 +22,81 @@
}
}
"""
-RFH = RotatingFileHandler(
- filename=LOG_FILE_PATH,
- mode='a',
- maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K
- backupCount=2,
- encoding=None,
- delay=0
-)
-
# **********************************************************************
# Global variables --------------------------------------------
inputToUpdateScenePost = False
exitMsg = "Change success!!"
-# Configure local log file for plugin within plugin folder having a limited max log file size
-logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
-logger = logging.getLogger(PLUGIN_ID)
-
# **********************************************************************
# ----------------------------------------------------------------------
-# Code section to fetch variables from Plugin UI and from renamefile_settings.py
-json_input = json.loads(sys.stdin.read())
-FRAGMENT_SERVER = json_input['server_connection']
-stash = StashInterface(FRAGMENT_SERVER)
-pluginConfiguration = stash.get_configuration()["plugins"]
-
settings = {
"performerAppend": False,
"studioAppend": False,
"tagAppend": False,
"z_keyFIeldsIncludeInFileName": False,
- "zafileRenameViaRaname": False,
+ "zafileRenameViaMove": False,
"zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
"zmaximumTagKeys": 12,
"zseparators": DEFAULT_SEPERATOR,
"zzdebugTracing": False,
"zzdryRun": False,
}
-if PLUGIN_ID in pluginConfiguration:
- settings.update(pluginConfiguration[PLUGIN_ID])
+stash = StashPluginHelper(
+ settings=settings,
+ config=config,
+ maxbytes=10*1024*1024,
+ )
+stash.Status(logLevel=logging.DEBUG)
+if stash.PLUGIN_ID in stash.PLUGIN_CONFIGURATION:
+ stash.pluginSettings.update(stash.PLUGIN_CONFIGURATION[stash.PLUGIN_ID])
# ----------------------------------------------------------------------
-debugTracing = settings["zzdebugTracing"]
+WRAPPER_STYLES = config["wrapper_styles"]
+POSTFIX_STYLES = config["postfix_styles"]
# Extract dry_run setting from settings
-dry_run = settings["zzdryRun"]
+dry_run = stash.pluginSettings["zzdryRun"]
dry_run_prefix = ''
try:
- PLUGIN_ARGS = json_input['args']
- PLUGIN_ARGS_MODE = json_input['args']["mode"]
-except:
- pass
-try:
- if json_input['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice
+ if stash.JSON_INPUT['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice
except:
pass
-logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************")
-if debugTracing: logger.info("settings: %s " % (settings,))
-
-if PLUGIN_ID in pluginConfiguration:
- if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................")
- # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]:
- # if debugTracing: logger.info("Debug Tracing................")
- # try:
- # stash.configure_plugin(PLUGIN_ID, settings)
- # stash.configure_plugin("renamefile", {"zmaximumTagKeys": 12})
- # except Exception as e:
- # logger.error(f"configure_plugin failed!!! Error: {e}")
- # logger.exception('Got exception on main handler')
- # pass
- # # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
- # if debugTracing: logger.info("Debug Tracing................")
+stash.Trace("settings: %s " % (stash.pluginSettings,))
if dry_run:
- logger.info("Dry run mode is enabled.")
+ stash.Log("Dry run mode is enabled.")
dry_run_prefix = "Would've "
-if debugTracing: logger.info("Debug Tracing................")
-max_tag_keys = settings["zmaximumTagKeys"] if settings["zmaximumTagKeys"] != 0 else 12 # Need this incase use explicitly sets value to zero in UI
-if debugTracing: logger.info("Debug Tracing................")
+max_tag_keys = stash.pluginSettings["zmaximumTagKeys"] if stash.pluginSettings["zmaximumTagKeys"] != 0 else 12 # Need this in case the user explicitly sets the value to zero in the UI
# ToDo: Add split logic here to split possible string array into an array
exclude_paths = config["pathToExclude"]
exclude_paths = exclude_paths.split()
-if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................")
+stash.Trace(f"(exclude_paths={exclude_paths})")
excluded_tags = config["excludeTags"]
# Extract tag whitelist from settings
tag_whitelist = config["tagWhitelist"]
-if debugTracing: logger.info("Debug Tracing................")
if not tag_whitelist:
tag_whitelist = ""
-if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................")
+stash.Trace(f"(tag_whitelist={tag_whitelist})")
-endpointHost = json_input['server_connection']['Host']
+endpointHost = stash.JSON_INPUT['server_connection']['Host']
if endpointHost == "0.0.0.0":
endpointHost = "localhost"
-endpoint = f"{json_input['server_connection']['Scheme']}://{endpointHost}:{json_input['server_connection']['Port']}/graphql"
+endpoint = f"{stash.JSON_INPUT['server_connection']['Scheme']}://{endpointHost}:{stash.JSON_INPUT['server_connection']['Port']}/graphql"
-if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
-# Extract rename_files and move_files settings from renamefile_settings.py
-rename_files = config["rename_files"]
-move_files = False if settings["zafileRenameViaRaname"] else True
-if debugTracing: logger.info("Debug Tracing................")
-fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order
+stash.Trace(f"(endpoint={endpoint})")
+move_files = stash.pluginSettings["zafileRenameViaMove"]
+fieldKeyList = stash.pluginSettings["zfieldKeyList"] # Default Field Key List with the desired order
if not fieldKeyList or fieldKeyList == "":
fieldKeyList = DEFAULT_FIELD_KEY_LIST
fieldKeyList = fieldKeyList.replace(" ", "")
fieldKeyList = fieldKeyList.replace(";", ",")
fieldKeyList = fieldKeyList.split(",")
-if debugTracing: logger.info(f"Debug Tracing (fieldKeyList={fieldKeyList})................")
-separator = settings["zseparators"]
+stash.Trace(f"(fieldKeyList={fieldKeyList})")
+separator = stash.pluginSettings["zseparators"]
# ----------------------------------------------------------------------
# **********************************************************************
double_separator = separator + separator
-if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................")
-if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID=\"{PLUGIN_ID}\")................")
-if debugTracing: logger.info("Debug Tracing................")
-
-# Function to make GraphQL requests
-def graphql_request(query, variables=None):
- if debugTracing: logger.info("Debug Tracing................%s", query)
- data = {'query': query}
- if variables:
- data['variables'] = variables
- if debugTracing: logger.info("Debug Tracing................")
- if debugTracing: logger.info("Debug Tracing................")
- response = requests.post(endpoint, json=data)
- if debugTracing: logger.info("Debug Tracing................")
- return response.json()
+stash.Trace(f"(WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})")
# Function to replace illegal characters in filenames
def replace_illegal_characters(filename):
@@ -179,12 +114,11 @@ def should_exclude_path(scene_details):
# Function to form the new filename based on scene details and user settings
def form_filename(original_file_stem, scene_details):
- if debugTracing: logger.info("Debug Tracing................")
filename_parts = []
tag_keys_added = 0
default_title = ''
if_notitle_use_org_filename = config["if_notitle_use_org_filename"]
- include_keyField_if_in_name = settings["z_keyFIeldsIncludeInFileName"]
+ include_keyField_if_in_name = stash.pluginSettings["z_keyFIeldsIncludeInFileName"]
if if_notitle_use_org_filename:
default_title = original_file_stem
# ...................
@@ -195,44 +129,39 @@ def form_filename(original_file_stem, scene_details):
title = default_title
# ...................
- if debugTracing: logger.info(f"Debug Tracing (title=\"{title}\")................")
+ stash.Trace(f"(title=\"{title}\")")
# Function to add tag to filename
def add_tag(tag_name):
nonlocal tag_keys_added
nonlocal filename_parts
- if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
+ stash.Trace(f"(tag_name={tag_name})")
if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)):
return # Skip adding more tags if the maximum limit is reached
if tag_name in excluded_tags:
- if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})")
+ stash.Trace(f"EXCLUDING (tag_name={tag_name})")
return
# Check if the tag name is in the whitelist
if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist):
if WRAPPER_STYLES.get('tag'):
filename_parts.append(f"{WRAPPER_STYLES['tag'][0]}{tag_name}{WRAPPER_STYLES['tag'][1]}")
- if debugTracing: logger.info("Debug Tracing................")
else:
filename_parts.append(tag_name)
- if debugTracing: logger.info("Debug Tracing................")
tag_keys_added += 1
- if debugTracing: logger.info("Debug Tracing................")
else:
- logger.info(f"Skipping tag not in whitelist: {tag_name}")
- if debugTracing: logger.info(f"Debug Tracing (tag_keys_added={tag_keys_added})................")
+ stash.Log(f"Skipping tag not in whitelist: {tag_name}")
+ stash.Trace(f"(tag_keys_added={tag_keys_added})")
for key in fieldKeyList:
if key == 'studio':
- if settings["studioAppend"]:
- if debugTracing: logger.info("Debug Tracing................")
+ if stash.pluginSettings["studioAppend"]:
studio_name = scene_details.get('studio', {})
- if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................")
+ stash.Trace(f"(studio_name={studio_name})")
if studio_name:
studio_name = scene_details.get('studio', {}).get('name', '')
- if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................")
+ stash.Trace(f"(studio_name={studio_name})")
if studio_name:
studio_name += POSTFIX_STYLES.get('studio')
- if debugTracing: logger.info("Debug Tracing................")
if include_keyField_if_in_name or studio_name.lower() not in title.lower():
if WRAPPER_STYLES.get('studio'):
filename_parts.append(f"{WRAPPER_STYLES['studio'][0]}{studio_name}{WRAPPER_STYLES['studio'][1]}")
@@ -246,23 +175,21 @@ def add_tag(tag_name):
else:
filename_parts.append(title)
elif key == 'performers':
- if settings["performerAppend"]:
+ if stash.pluginSettings["performerAppend"]:
performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])])
if performers:
performers += POSTFIX_STYLES.get('performers')
- if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name})................")
+ stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name})")
if include_keyField_if_in_name or performers.lower() not in title.lower():
- if debugTracing: logger.info(f"Debug Tracing (performers={performers})................")
+ stash.Trace(f"(performers={performers})")
if WRAPPER_STYLES.get('performers'):
filename_parts.append(f"{WRAPPER_STYLES['performers'][0]}{performers}{WRAPPER_STYLES['performers'][1]}")
else:
filename_parts.append(performers)
elif key == 'date':
scene_date = scene_details.get('date', '')
- if debugTracing: logger.info("Debug Tracing................")
if scene_date:
scene_date += POSTFIX_STYLES.get('date')
- if debugTracing: logger.info("Debug Tracing................")
if WRAPPER_STYLES.get('date'):
scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}"
if scene_date not in title:
@@ -310,197 +237,53 @@ def add_tag(tag_name):
filename_parts.append(frame_rate)
elif key == 'galleries':
galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])]
- if debugTracing: logger.info("Debug Tracing................")
for gallery_name in galleries:
- if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})................")
+ stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})")
if include_keyField_if_in_name or gallery_name.lower() not in title.lower():
gallery_name += POSTFIX_STYLES.get('galleries')
if WRAPPER_STYLES.get('galleries'):
filename_parts.append(f"{WRAPPER_STYLES['galleries'][0]}{gallery_name}{WRAPPER_STYLES['galleries'][1]}")
- if debugTracing: logger.info("Debug Tracing................")
else:
filename_parts.append(gallery_name)
- if debugTracing: logger.info("Debug Tracing................")
- if debugTracing: logger.info(f"Debug Tracing (gallery_name={gallery_name})................")
- if debugTracing: logger.info("Debug Tracing................")
+ stash.Trace(f"(gallery_name={gallery_name})")
elif key == 'tags':
- if settings["tagAppend"]:
+ if stash.pluginSettings["tagAppend"]:
tags = [tag.get('name', '') for tag in scene_details.get('tags', [])]
- if debugTracing: logger.info("Debug Tracing................")
for tag_name in tags:
- if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})................")
+ stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})")
if include_keyField_if_in_name or tag_name.lower() not in title.lower():
add_tag(tag_name + POSTFIX_STYLES.get('tag'))
- if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
- if debugTracing: logger.info("Debug Tracing................")
+ stash.Trace(f"(tag_name={tag_name})")
- if debugTracing: logger.info(f"Debug Tracing (filename_parts={filename_parts})................")
+ stash.Trace(f"(filename_parts={filename_parts})")
new_filename = separator.join(filename_parts).replace(double_separator, separator)
- if debugTracing: logger.info(f"Debug Tracing (new_filename={new_filename})................")
+ stash.Trace(f"(new_filename={new_filename})")
# Check if the scene's path matches any of the excluded paths
if exclude_paths and should_exclude_path(scene_details):
- logger.info(f"Scene belongs to an excluded path. Skipping filename modification.")
+ stash.Log(f"Scene belongs to an excluded path. Skipping filename modification.")
return Path(scene_details['files'][0]['path']).name # Return the original filename
return replace_illegal_characters(new_filename)
-def find_scene_by_id(scene_id):
- query_find_scene = """
- query FindScene($scene_id: ID!) {
- findScene(id: $scene_id) {
- id
- title
- date
- files {
- path
- width
- height
- video_codec
- frame_rate
- }
- galleries {
- title
- }
- studio {
- name
- }
- performers {
- name
- }
- tags {
- name
- }
- }
- }
-"""
- scene_result = graphql_request(query_find_scene, variables={"scene_id": scene_id})
- return scene_result.get('data', {}).get('findScene')
-
-def move_or_rename_files(scene_details, new_filename, original_parent_directory):
+def rename_scene(scene_id):
global exitMsg
- studio_directory = None
- for file_info in scene_details['files']:
- path = file_info['path']
- original_path = Path(path)
-
- # Check if the file's path matches any of the excluded paths
- if exclude_paths and any(original_path.match(exclude_path) for exclude_path in exclude_paths):
- logger.info(f"File {path} belongs to an excluded path. Skipping modification.")
- continue
-
- new_path = original_parent_directory if not move_files else original_parent_directory / scene_details['studio']['name']
- if rename_files:
- new_path = new_path / (new_filename + original_path.suffix)
- try:
- if move_files:
- if studio_directory is None:
- studio_directory = original_parent_directory / scene_details['studio']['name']
- studio_directory.mkdir(parents=True, exist_ok=True)
- if rename_files: # Check if rename_files is True
- if not dry_run:
- shutil.move(original_path, new_path)
- logger.info(f"{dry_run_prefix}Moved and renamed file: {path} -> {new_path}")
- else:
- if not dry_run:
- shutil.move(original_path, new_path)
- logger.info(f"{dry_run_prefix}Moved file: {path} -> {new_path}")
- else:
- if rename_files: # Check if rename_files is True
- if not dry_run:
- original_path.rename(new_path)
- logger.info(f"{dry_run_prefix}Renamed file: {path} -> {new_path}")
- else:
- if not dry_run:
- shutil.move(original_path, new_path)
- logger.info(f"{dry_run_prefix}Moved file: {path} -> {new_path}")
- except FileNotFoundError:
- log.error(f"File not found: {path}. Skipping...")
- logger.error(f"File not found: {path}. Skipping...")
- exitMsg = "File not found"
- continue
- except OSError as e:
- log.error(f"Failed to move or rename file: {path}. Error: {e}")
- logger.error(f"Failed to move or rename file: {path}. Error: {e}")
- exitMsg = "Failed to move or rename file"
- continue
- return new_path # Return the new_path variable after the loop
-
-def perform_metadata_scan(metadata_scan_path):
- metadata_scan_path_windows = metadata_scan_path.resolve().as_posix()
- mutation_metadata_scan = """
- mutation {
- metadataScan(input: { paths: "%s" })
- }
- """ % metadata_scan_path_windows
- if debugTracing:
- logger.info(f"Attempting metadata scan mutation with path: {metadata_scan_path_windows}")
- logger.info(f"Mutation string: {mutation_metadata_scan}")
- graphql_request(mutation_metadata_scan)
-
-def rename_scene(scene_id, stash_directory):
- global exitMsg
- scene_details = find_scene_by_id(scene_id)
- if debugTracing: logger.info(f"Debug Tracing (scene_details={scene_details})................")
+ scene_details = stash.find_scene(scene_id)
+ stash.Trace(f"(scene_details1={scene_details})")
if not scene_details:
- log.error(f"Scene with ID {scene_id} not found.")
- logger.error(f"Scene with ID {scene_id} not found.")
- return
-
- if debugTracing: logger.info(f"Debug Tracing................")
-
+ stash.Error(f"Scene with ID {scene_id} not found.")
+ return None
original_file_path = scene_details['files'][0]['path']
original_parent_directory = Path(original_file_path).parent
- if debugTracing: logger.info(f"Debug Tracing (original_file_path={original_file_path})................")
-
+ stash.Trace(f"(original_file_path={original_file_path})")
# Check if the scene's path matches any of the excluded paths
if exclude_paths and any(Path(original_file_path).match(exclude_path) for exclude_path in exclude_paths):
- logger.info(f"Scene with ID {scene_id} belongs to an excluded path. Skipping modifications.")
- return
-
- if debugTracing: logger.info(f"Debug Tracing................")
- original_path_info = {'original_file_path': original_file_path,
- 'original_parent_directory': original_parent_directory}
-
- new_path_info = None
-
- original_file_stem = Path(original_file_path).stem
- original_file_name = Path(original_file_path).name
- new_filename = form_filename(original_file_stem, scene_details)
- newFilenameWithExt = new_filename + Path(original_file_path).suffix
- if debugTracing: logger.info(f"Debug Tracing (original_file_name={original_file_name})(newFilenameWithExt={newFilenameWithExt})................")
- if original_file_name == newFilenameWithExt:
- logger.info(f"Nothing to do, because new file name matches original file name: (newFilenameWithExt={newFilenameWithExt})")
- return
- if debugTracing: logger.info(f"Debug Tracing................")
-
- if rename_files:
- new_path = original_parent_directory / (newFilenameWithExt)
- new_path_info = {'new_file_path': new_path}
- if debugTracing: logger.info(f"{dry_run_prefix}New filename: {new_path}")
-
- if move_files and original_parent_directory.name != scene_details['studio']['name']:
- new_path = original_parent_directory / scene_details['studio']['name'] / (new_filename + Path(original_file_path).suffix)
- new_path_info = {'new_file_path': new_path}
- move_or_rename_files(scene_details, new_filename, original_parent_directory)
- logger.info(f"{dry_run_prefix}Moved to directory: '{new_path}'")
-
- # If rename_files is True, attempt renaming even if move_files is False
- if rename_files:
- new_file_path = original_parent_directory / (new_filename + Path(original_file_name).suffix)
- if original_file_name != new_filename:
- try:
- if not dry_run:
- os.rename(original_file_path, new_file_path)
- logger.info(f"{dry_run_prefix}Renamed file: {original_file_path} -> {new_file_path}")
- except Exception as e:
- exitMsg = "Failed to rename file"
- log.error(f"Failed to rename file: {original_file_path}. Error: {e}")
- logger.error(f"Failed to rename file: {original_file_path}. Error: {e}")
-
- metadata_scan_path = original_parent_directory
- perform_metadata_scan(metadata_scan_path)
+ stash.Log(f"Scene with ID {scene_id} belongs to an excluded path. Skipping modifications.")
+ return None
+ original_file_stem = Path(original_file_path).stem
+ original_file_name = Path(original_file_path).name
+ new_filename = form_filename(original_file_stem, scene_details)
max_filename_length = int(config["max_filename_length"])
if len(new_filename) > max_filename_length:
extension_length = len(Path(original_file_path).suffix)
@@ -508,61 +291,61 @@ def rename_scene(scene_id, stash_directory):
truncated_filename = new_filename[:max_base_filename_length]
hash_suffix = hashlib.md5(new_filename.encode()).hexdigest()
new_filename = truncated_filename + '_' + hash_suffix + Path(original_file_path).suffix
+ newFilenameWithExt = new_filename + Path(original_file_path).suffix
+ new_file_path = f"{original_parent_directory}{os.sep}{new_filename}{Path(original_file_name).suffix}"
+ stash.Trace(f"(original_file_name={original_file_name})(new_file_path={new_file_path})")
+ if original_file_name == newFilenameWithExt or original_file_name == new_filename:
+ stash.Log(f"Nothing to do, because new file name matches original file name: (newFilenameWithExt={newFilenameWithExt})")
+ return None
+    targetDidExist = os.path.isfile(new_file_path)
+ try:
+ if move_files:
+ if not dry_run:
+ shutil.move(original_file_path, new_file_path)
+ exitMsg = f"{dry_run_prefix}Moved file to '{new_file_path}' from '{original_file_path}'"
+ else:
+ if not dry_run:
+ os.rename(original_file_path, new_file_path)
+ exitMsg = f"{dry_run_prefix}Renamed file to '{new_file_path}' from '{original_file_path}'"
+ except OSError as e:
+ exitMsg = f"Failed to move/rename file: From {original_file_path} to {new_file_path}. Error: {e}"
+ stash.Error(exitMsg)
+ if not targetDidExist and os.path.isfile(new_file_path):
+ if os.path.isfile(original_file_path):
+ os.remove(original_file_path)
+ else:
+ raise
- if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................")
- return new_filename, original_path_info, new_path_info
+ stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
+ stash.Log(exitMsg)
+ return new_filename
-# Main default function for rename scene
def rename_files_task():
- if debugTracing: logger.info("Debug Tracing................")
- # Execute the GraphQL query to fetch all scenes
- scene_result = graphql_request(QUERY_ALL_SCENES)
- if debugTracing: logger.info("Debug Tracing................")
- all_scenes = scene_result.get('data', {}).get('allScenes', [])
- if debugTracing: logger.info("Debug Tracing................")
+ scene_result = stash.get_all_scenes()
+ all_scenes = scene_result['allScenes']
if not all_scenes:
- if debugTracing: logger.info("Debug Tracing................")
- log.error("No scenes found.")
- logger.error("No scenes found.")
+ stash.Error("No scenes found.")
exit()
- if debugTracing: logger.info("Debug Tracing................")
-
# Find the scene with the latest updated_at timestamp
latest_scene = max(all_scenes, key=lambda scene: scene['updated_at'])
-
# Extract the ID of the latest scene
latest_scene_id = latest_scene.get('id')
-
- # Read stash directory from renamefile_settings.py
- stash_directory = config.get('stash_directory', '')
- if debugTracing: logger.info("Debug Tracing................")
-
# Rename the latest scene and trigger metadata scan
- new_filename = rename_scene(latest_scene_id, stash_directory)
- if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................")
-
+ new_filename = rename_scene(latest_scene_id)
# Log dry run state and indicate if no changes were made
if dry_run:
- log.info("Dry run: Script executed in dry run mode. No changes were made.")
- logger.info("Dry run: Script executed in dry run mode. No changes were made.")
+ stash.Log("Dry run: Script executed in dry run mode. No changes were made.")
elif not new_filename:
- logger.info("No changes were made.")
- else:
- logger.info(f"{exitMsg}")
- return
-
-def fetch_dup_filename_tags(): # Place holder for new implementation
+ stash.Log("No changes were made.")
return
-if PLUGIN_ARGS_MODE == "fetch_dup_filename_tags":
- fetch_dup_filename_tags()
-elif PLUGIN_ARGS_MODE == "rename_files_task":
+if stash.PLUGIN_TASK_NAME == "rename_files_task":
rename_files_task()
elif inputToUpdateScenePost:
rename_files_task()
-if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
+stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
# ToDo: Wish List
- # Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan.
# Add code to get tags from duplicate filenames
\ No newline at end of file
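For orientation, the refactor above collapses the plugin's hand-rolled logging and GraphQL plumbing into the shared StashPluginHelper class (added later in this series). A minimal sketch of the resulting startup pattern, assuming renamefile_settings.py exposes the `config` dict used above:

```python
# Minimal sketch of the StashPluginHelper-based startup used by renamefile.py
# above. Assumes renamefile_settings.py exposes a `config` dict; constructor
# fields follow StashPluginHelper.py as added later in this patch series.
import logging
from StashPluginHelper import StashPluginHelper
from renamefile_settings import config

settings = {"zzdebugTracing": False, "zzdryRun": False}  # UI field defaults
stash = StashPluginHelper(settings=settings, config=config, maxbytes=10*1024*1024)
stash.Status(logLevel=logging.DEBUG)
stash.Trace(f"settings: {stash.pluginSettings}")  # goes to the plugin log when tracing is on
if stash.pluginSettings["zzdryRun"]:
    stash.Log("Dry run mode is enabled.")
```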
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index ca2c8f53..d2bcf1a3 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.3
+version: 0.4.6
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings:
performerAppend:
@@ -19,9 +19,9 @@ settings:
displayName: Include Existing Key Field
description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name.
type: BOOLEAN
- zafileRenameViaRaname:
- displayName: Rename Instead of Move
- description: Enable to rename file instead of Move file. (Not recommended for Windows OS)
+ zafileRenameViaMove:
+ displayName: Move Instead of Rename
+ description: Enable to move file instead of rename file. (Not recommended for Windows OS)
type: BOOLEAN
zfieldKeyList:
displayName: Key Fields
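The renamed zafileRenameViaMove setting selects which filesystem call rename_scene() performs. A condensed sketch of that branch, reusing the plugin's dry-run convention; `apply_new_name` is a hypothetical helper name:

```python
# Condensed sketch of the move-vs-rename branch in rename_scene() above;
# apply_new_name is a hypothetical helper name.
import os, shutil

def apply_new_name(original_file_path, new_file_path, move_files, dry_run):
    if not dry_run:
        if move_files:
            shutil.move(original_file_path, new_file_path)  # copies across filesystems if needed
        else:
            os.rename(original_file_path, new_file_path)    # same-filesystem rename only
    prefix = "Would've " if dry_run else ""
    action = "Moved" if move_files else "Renamed"
    return f"{prefix}{action} file to '{new_file_path}' from '{original_file_path}'"
```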
diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py
index 6a4445db..a84aef41 100644
--- a/plugins/RenameFile/renamefile_settings.py
+++ b/plugins/RenameFile/renamefile_settings.py
@@ -38,13 +38,11 @@
"date": '',
},
# Add tags to exclude from RenameFile.
- "excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"],
+ "excludeTags": ["DuplicateMarkForDeletion", "DuplicateMarkForSwap", "DuplicateWhitelistFile","_DuplicateMarkForDeletion","_DuplicateMarkForSwap", "_DuplicateWhitelistFile"],
# Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"
"pathToExclude": "",
# Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"
"tagWhitelist": "",
- # Define whether files should be renamed when moved
- "rename_files": True,
# Define whether the original file name should be used if title is empty
"if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False.
# Current Stash DB schema only allows maximum base file name length to be 255
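The 255-character limit mentioned above is enforced by a truncate-and-hash step in rename_scene(). A cleaned-up, self-contained sketch of that scheme (the in-tree version computes the same pieces inline):

```python
# Cleaned-up sketch of the truncate-and-hash step from rename_scene(): when a
# generated stem would push the full name past max_filename_length, keep a
# truncated stem plus an md5 digest of the original so names stay unique.
import hashlib
from pathlib import Path

def fit_filename_stem(stem: str, original_file_path: str, max_filename_length: int = 255) -> str:
    suffix_len = len(Path(original_file_path).suffix)
    if len(stem) + suffix_len <= max_filename_length:
        return stem
    # Reserve room for '_', the 32-character md5 digest, and the extension.
    max_base = max_filename_length - suffix_len - 33
    return stem[:max_base] + '_' + hashlib.md5(stem.encode()).hexdigest()
```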
From cf6147b2e8f9fc2de0ebb06ce4954f2ab65147fb Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 04:45:25 -0400
Subject: [PATCH 30/39] Adding DupFileManager plugin
---
plugins/DupFileManager/DupFileManager.py | 469 ++++++++++++++++
plugins/DupFileManager/DupFileManager.yml | 70 +++
.../DupFileManager/DupFileManager_config.py | 26 +
plugins/DupFileManager/README.md | 39 ++
plugins/DupFileManager/StashPluginHelper.py | 526 ++++++++++++++++++
plugins/DupFileManager/requirements.txt | 4 +
6 files changed, 1134 insertions(+)
create mode 100644 plugins/DupFileManager/DupFileManager.py
create mode 100644 plugins/DupFileManager/DupFileManager.yml
create mode 100644 plugins/DupFileManager/DupFileManager_config.py
create mode 100644 plugins/DupFileManager/README.md
create mode 100644 plugins/DupFileManager/StashPluginHelper.py
create mode 100644 plugins/DupFileManager/requirements.txt
diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py
new file mode 100644
index 00000000..a1b2d541
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.py
@@ -0,0 +1,469 @@
+# Description: This is a Stash plugin which manages duplicate files.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+# Note: To call this script outside of Stash, pass argument --url
+# Example: python DupFileManager.py --url http://localhost:9999 -a
+
+# Research:
+# Research following links to complete this plugin:
+# Python library for parse-reparsepoint
+# https://pypi.org/project/parse-reparsepoint/
+# pip install parse-reparsepoint
+import os, sys, time, pathlib, argparse, platform, shutil, logging
+from StashPluginHelper import StashPluginHelper
+from DupFileManager_config import config # Import config from DupFileManager_config.py
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
+parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
+parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.')
+parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.')
+parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.')
+parse_args = parser.parse_args()
+
+settings = {
+ "mergeDupFilename": False,
+ "permanentlyDelete": False,
+ "whitelistDelDupInSameFolder": False,
+ "whitelistDoTagLowResDup": False,
+ "zCleanAfterDel": False,
+ "zSwapHighRes": False,
+ "zSwapLongLength": False,
+ "zWhitelist": "",
+ "zxGraylist": "",
+ "zyBlacklist": "",
+ "zyMaxDupToProcess": 0,
+ "zzdebugTracing": False,
+}
+stash = StashPluginHelper(
+ stash_url=parse_args.stash_url,
+ debugTracing=parse_args.trace,
+ settings=settings,
+ config=config,
+ maxbytes=10*1024*1024,
+ )
+if len(sys.argv) > 1:
+ stash.Log(f"argv = {sys.argv}")
+else:
+ stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
+stash.Status(logLevel=logging.DEBUG)
+
+# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
+# stash.encodeToUtf8 = True
+
+
+LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
+listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
+addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
+mergeDupFilename = stash.Setting('mergeDupFilename')
+moveToTrashCan = not stash.Setting('permanentlyDelete')
+alternateTrashCanPath = stash.Setting('dup_path')
+whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
+whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup')
+maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
+swapHighRes = stash.Setting('zSwapHighRes')
+swapLongLength = stash.Setting('zSwapLongLength')
+significantTimeDiff = stash.Setting('significantTimeDiff')
+toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
+cleanAfterDel = stash.Setting('zCleanAfterDel')
+duration_diff = float(stash.Setting('duration_diff'))
+if duration_diff > 10:
+ duration_diff = 10
+elif duration_diff < 1:
+ duration_diff = 1
+
+# significantTimeDiff cannot be higher than 1 and should not be lower than 0.5
+if significantTimeDiff > 1:
+ significantTimeDiff = 1
+if significantTimeDiff < .5:
+ significantTimeDiff = .5
+
+
+duplicateMarkForDeletion = stash.Setting('DupFileTag')
+if duplicateMarkForDeletion == "":
+ duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
+
+duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
+if duplicateWhitelistTag == "":
+ duplicateWhitelistTag = 'DuplicateWhitelistFile'
+
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag]
+stash.init_mergeMetadata(excludeMergeTags)
+
+graylist = stash.Setting('zxGraylist').split(listSeparator)
+graylist = [item.lower() for item in graylist]
+if graylist == [""] : graylist = []
+stash.Trace(f"graylist = {graylist}")
+whitelist = stash.Setting('zWhitelist').split(listSeparator)
+whitelist = [item.lower() for item in whitelist]
+if whitelist == [""] : whitelist = []
+stash.Trace(f"whitelist = {whitelist}")
+blacklist = stash.Setting('zyBlacklist').split(listSeparator)
+blacklist = [item.lower() for item in blacklist]
+if blacklist == [""] : blacklist = []
+stash.Trace(f"blacklist = {blacklist}")
+
+def realpath(path):
+ """
+ get_symbolic_target for win
+ """
+ try:
+ import win32file
+ f = win32file.CreateFile(path, win32file.GENERIC_READ,
+ win32file.FILE_SHARE_READ, None,
+ win32file.OPEN_EXISTING,
+ win32file.FILE_FLAG_BACKUP_SEMANTICS, None)
+ target = win32file.GetFinalPathNameByHandle(f, 0)
+        # The above gives us something like u'\\\\?\\C:\\tmp\\scalarizr\\3.3.0.7978';
+        # remove the Windows extended-length path prefix if present.
+        return target[4:] if target.startswith('\\\\?\\') else target
+ except ImportError:
+ handle = open_dir(path)
+ target = get_symbolic_target(handle)
+ check_closed(handle)
+ return target
+
+def isReparsePoint(path):
+ import win32api
+ import win32con
+ from parse_reparsepoint import Navigator
+ FinalPathname = realpath(path)
+ stash.Log(f"(path='{path}') (FinalPathname='{FinalPathname}')")
+ if FinalPathname != path:
+ stash.Log(f"Symbolic link '{path}'")
+ return True
+ if not os.path.isdir(path):
+ path = os.path.dirname(path)
+ return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT
+
+def testReparsePointAndSymLink(merge=False, deleteDup=False):
+    stash.Trace(f"Debug Tracing (platform.system()={platform.system()})")
+    myTestPaths = [
+        r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4", # not a reparse point or symbolic link
+        r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts", # reparse point
+        r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4", # symbolic link
+        r"E:\Stash\plugins\RenameFile\README.md", # symbolic link
+        r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md", # symbolic link
+        r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md", # not a reparse point
+    ]
+    for myTestPath in myTestPaths:
+        stash.Log(f"Testing '{myTestPath}'")
+        if isReparsePoint(myTestPath):
+            stash.Log(f"isSymLink '{myTestPath}'")
+        else:
+            stash.Log(f"Not isSymLink '{myTestPath}'")
+    return
+
+
+def createTagId(tagName, tagName_descp, deleteIfExist = False):
+ tagId = stash.find_tags(q=tagName)
+ if len(tagId):
+ tagId = tagId[0]
+ if deleteIfExist:
+ stash.destroy_tag(int(tagId['id']))
+ else:
+ return tagId['id']
+ tagId = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True})
+ stash.Log(f"Dup-tagId={tagId['id']}")
+ return tagId['id']
+
+def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
+ details = ""
+ ORG_DATA_DICT = {'id' : sceneDetails['id']}
+ dataDict = ORG_DATA_DICT.copy()
+ doAddTag = True
+ if addPrimaryDupPathToDetails:
+ BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n"
+ if sceneDetails['details'] == "":
+ details = BaseDupStr
+ elif not sceneDetails['details'].startswith(BaseDupStr):
+ details = f"{BaseDupStr};\n{sceneDetails['details']}"
+ for tag in sceneDetails['tags']:
+ if tag['name'] == tagName:
+ doAddTag = False
+ break
+ if doAddTag:
+ dataDict.update({'tag_ids' : tagId})
+ if details != "":
+ dataDict.update({'details' : details})
+ if dataDict != ORG_DATA_DICT:
+ stash.update_scene(dataDict)
+ stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True)
+ else:
+ stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
+
+
+def isInList(listToCk, pathToCk):
+ pathToCk = pathToCk.lower()
+ for item in listToCk:
+ if pathToCk.startswith(item):
+ return True
+ return False
+
+def hasSameDir(path1, path2):
+ if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent:
+ return True
+ return False
+
+def sendToTrash(path):
+ if not os.path.isfile(path):
+ stash.Warn(f"File does not exist: {path}.", toAscii=True)
+ return False
+ try:
+ from send2trash import send2trash # Requirement: pip install Send2Trash
+ send2trash(path)
+ return True
+ except Exception as e:
+ stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True)
+ try:
+ if os.path.isfile(path):
+ os.remove(path)
+ return True
+ except Exception as e:
+ stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
+ return False
+
+def significantLessTime(durationToKeep, durationOther):
+    timeDiff = durationToKeep / durationOther
+    if timeDiff < significantTimeDiff:
+        return True
+    return False
+
+def isSwapCandidate(DupFileToKeep, DupFile):
+ # Don't move if both are in whitelist
+ if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']):
+ return False
+ if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
+ if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
+ return True
+ else:
+ stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
+ if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
+ if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
+ return True
+ return False
+
+def manageDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
+    duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so as to mark them for deletion.'
+ stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
+ dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp)
+ stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
+
+ dupWhitelistTagId = None
+ if whitelistDoTagLowResDup:
+ stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
+ duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
+ dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp)
+ stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+
+ QtyDupSet = 0
+ QtyDup = 0
+ QtyExactDup = 0
+ QtyAlmostDup = 0
+ QtyRealTimeDiff = 0
+ QtyTagForDel = 0
+ QtySkipForDel = 0
+ QtySwap = 0
+ QtyMerge = 0
+ QtyDeleted = 0
+ stash.Log("#########################################################################")
+ stash.Trace("#########################################################################")
+ stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+ DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
+ qtyResults = len(DupFileSets)
+ stash.Trace("#########################################################################")
+ for DupFileSet in DupFileSets:
+ stash.Trace(f"DupFileSet={DupFileSet}")
+ QtyDupSet+=1
+ stash.Progress(QtyDupSet, qtyResults)
+ SepLine = "---------------------------"
+ DupFileToKeep = ""
+ DupToCopyFrom = ""
+ DupFileDetailList = []
+ for DupFile in DupFileSet:
+ QtyDup+=1
+ stash.log.sl.progress(f"Scene ID = {DupFile['id']}")
+ time.sleep(2)
+ Scene = stash.find_scene(DupFile['id'])
+ sceneData = f"Scene = {Scene}"
+ stash.Trace(sceneData, toAscii=True)
+ DupFileDetailList = DupFileDetailList + [Scene]
+ if DupFileToKeep != "":
+ if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference
+ QtyExactDup+=1
+ else:
+ QtyAlmostDup+=1
+ SepLine = "***************************"
+ if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])):
+ QtyRealTimeDiff += 1
+ if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']):
+ DupFileToKeep = Scene
+ elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']):
+ DupFileToKeep = Scene
+ elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']):
+ DupFileToKeep = Scene
+ elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']):
+ DupFileToKeep = Scene
+ elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']):
+ DupFileToKeep = Scene
+ elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']):
+ DupFileToKeep = Scene
+ elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']):
+ DupFileToKeep = Scene
+ else:
+ DupFileToKeep = Scene
+ # stash.Trace(f"DupFileToKeep = {DupFileToKeep}")
+ stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True)
+
+ for DupFile in DupFileDetailList:
+ if DupFile['id'] != DupFileToKeep['id']:
+ if merge:
+ result = stash.merge_metadata(DupFile, DupFileToKeep)
+ if result != "Nothing To Merge":
+ QtyMerge += 1
+
+ if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])):
+ if isSwapCandidate(DupFileToKeep, DupFile):
+ if merge:
+ stash.merge_metadata(DupFileToKeep, DupFile)
+ if toRecycleBeforeSwap:
+ sendToTrash(DupFile['files'][0]['path'])
+ shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path'])
+ stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ DupFileToKeep = DupFile
+ QtySwap+=1
+ else:
+ stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True)
+ if dupWhitelistTagId and tagDuplicates:
+ setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep)
+ QtySkipForDel+=1
+ else:
+ if deleteDup:
+ DupFileName = DupFile['files'][0]['path']
+ DupFileNameOnly = pathlib.Path(DupFileName).stem
+ stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ if alternateTrashCanPath != "":
+ destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+ if os.path.isfile(destPath):
+ destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+ shutil.move(DupFileName, destPath)
+ elif moveToTrashCan:
+ sendToTrash(DupFileName)
+ stash.destroy_scene(DupFile['id'], delete_file=True)
+ QtyDeleted += 1
+ elif tagDuplicates:
+ if QtyTagForDel == 0:
+ stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ else:
+ stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep)
+ QtyTagForDel+=1
+ stash.Trace(SepLine)
+ if maxDupToProcess > 0 and QtyDup > maxDupToProcess:
+ break
+
+ stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+ if cleanAfterDel:
+ stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
+ stash.metadata_clean(paths=stash.STASH_PATHS)
+ stash.metadata_clean_generated()
+ stash.optimise_database()
+
+def deleteTaggedDuplicates():
+ tagId = stash.find_tags(q=duplicateMarkForDeletion)
+ if len(tagId) > 0 and 'id' in tagId[0]:
+ tagId = tagId[0]['id']
+ else:
+ stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.")
+ return
+ QtyDup = 0
+ QtyDeleted = 0
+ QtyFailedQuery = 0
+ stash.Trace("#########################################################################")
+ sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
+ qtyResults = len(sceneIDs)
+ stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
+ for sceneID in sceneIDs:
+ # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.")
+ QtyDup += 1
+ stash.Progress(QtyDup, qtyResults)
+ scene = stash.find_scene(sceneID['id'])
+        if scene is None or len(scene) == 0:
+ stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")
+ QtyFailedQuery += 1
+ continue
+ # stash.Log(f"scene={scene}")
+ DupFileName = scene['files'][0]['path']
+ DupFileNameOnly = pathlib.Path(DupFileName).stem
+ stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ if alternateTrashCanPath != "":
+ destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+ if os.path.isfile(destPath):
+ destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+ shutil.move(DupFileName, destPath)
+ elif moveToTrashCan:
+ sendToTrash(DupFileName)
+ result = stash.destroy_scene(scene['id'], delete_file=True)
+ stash.Trace(f"destroy_scene result={result} for file {DupFileName}", toAscii=True)
+ QtyDeleted += 1
+ stash.Log(f"QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
+ return
+
+def testSetDupTagOnScene(sceneId):
+ scene = stash.find_scene(sceneId)
+ stash.Log(f"scene={scene}")
+ stash.Log(f"scene tags={scene['tags']}")
+ tag_ids = [dupTagId]
+ for tag in scene['tags']:
+ tag_ids = tag_ids + [tag['id']]
+ stash.Log(f"tag_ids={tag_ids}")
+ stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids})
+
+if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+    manageDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
+    deleteTaggedDuplicates()
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
+    manageDupFiles(deleteDup=True, merge=mergeDupFilename)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif parse_args.dup_tag:
+    manageDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+    stash.Trace(f"Tag duplicate EXIT")
+elif parse_args.del_tag:
+    deleteTaggedDuplicates()
+    stash.Trace(f"Delete Tagged duplicates EXIT")
+elif parse_args.remove:
+    manageDupFiles(deleteDup=True, merge=mergeDupFilename)
+    stash.Trace(f"Delete duplicate EXIT")
+else:
+    stash.Log(f"Nothing to do!!! (PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})")
+
+
+
+
+
+stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
new file mode 100644
index 00000000..497aca1b
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -0,0 +1,70 @@
+name: DupFileManager
+description: Manages duplicate files.
+version: 0.1.2
+url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+settings:
+ mergeDupFilename:
+ displayName: Merge Duplicate Tags
+ description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+ type: BOOLEAN
+ permanentlyDelete:
+ displayName: Permanent Delete
+ description: Enable to permanently delete files, instead of moving files to trash can.
+ type: BOOLEAN
+ whitelistDelDupInSameFolder:
+ displayName: Whitelist Delete In Same Folder
+ description: Allow whitelist deletion of duplicates within the same whitelist folder.
+ type: BOOLEAN
+ whitelistDoTagLowResDup:
+ displayName: Whitelist Duplicate Tagging
+    description: Enable to tag whitelist duplicates that have lower resolution, shorter duration, or reside in the same folder.
+ type: BOOLEAN
+ zCleanAfterDel:
+ displayName: Run Clean After Delete
+ description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
+ type: BOOLEAN
+ zSwapHighRes:
+ displayName: Swap High Resolution
+ description: If enabled, swap higher resolution duplicate files to preferred path.
+ type: BOOLEAN
+ zSwapLongLength:
+ displayName: Swap Longer Duration
+    description: If enabled, swap longer duration media files to preferred path. Longer is determined by the significantTimeDiff setting.
+ type: BOOLEAN
+ zWhitelist:
+ displayName: White List
+    description: A comma separated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
+ type: STRING
+ zxGraylist:
+ displayName: Gray List
+ description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+ type: STRING
+ zyBlacklist:
+ displayName: Black List
+ description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
+ type: STRING
+ zyMaxDupToProcess:
+ displayName: Max Dup Process
+    description: Maximum number of duplicates to process. If 0, no limit.
+ type: NUMBER
+ zzdebugTracing:
+ displayName: Debug Tracing
+ description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
+ type: BOOLEAN
+exec:
+ - python
+ - "{pluginDir}/DupFileManager.py"
+interface: raw
+tasks:
+ - name: Tag Duplicates
+ description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, or black list path.
+ defaultArgs:
+ mode: tag_duplicates_task
+ - name: Delete Tagged Duplicates
+ description: Only delete scenes having DuplicateMarkForDeletion tag.
+ defaultArgs:
+ mode: delete_tagged_duplicates_task
+ - name: Delete Duplicates
+ description: Delete duplicate scenes. Performs deletion without first tagging.
+ defaultArgs:
+ mode: delete_duplicates_task
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
new file mode 100644
index 00000000..ab5b8178
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -0,0 +1,26 @@
+# Description: This is a Stash plugin which manages duplicate files.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+config = {
+ # If enabled, adds the primary duplicate path to the scene detail.
+ "addPrimaryDupPathToDetails" : True,
+ # Alternative path to move duplicate files.
+ "dup_path": "", #Example: "C:\\TempDeleteFolder"
+    # The threshold percentage below which a duration is considered significantly shorter.
+ "significantTimeDiff" : .90, # 90% threshold
+ # Valued passed to stash API function FindDuplicateScenes.
+ "duration_diff" : 10, # (default=10) A value from 1 to 10.
+ # If enabled, moves destination file to recycle bin before swapping Hi-Res file.
+ "toRecycleBeforeSwap" : True,
+    # Character used to separate items on the whitelist, blacklist, and graylist
+ "listSeparator" : ",",
+ # Tag used to tag duplicates with lower resolution, duration, and file name length.
+ "DupFileTag" : "DuplicateMarkForDeletion",
+ # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
+ "DupWhiteListTag" : "DuplicateWhitelistFile",
+
+ # The following fields are ONLY used when running DupFileManager in script mode
+ "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
+ "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
+ "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
+}
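The three endpoint_* fields above only matter in script mode. One plausible way they combine into a GraphQL URL, mirroring the endpoint construction in renamefile.py earlier in this series (the exact consumption inside StashPluginHelper may differ):

```python
# Hedged sketch: assemble the script-mode endpoint fields into a GraphQL URL.
from DupFileManager_config import config

host = config["endpoint_Host"]
if host == "0.0.0.0":
    host = "localhost"
endpoint = f"{config['endpoint_Scheme']}://{host}:{config['endpoint_Port']}/graphql"
print(endpoint)  # -> http://localhost:9999/graphql with the defaults above
```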
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
new file mode 100644
index 00000000..d5b35dfc
--- /dev/null
+++ b/plugins/DupFileManager/README.md
@@ -0,0 +1,39 @@
+# DupFileManager: Ver 0.1.2 (By David Maisonave)
+DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate files in the Stash system.
+### Features
+- Can use duplicate file names as an additional source when parsing for tag names, performers, and studios.
+  - Normally, when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
+- Delete duplicate file task with the following options:
+ - Tasks (Settings->Task->[Plugin Tasks]->DupFileManager)
+      - **Tag Duplicates** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
+ - **Delete Duplicates** - Deletes duplicate files
+ - Plugin UI options (Settings->Plugins->Plugins->[DupFileManager])
+ - Use a white-list of preferential directories to determine which duplicate will be the primary.
+ - Use a gray-list of preferential directories to determine which duplicate should be the primary.
+ - Use a black-list to determine which duplicates should be deleted first.
+ - **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
+ - **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
+ - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+ - Options available via DupFileManager_config.py
+ - **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
+ - **swapHighRes** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
+ - **swapLongLength** - When enabled, swaps scene with longer duration.
+ - **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
+
+### Requirements
+`pip install --upgrade stashapp-tools`
+`pip install pyYAML`
+`pip install Send2Trash`
+
+### Installation
+- Follow **Requirements** instructions.
+- In the stash plugin directory (C:\Users\MyUserName\\.stash\plugins), create a folder named **DupFileManager**.
+- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\DupFileManager**).
+- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins.
+
+That's it!!!
+
+### Options
+- Options are accessible in the GUI via Settings->Plugins->Plugins->[DupFileManager].
+- More options available in DupFileManager_config.py.
+
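The Send2Trash requirement listed above backs the plugin's recycle-bin-first deletion path (see sendToTrash() in DupFileManager.py). A self-contained sketch of that pattern:

```python
# Self-contained sketch of the recycle-bin-first deletion used by
# sendToTrash() in DupFileManager.py: try Send2Trash, fall back to os.remove.
import os
from send2trash import send2trash  # Requirement: pip install Send2Trash

def send_to_trash(path: str) -> bool:
    if not os.path.isfile(path):
        return False
    try:
        send2trash(path)
        return True
    except Exception:
        try:
            os.remove(path)
            return True
        except Exception:
            return False
```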
diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py
new file mode 100644
index 00000000..6f0d3d15
--- /dev/null
+++ b/plugins/DupFileManager/StashPluginHelper.py
@@ -0,0 +1,526 @@
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+import __main__
+
+_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
+
+# StashPluginHelper (By David Maisonave aka Axter)
+ # See end of this file for example usage
+ # Log Features:
+ # Can optionally log out to multiple outputs for each Log or Trace call.
+ # Logging includes source code line number
+ # Sets a maximum plugin log file size
+ # Stash Interface Features:
+ # Gets STASH_URL value from command line argument and/or from STDIN_READ
+ # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
+ # Sets PLUGIN_ID based on the main script file name (in lower case)
+ # Gets PLUGIN_TASK_NAME value
+ # Sets pluginSettings (The plugin UI settings)
+ # Misc Features:
+ # Gets DRY_RUN value from command line argument and/or from UI and/or from config file
+ # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
+ # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
+ # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+class StashPluginHelper(StashInterface):
+ # Primary Members for external reference
+ PLUGIN_TASK_NAME = None
+ PLUGIN_ID = None
+ PLUGIN_CONFIGURATION = None
+ PLUGINS_PATH = None
+ pluginSettings = None
+ pluginConfig = None
+ STASH_URL = None
+ STASH_CONFIGURATION = None
+ JSON_INPUT = None
+ DEBUG_TRACING = False
+ DRY_RUN = False
+ CALLED_AS_STASH_PLUGIN = False
+ RUNNING_IN_COMMAND_LINE_MODE = False
+ FRAGMENT_SERVER = None
+ STASHPATHSCONFIG = None
+ STASH_PATHS = []
+ API_KEY = None
+ excludeMergeTags = None
+
+ # printTo argument
+ LOG_TO_FILE = 1
+    LOG_TO_CONSOLE = 2 # Note: Output is only visible when running in command-line mode. In plugin mode, this output is lost.
+ LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+ LOG_TO_STASH = 8
+ LOG_TO_WARN = 16
+ LOG_TO_ERROR = 32
+ LOG_TO_CRITICAL = 64
+ LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+
+ # Misc class variables
+ MAIN_SCRIPT_NAME = None
+ LOG_LEVEL = logging.INFO
+ LOG_FILE_DIR = None
+ LOG_FILE_NAME = None
+ STDIN_READ = None
+ pluginLog = None
+ logLinePreviousHits = []
+ thredPool = None
+ STASH_INTERFACE_INIT = False
+ _mergeMetadata = None
+ encodeToUtf8 = False
+ convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+
+ # Prefix message value
+ LEV_TRACE = "TRACE: "
+ LEV_DBG = "DBG: "
+ LEV_INF = "INF: "
+ LEV_WRN = "WRN: "
+ LEV_ERR = "ERR: "
+ LEV_CRITICAL = "CRITICAL: "
+
+ # Default format
+ LOG_FORMAT = "[%(asctime)s] %(message)s"
+
+ # Externally modifiable variables
+ log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+ log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set the target output for normal logging
+ # A warning message goes to both the plugin log file and Stash when it is sent to the Stash log.
+ log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+
+ def __init__(self,
+ debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
+ logFormat = LOG_FORMAT, # Plugin log line format
+ dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+ maxbytes = 8*1024*1024, # Max size of plugin log file
+ backupcount = 2, # Backup counts when log file size reaches max size
+ logToWrnSet = 0, # Customize the target output set which will get warning logging
+ logToErrSet = 0, # Customize the target output set which will get error logging
+ logToNormSet = 0, # Customize the target output set which will get normal logging
+ logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
+ mainScriptName = "", # The main plugin script file name (full path)
+ pluginID = "",
+ settings = None, # Default settings for UI fields
+ config = None, # From pluginName_config.py or pluginName_setting.py
+ fragmentServer = None,
+ stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
+ apiKey = None, # API Key only needed when username and password set while running script via command line
+ DebugTraceFieldName = "zzdebugTracing",
+ DryRunFieldName = "zzdryRun",
+ setStashLoggerAsPluginLogger = False):
+ self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+ if logToWrnSet: self.log_to_wrn_set = logToWrnSet
+ if logToErrSet: self.log_to_err_set = logToErrSet
+ if logToNormSet: self.log_to_norm = logToNormSet
+ if stash_url and len(stash_url): self.STASH_URL = stash_url
+ self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
+ self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
+ # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
+ self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
+ self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
+ RFH = RotatingFileHandler(
+ filename=self.LOG_FILE_NAME,
+ mode='a',
+ maxBytes=maxbytes,
+ backupCount=backupcount,
+ encoding=None,
+ delay=0
+ )
+ if fragmentServer:
+ self.FRAGMENT_SERVER = fragmentServer
+ else:
+ self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
+
+ if debugTracing: self.DEBUG_TRACING = debugTracing
+ if config:
+ self.pluginConfig = config
+ if self.Setting('apiKey', "") != "":
+ self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey')
+
+
+ if apiKey and apiKey != "":
+ self.FRAGMENT_SERVER['ApiKey'] = apiKey
+
+ if len(sys.argv) > 1:
+ self.RUNNING_IN_COMMAND_LINE_MODE = True
+ if not debugTracing or not stash_url:
+ for argValue in sys.argv[1:]:
+ if argValue.lower() == "--trace":
+ self.DEBUG_TRACING = True
+ elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
+ self.DRY_RUN = True
+ elif ":" in argValue and not self.STASH_URL:
+ self.STASH_URL = argValue
+ if self.STASH_URL:
+ endpointUrlArr = self.STASH_URL.split(":")
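+ # e.g. "http://localhost:9999" -> ["http", "//localhost", "9999"] -> Scheme="http", Host="localhost", Port="9999"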
+ if len(endpointUrlArr) == 3:
+ self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
+ self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
+ self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
+ super().__init__(self.FRAGMENT_SERVER)
+ self.STASH_INTERFACE_INIT = True
+ else:
+ try:
+ self.STDIN_READ = sys.stdin.read()
+ self.CALLED_AS_STASH_PLUGIN = True
+ except:
+ pass
+ if self.STDIN_READ:
+ self.JSON_INPUT = json.loads(self.STDIN_READ)
+ if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
+ self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
+ self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
+ self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
+ super().__init__(self.FRAGMENT_SERVER)
+ self.STASH_INTERFACE_INIT = True
+
+ if self.STASH_URL.startswith("http://0.0.0.0:"):
+ self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
+
+ if self.STASH_INTERFACE_INIT:
+ self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
+ self.STASH_CONFIGURATION = self.get_configuration()["general"]
+ self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
+ if 'pluginsPath' in self.STASH_CONFIGURATION:
+ self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath']
+ for item in self.STASHPATHSCONFIG:
+ self.STASH_PATHS.append(item["path"])
+ if settings:
+ self.pluginSettings = settings
+ if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
+ self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
+ if 'apiKey' in self.STASH_CONFIGURATION:
+ self.API_KEY = self.STASH_CONFIGURATION['apiKey']
+
+ self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
+ self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
+ if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+
+ logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
+ self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
+ if setStashLoggerAsPluginLogger:
+ self.log = self.pluginLog
+
+ def __del__(self):
+ self.thredPool.shutdown(wait=False)
+
+ def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+ if self.pluginSettings != None and name in self.pluginSettings:
+ if notEmpty == False or self.pluginSettings[name] != "":
+ return self.pluginSettings[name]
+ if self.pluginConfig != None and name in self.pluginConfig:
+ if notEmpty == False or self.pluginConfig[name] != "":
+ return self.pluginConfig[name]
+ if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+ raise Exception(f"Missing {name} from both UI settings and config file settings.")
+ return default
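+ # Illustrative usage (assumes the plugin defines a "zmaximumBackups" setting): maxBackups = stash.Setting("zmaximumBackups", 0)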
+
+ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
+ if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
+ logMsg = self.asc2(logMsg)
+ if printTo == 0:
+ printTo = self.log_to_norm
+ elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+ logLevel = logging.ERROR
+ printTo = self.log_to_err_set
+ elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+ logLevel = logging.CRITICAL
+ printTo = self.log_to_err_set
+ elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+ logLevel = logging.WARN
+ printTo = self.log_to_wrn_set
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ LN_Str = f"[LN:{lineNo}]"
+ # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
+ if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
+ if levelStr == "": levelStr = self.LEV_DBG
+ if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.INFO or logLevel == logging.DEBUG:
+ if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
+ if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.WARN:
+ if levelStr == "": levelStr = self.LEV_WRN
+ if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.ERROR:
+ if levelStr == "": levelStr = self.LEV_ERR
+ if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.CRITICAL:
+ if levelStr == "": levelStr = self.LEV_CRITICAL
+ if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+
+ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+ if printTo == 0: printTo = self.LOG_TO_FILE
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ logLev = logging.INFO if logAlways else logging.DEBUG
+ if self.DEBUG_TRACING or logAlways:
+ if logMsg == "":
+ logMsg = f"Line number {lineNo}..."
+ self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+
+ # Log once per session. Only logs the first time called from a particular line number in the code.
+ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+ lineNo = inspect.currentframe().f_back.f_lineno
+ if self.DEBUG_TRACING or logAlways:
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ return
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+ # Log INFO on first call, then do Trace on remaining calls.
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
+ if printTo == 0: printTo = self.LOG_TO_FILE
+ lineNo = inspect.currentframe().f_back.f_lineno
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ if traceOnRemainingCalls:
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+ else:
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+
+ def Warn(self, logMsg, printTo = 0, toAscii = None):
+ if printTo == 0: printTo = self.log_to_wrn_set
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+
+ def Error(self, logMsg, printTo = 0, toAscii = None):
+ if printTo == 0: printTo = self.log_to_err_set
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+
+ def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+ if printTo == 0: printTo = self.log_to_norm
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
+ printTo, logLevel, lineNo)
+
+ def ExecuteProcess(self, args, ExecDetach=False):
+ import platform, subprocess
+ is_windows = any(platform.win32_ver())
+ pid = None
+ self.Trace(f"is_windows={is_windows} args={args}")
+ if is_windows:
+ if ExecDetach:
+ self.Trace("Executing process using Windows DETACHED_PROCESS")
+ DETACHED_PROCESS = 0x00000008
+ pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
+ else:
+ pid = subprocess.Popen(args, shell=True).pid
+ else:
+ self.Trace("Executing process using normal Popen")
+ pid = subprocess.Popen(args).pid
+ self.Trace(f"pid={pid}")
+ return pid
+
+ def ExecutePythonScript(self, args, ExecDetach=True):
+ PythonExe = f"{sys.executable}"
+ argsWithPython = [f"{PythonExe}"] + args
+ return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
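+ # Illustrative usage (hypothetical script name): pid = stash.ExecutePythonScript(["myscript.py", "--url", stash.STASH_URL])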
+
+ def Submit(self, *args, **kwargs):
+ return self.thredPool.submit(*args, **kwargs)
+
+ def asc2(self, data, convertToAscii=None):
+ if convertToAscii or (convertToAscii == None and self.convertToAscii):
+ return ascii(data)
+ return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than ascii function
+ # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
+ # return str(data)[2:-1] # strip out b'str'
+
+ def init_mergeMetadata(self, excludeMergeTags=None):
+ self.excludeMergeTags = excludeMergeTags
+ self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
+
+ # Must call init_mergeMetadata, before calling merge_metadata
+ def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
+ if type(SrcData) is int:
+ SrcData = self.find_scene(SrcData)
+ DestData = self.find_scene(DestData)
+ return self._mergeMetadata.merge(SrcData, DestData)
+
+ def Progress(self, currentIndex, maxCount):
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ self.log.progress(progress)
+
+ def run_plugin(self, plugin_id, task_mode=None, args:dict=None, asyn=False):
+ """Runs a plugin operation.
+ The operation is run immediately and does not use the job queue.
+ Args:
+ plugin_id (ID): plugin_id
+ task_mode (str, optional): Plugin task to perform
+ args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
+ Returns:
+ A map of the result.
+ """
+ query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
+ runPluginOperation(plugin_id: $plugin_id, args: $args)
+ }"""
+ if args is None: # Avoid a shared mutable default dict, which would leak "mode" across calls
+ args = {}
+ if task_mode != None:
+ args.update({"mode" : task_mode})
+ variables = {
+ "plugin_id": plugin_id,
+ "args": args,
+ }
+ if asyn:
+ self.Submit(self.call_GQL, query, variables)
+ return f"Made asynchronous call for plugin {plugin_id}"
+ else:
+ return self.call_GQL(query, variables)
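+ # Illustrative usage: stash.run_plugin("DupFileManager", task_mode="tag_duplicates_task", asyn=True)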
+
+ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
+ query = """
+ query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
+ findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+ ...SceneSlim
+ }
+ }
+ """
+ if fragment:
+ query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+ else:
+ query += "fragment SceneSlim on Scene { id }"
+
+ variables = { "distance": distance, "duration_diff": duration_diff }
+ result = self.call_GQL(query, variables)
+ return result['findDuplicateScenes']
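+ # Illustrative usage: dupGroups = stash.find_duplicate_scenes_diff(fragment="id files {path}", duration_diff=10.00)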
+
+ # #################################################################################################
+ # The functions below extend class StashInterface with functions which are not yet in the class
+ def get_all_scenes(self):
+ query_all_scenes = """
+ query AllScenes {
+ allScenes {
+ id
+ updated_at
+ }
+ }
+ """
+ return self.call_GQL(query_all_scenes)
+
+ def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
+ query = """
+ mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+ metadataAutoTag(input: $input)
+ }
+ """
+ metadata_autotag_input = {
+ "paths":paths,
+ "performers": performers,
+ "studios":studios,
+ "tags":tags,
+ }
+ result = self.call_GQL(query, {"input": metadata_autotag_input})
+ return result
+
+ def backup_database(self):
+ return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+ def optimise_database(self):
+ return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
+
+ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
+ query = """
+ mutation MetadataCleanGenerated($input: CleanGeneratedInput!) {
+ metadataCleanGenerated(input: $input)
+ }
+ """
+ clean_metadata_input = {
+ "blobFiles": blobFiles,
+ "dryRun": dryRun,
+ "imageThumbnails": imageThumbnails,
+ "markers": markers,
+ "screenshots": screenshots,
+ "sprites": sprites,
+ "transcodes": transcodes,
+ }
+ result = self.call_GQL(query, {"input": clean_metadata_input})
+ return result
+
+ def rename_generated_files(self):
+ return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
+
+class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
+ srcData = None
+ destData = None
+ stash = None
+ excludeMergeTags = None
+ dataDict = None
+ result = "Nothing To Merge"
+ def __init__(self, stash, excludeMergeTags=None):
+ self.stash = stash
+ self.excludeMergeTags = excludeMergeTags
+
+ def merge(self, SrcData, DestData):
+ self.srcData = SrcData
+ self.destData = DestData
+ ORG_DATA_DICT = {'id' : self.destData['id']}
+ self.dataDict = ORG_DATA_DICT.copy()
+ self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags)
+ self.mergeItems('performers', 'performer_ids', [])
+ self.mergeItems('galleries', 'gallery_ids', [])
+ self.mergeItems('movies', 'movies', [])
+ self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL)
+ self.mergeItem('studio', 'studio_id', 'id')
+ self.mergeItem('title')
+ self.mergeItem('director')
+ self.mergeItem('date')
+ self.mergeItem('details')
+ self.mergeItem('rating100')
+ self.mergeItem('code')
+ if self.dataDict != ORG_DATA_DICT:
+ self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True)
+ self.result = self.stash.update_scene(self.dataDict)
+ return self.result
+
+ def Nothing(self, Data):
+ if not Data or Data == "" or (type(Data) is str and Data.strip() == ""):
+ return True
+ return False
+
+ def mergeItem(self,fieldName, updateFieldName=None, subField=None):
+ if updateFieldName == None:
+ updateFieldName = fieldName
+ if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]):
+ if subField == None:
+ self.dataDict.update({ updateFieldName : self.srcData[fieldName]})
+ else:
+ self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]})
+ def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None):
+ dataAdded = ""
+ for item in self.srcData[fieldName]:
+ if item not in self.destData[fieldName]:
+ if NotStartWith == None or not item.startswith(NotStartWith):
+ if excludeName == None or item['name'] not in excludeName:
+ if fieldName == 'movies':
+ listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
+ dataAdded += f"{item['movie']['id']} "
+ elif updateFieldName == None:
+ listToAdd += [item]
+ dataAdded += f"{item} "
+ else:
+ listToAdd += [item['id']]
+ dataAdded += f"{item['id']} "
+ if dataAdded != "":
+ if updateFieldName == None:
+ updateFieldName = fieldName
+ else:
+ for item in self.destData[fieldName]:
+ if fieldName == 'movies':
+ listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
+ else:
+ listToAdd += [item['id']]
+ self.dataDict.update({ updateFieldName : listToAdd})
+ # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
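+
+# Example usage (a minimal sketch; the settings keys and tag name below are illustrative assumptions):
+# from StashPluginHelper import StashPluginHelper
+# settings = {"zzdryRun": False, "zzdebugTracing": False}
+# stash = StashPluginHelper(settings=settings)
+# stash.Status() # Logs the detected runtime state (plugin mode vs command line, URLs, etc.)
+# stash.Log("Hello from my plugin")
+# stash.Trace("Only logged when debug tracing is enabled")
+# stash.init_mergeMetadata(excludeMergeTags=["DuplicateMarkForDeletion"])
+# stash.merge_metadata(srcSceneID, destSceneID) # Accepts scene IDs or scene metadata dicts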
diff --git a/plugins/DupFileManager/requirements.txt b/plugins/DupFileManager/requirements.txt
new file mode 100644
index 00000000..d503550d
--- /dev/null
+++ b/plugins/DupFileManager/requirements.txt
@@ -0,0 +1,4 @@
+stashapp-tools >= 0.2.50
+pyYAML
+watchdog
+Send2Trash
\ No newline at end of file
From 41031c0bdc0b7f1eb561ef93d4f550cd74790198 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 05:02:58 -0400
Subject: [PATCH 31/39] prettier changes
---
plugins/DupFileManager/DupFileManager.yml | 2 +-
plugins/DupFileManager/README.md | 9 +++--
plugins/FileMonitor/README.md | 41 +++++++++++++++--------
plugins/FileMonitor/filemonitor.yml | 2 +-
4 files changed, 36 insertions(+), 18 deletions(-)
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index 497aca1b..c75f561f 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -5,7 +5,7 @@ url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileMan
settings:
mergeDupFilename:
displayName: Merge Duplicate Tags
- description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+ description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
type: BOOLEAN
permanentlyDelete:
displayName: Permanent Delete
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index d5b35dfc..82e3fb7d 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -1,6 +1,9 @@
# DupFileManager: Ver 0.1.2 (By David Maisonave)
+
DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate file in the Stash system.
+
### Features
+
- Can merge potential source in the duplicate file names for tag names, performers, and studios.
- Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
- Delete duplicate file task with the following options:
@@ -13,7 +16,7 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
- Use a black-list to determine which duplicates should be deleted first.
- **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
- **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
- - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+ - **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
- Options available via DupFileManager_config.py
- **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
- **swapHighRes** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
@@ -21,11 +24,13 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
- **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
### Requirements
+
`pip install --upgrade stashapp-tools`
`pip install pyYAML`
`pip install Send2Trash`
### Installation
+
- Follow **Requirements** instructions.
- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **DupFileManager**.
- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\DupFileManager**).
@@ -34,6 +39,6 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
That's it!!!
### Options
+
- Options are accessible in the GUI via Settings->Plugins->Plugins->[DupFileManager].
- More options available in DupFileManager_config.py.
-
diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
index cca15a93..c801ee28 100644
--- a/plugins/FileMonitor/README.md
+++ b/plugins/FileMonitor/README.md
@@ -1,19 +1,24 @@
# FileMonitor: Ver 0.9.0 (By David Maisonave)
+
FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
+
- Updates Stash when any file changes occurs in the Stash library.
- **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**.
## Starting FileMonitor from the UI
+
From the GUI, FileMonitor can be started as a service or as a plugin. The recommended method is to start it as a service. When started as a service, it will jump on the Task Queue momentarily, and then disappear as it starts running in the background.
+
- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor Service] button.
- ![FileMonitorService](https://github.com/user-attachments/assets/b12aeca9-37a8-447f-90da-26e9440735ad)
- **Important Note**: At first, this will show up as a plugin in the Task Queue momentarily. It will then disappear from the Task Queue and run in the background as a service.
- To stop FileMonitor click on [Stop Library Monitor] button.
- The **[Monitor as a Plugin]** option is mainly available for backwards compatibility and for test purposes.
-
## Using FileMonitor as a script
+
**FileMonitor** can be called as a standalone script.
+
- To start monitoring call the script and pass --url and the Stash URL.
- python filemonitor.py --url http://localhost:9999
- To stop **FileMonitor**, pass argument **--stop**.
@@ -24,6 +29,7 @@ From the GUI, FileMonitor can be started as a service or as a plugin. The recomm
- The restart command restarts FileMonitor as a Task in Stash.
# Task Scheduler
+
To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor** and enable the **Scheduler** option.
![ReoccurringTaskScheduler](https://github.com/user-attachments/assets/5a7bf6a4-3bd6-4692-a6c3-e9f8f4664f14)
@@ -38,19 +44,20 @@ To enable the scheduler go to **Stash->Settings->Plugins->Plugins->FileMonitor**
- The example tasks are disabled by default because they either have a zero frequency value or the time field is set to **DISABLED**.
To configure the schedule or to add new task, edit the **task_scheduler** section in the **filemonitor_config.py** file.
-```` python
+
+```python
"task_scheduler": [
# To create a daily task, include each day of the week for the weekday field or "every"
# Optional field for task "Auto Tag" is 'paths'. For detail usage, see example #A3: in filemonitor_task_examples.py
{"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
# Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir field. For detail usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
- {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
+ {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
"weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
# The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False
{"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
"weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM)
{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
-
+
# The following tasks are scheduled weekly
# Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. For detail usage, see examples #A3: in filemonitor_task_examples.py
{"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM)
@@ -59,7 +66,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
{"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
{"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
{"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
-
+
# To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
# The monthly field value must be 1, 2, 3, or 4.
# 1 = 1st specified weekday of the month. Example 1st monday.
@@ -68,26 +75,29 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
# 4 = 4th specified weekday of the month.
# The Backup task is scheduled monthly
# Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
- {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+ {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
# The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
- {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
- "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
-
- # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
+ {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
+ "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
+
+ # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
# This task only works if FileMonitor is started as a service or in command line mode.
# Optional fields are 'command' and 'RunAfter'. For detail usage, see examples #C1 and #C2 in filemonitor_task_examples.py
{"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes
],
-````
+```
+
- To add plugins to the task list, use the Plugin-ID in the "task" field. The plugin ID is usually the file name of the script without the extension.
- Plugin task have the following optional fields: taskName, taskMode, validateDir, and taskQue
- The **validateDir** field can be used to define the plugin sub directory, which is checked to see if it exist before running the task.
- **taskName** field is used to name the task to call for the associated plugin. It can not be used with "taskQue":False
- - **taskQue** field is used to call the plugin without using the Task Queue. I.E. "taskQue":False. When this field is set to False, the taskName field can NOT be used. Instead use taskMode to identify the task to call.
- - **taskMode** field is used in order to run the plugin without using the Task Queue. The plugin runs immediatly. Be careful not to confuse taskMode with taskName. Look in the plugin *.yml file under the **tasks** section where it defines both the task-name and the task-mode.
+ - **taskQue** field is used to call the plugin without using the Task Queue. I.E. "taskQue":False. When this field is set to False, the taskName field can NOT be used. Instead use taskMode to identify the task to call.
+ - **taskMode** field is used to run the plugin without using the Task Queue. The plugin runs immediately. Be careful not to confuse taskMode with taskName. Look in the plugin \*.yml file under the **tasks** section where it defines both the task-name and the task-mode.
- Task can be scheduled to run monthly, weekly, hourly, and by minutes.
- The scheduler list uses two types of syntax. One is **weekday** based, and the other is **frequency** based.
+
- **weekday Based**
+
- Use the weekday based syntax for daily, weekly, and monthly schedules.
- All the weekday based methods must have a **weekday** field and a **time** field, which specifies the day(s) of the week and the time to start the task.
- **Daily**:
@@ -128,6 +138,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
- For best results use the scheduler with FileMonitor running as a service.
## Requirements
+
- pip install -r requirements.txt
- Or manually install each requirement:
- `pip install stashapp-tools --upgrade`
@@ -136,6 +147,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
- `pip install schedule`
## Installation
+
- Follow **Requirements** instructions.
- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**.
- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\FileMonitor**).
@@ -144,16 +156,17 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
That's it!!!
## Options
+
- Main options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
- When the UI option [Max DB Backups] is set to a value greater than 1, and the scheduler is enabled, the number of database backup files is trimmed down to the configured [**Max DB Backups**] value after the scheduler executes the Backup task.
- The other options are self-explanatory in the UI.
- Additional options are available in filemonitor_config.py. The options are well documented in the commented code.
## Bugs and Feature Request
+
Please use the following link to report FileMonitor bugs:
[FileMonitor Bug Report](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Plugin_Bug&projects=&template=bug_report_plugin.yml&title=%F0%9F%AA%B2%5BFileMonitor%5D+Your_Short_title)
Please use the following link to report FileMonitor Feature Requests: [FileMonitor Feature Request](https://github.com/David-Maisonave/Axter-Stash/issues/new?assignees=&labels=Enhancement&projects=&template=feature_request_plugin.yml&title=%F0%9F%92%A1%EF%B8%8F%5BEnhancement%5D%3A%5BFileMonitor%5D+Your_Short_title)
Please do **NOT** use the feature request to include any problems associated with errors. Instead use the bug report for error issues.
-
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index 5637ee3b..19f448cd 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -13,7 +13,7 @@ settings:
type: BOOLEAN
turnOnSchedulerDeleteDup:
displayName: Delete Duplicate Scheduler
- description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
+ description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
type: BOOLEAN
zmaximumBackups:
displayName: Max DB Backups
From d7ebfb0a11049bfb319e41127fd4941cb2dcc212 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 05:16:57 -0400
Subject: [PATCH 32/39] Update DupFileManager.py
---
plugins/DupFileManager/DupFileManager.py | 6 ------
1 file changed, 6 deletions(-)
diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py
index a1b2d541..c9ef4a16 100644
--- a/plugins/DupFileManager/DupFileManager.py
+++ b/plugins/DupFileManager/DupFileManager.py
@@ -3,12 +3,6 @@
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
# Note: To call this script outside of Stash, pass argument --url
# Example: python DupFileManager.py --url http://localhost:9999 -a
-
-# Research:
-# Research following links to complete this plugin:
-# Python library for parse-reparsepoint
-# https://pypi.org/project/parse-reparsepoint/
-# pip install parse-reparsepoint
import os, sys, time, pathlib, argparse, platform, shutil, logging
from StashPluginHelper import StashPluginHelper
from DupFileManager_config import config # Import config from DupFileManager_config.py
From b4e59e118c96d81efee03f046cb28696a9af7078 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 05:46:50 -0400
Subject: [PATCH 33/39] Update README.md
---
plugins/DupFileManager/README.md | 20 +++++++++++++-------
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index 82e3fb7d..7d0cf052 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -8,20 +8,26 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
- Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
- Delete duplicate file task with the following options:
- Tasks (Settings->Task->[Plugin Tasks]->DupFileManager)
- - **Tag Duplicate Filename** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
- - **Delete Duplicates** - Deletes duplicate files
+ - **Tag Duplicates** - Adds tag DuplicateMarkForDeletion to duplicates having lower resolution, shorter duration, shorter file name length, and/or a blacklisted path.
+ - **Delete Tagged Duplicates** - Delete scenes having DuplicateMarkForDeletion tag.
+ - **Delete Duplicates** - Deletes duplicate files. Performs deletion without first tagging.
- Plugin UI options (Settings->Plugins->Plugins->[DupFileManager])
- - Use a white-list of preferential directories to determine which duplicate will be the primary.
- - Use a gray-list of preferential directories to determine which duplicate should be the primary.
- - Use a black-list to determine which duplicates should be deleted first.
+ - Has a three-tier path selection to determine which duplicates to keep and which should be candidates for deletion.
+ - **Whitelist** - List of paths NOT to be deleted.
+ - E.g. C:\Favorite\,E:\MustKeep\
+ - **Gray-List** - List of preferential paths to determine which duplicate should be the primary.
+ - E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+ - **Blacklist** - List of LEAST preferential paths to determine primary candidates for deletion.
+ - E.g. C:\Downloads\,F:\DeleteMeFirst\
- **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
- **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
- **Merge Duplicate Tags** - Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
+ - **Swap High Resolution** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
+ - **Swap Longer Duration** - When enabled, swaps in the scene with the longer duration.
- Options available via DupFileManager_config.py
- **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
- - **swapHighRes** - When enabled, swaps higher resolution files between whitelist and blacklist/graylist files.
- - **swapLongLength** - When enabled, swaps scene with longer duration.
- **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
+ - **addPrimaryDupPathToDetails** - If enabled, adds the primary duplicate path to the scene detail.
### Requirements
From c4be9c2d804a64bd1e500af370be9c2d08431681 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Wed, 28 Aug 2024 06:22:48 -0400
Subject: [PATCH 34/39] Added option to avoid "Delete Tagged Duplicates"
Added option to prevent "Delete Tagged Duplicates" from running when turnOnSchedulerDeleteDup is not enabled.
---
plugins/FileMonitor/filemonitor.py | 18 +++++++---
plugins/FileMonitor/filemonitor_config.py | 12 +++----
.../FileMonitor/filemonitor_self_unit_test.py | 35 ++++++++++---------
3 files changed, 38 insertions(+), 27 deletions(-)
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index 29aea88b..03575b3f 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -161,11 +161,11 @@ def __init__(self):
else:
weekDays = task['weekday'].lower()
if 'monthly' in task:
- stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
+ stash.Log(f"Adding to scheduler task '{self.taskName(task)}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
elif task['weekday'] == "every":
- stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every day at {task['time']}")
+ stash.Log(f"Adding to scheduler task '{self.taskName(task)}' (weekly) every day at {task['time']}")
else:
- stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}")
+ stash.Log(f"Adding to scheduler task '{self.taskName(task)}' (weekly) every {task['weekday']} at {task['time']}")
hasValidDay = False
if "monday" in weekDays or "every" in weekDays:
@@ -196,6 +196,16 @@ def __init__(self):
stash.Error(f"Task '{task['task']}' is missing fields.")
self.checkSchedulePending()
+ def taskName(self, task):
+ pluginTask = None
+ if 'taskName' in task:
+ pluginTask = task['taskName']
+ elif 'taskMode' in task:
+ pluginTask = task['taskMode']
+ if pluginTask == None or pluginTask == "":
+ return task['task']
+ return f"{task['task']}->{pluginTask}"
+
# ToDo: Add asynchronous threading logic to running task.
def runTask(self, task):
import datetime
@@ -319,7 +329,7 @@ def runPluginTask(self, task):
if invalidDir:
stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
return None
- if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and task['taskName'] == "Delete Duplicates") or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")):
+ if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and (task['taskName'] == "Delete Duplicates" or task['taskName'] == "Delete Tagged Duplicates")) or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")):
stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]")
return None
# The pluginId field is only here for backward compatibility, and should not be used in future scheduler configurations
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index 60824fd6..a2456471 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -17,9 +17,6 @@
# Task "Create Tags" is a plugin task. Optional fields are taskName and validateDir field. For detail usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
{"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
"weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
- # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False
- {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
- "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM)
{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
# The following tasks are scheduled weekly
@@ -30,6 +27,12 @@
{"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
{"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
{"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
+ # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False
+ {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
+ "weekday" : "sunday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] (Sunday at 2:30AM)
+ # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
+ {"task" : "DupFileManager", "taskName" : "Delete Tagged Duplicates", "validateDir" : "DupFileManager",
+ "weekday" : "saturday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Tagged Duplicates] 6 days after tagging at 2:30AM
# To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
# The monthly field value must be 1, 2, 3, or 4.
@@ -40,9 +43,6 @@
# The Backup task is scheduled monthly
# Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
- # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
- {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
- "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
# The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
# This task only works if FileMonitor is started as a service or in command line mode.
diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py
index 83942f46..135a1eba 100644
--- a/plugins/FileMonitor/filemonitor_self_unit_test.py
+++ b/plugins/FileMonitor/filemonitor_self_unit_test.py
@@ -21,23 +21,24 @@
],
"task_scheduler_set_time": [
# Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled.
- {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
- {"task" : "Generate", "weekday" : "every", "time" : "04:01"},
- {"task" : "Clean", "weekday" : "every", "time" : "04:01"},
- {"task" : "Auto Tag", "weekday" : "every", "time" : "04:01"},
- {"task" : "Optimise Database", "weekday" : "every", "time" : "04:01"},
- {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Running plugin task: Create Tags
- {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "04:01"}, # Does NOT run in the task queue
- {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "04:01"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
- {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "04:01"},
- {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Optimising database...
- {"task" : "Clean Generated Files", "weekday" : "every", "time" : "04:01"},
- {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "04:01"}, # In task queue as -> Migrating scene hashes...
- {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
- {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
- {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
- {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
- {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "04:01"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
+ {"task" : "Generate", "weekday" : "every", "time" : "06:17"},
+ {"task" : "Clean", "weekday" : "every", "time" : "06:17"},
+ {"task" : "Auto Tag", "weekday" : "every", "time" : "06:17"},
+ {"task" : "Optimise Database", "weekday" : "every", "time" : "06:17"},
+ {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Running plugin task: Create Tags
+ {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "06:17"}, # Does NOT run in the task queue
+ {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
+ {"task" : "DupFileManager", "taskName" : "Delete Tagged Duplicates", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
+ {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "06:17"},
+ {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Optimising database...
+ {"task" : "Clean Generated Files", "weekday" : "every", "time" : "06:17"},
+ {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Migrating scene hashes...
+ {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+ {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+ {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+ {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "06:17"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
],
# MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
"selfUnitTest_repeat" : False , # Enable to turn on self unit test.
From b34fb92790e03713cb8bb0bc44ff2b046618f594 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sat, 23 Nov 2024 00:17:40 -0500
Subject: [PATCH 35/39] Added report feature, tools UI, and advanced menu
---
plugins/DupFileManager/DupFileManager.css | 67 +
plugins/DupFileManager/DupFileManager.css.map | 1 +
plugins/DupFileManager/DupFileManager.dev.py | 1440 +++++++++++++
plugins/DupFileManager/DupFileManager.js | 310 +++
plugins/DupFileManager/DupFileManager.js.map | 1 +
plugins/DupFileManager/DupFileManager.py | 1500 +++++++++++--
plugins/DupFileManager/DupFileManager.yml | 81 +-
.../DupFileManager/DupFileManager_config.py | 74 +-
.../DupFileManager_config_dev.py | 24 +
.../DupFileManager_report_config.py | 212 ++
plugins/DupFileManager/ModulesValidate.py | 126 ++
plugins/DupFileManager/README.md | 73 +-
plugins/DupFileManager/StashPluginHelper.py | 847 ++++++--
plugins/DupFileManager/advance_options.html | 1902 +++++++++++++++++
plugins/DupFileManager/requirements.txt | 3 +-
15 files changed, 6265 insertions(+), 396 deletions(-)
create mode 100644 plugins/DupFileManager/DupFileManager.css
create mode 100644 plugins/DupFileManager/DupFileManager.css.map
create mode 100644 plugins/DupFileManager/DupFileManager.dev.py
create mode 100644 plugins/DupFileManager/DupFileManager.js
create mode 100644 plugins/DupFileManager/DupFileManager.js.map
create mode 100644 plugins/DupFileManager/DupFileManager_config_dev.py
create mode 100644 plugins/DupFileManager/DupFileManager_report_config.py
create mode 100644 plugins/DupFileManager/ModulesValidate.py
create mode 100644 plugins/DupFileManager/advance_options.html
diff --git a/plugins/DupFileManager/DupFileManager.css b/plugins/DupFileManager/DupFileManager.css
new file mode 100644
index 00000000..7ef71ede
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.css
@@ -0,0 +1,67 @@
+.scene-card__date {
+ color: #bfccd6;
+ font-size: 0.85em;
+}
+
+.scene-card__performer {
+ display: inline-block;
+ font-weight: 500;
+ margin-right: 0.5em;
+}
+.scene-card__performer a {
+ color: #137cbd;
+}
+
+.scene-card__performers,
+.scene-card__tags {
+ -webkit-box-orient: vertical;
+ display: -webkit-box;
+ -webkit-line-clamp: 1;
+ overflow: hidden;
+}
+.scene-card__performers:hover,
+.scene-card__tags:hover {
+ -webkit-line-clamp: unset;
+ overflow: visible;
+}
+
+.scene-card__tags .tag-item {
+ margin-left: 0;
+}
+
+.scene-performer-popover .image-thumbnail {
+ margin: 1em;
+}
+
+ /* Dashed border */
+hr.dashed {
+ border-top: 3px dashed #bbb;
+}
+
+/* Dotted border */
+hr.dotted {
+ border-top: 3px dotted #bbb;
+}
+
+/* Solid border */
+hr.solid {
+ border-top: 3px solid #bbb;
+}
+
+/* Rounded border */
+hr.rounded {
+ border-top: 8px solid #bbb;
+ border-radius: 5px;
+}
+
+h3.under_construction {
+ color:red;
+ background-color:yellow;
+}
+
+h3.submenu {
+ color:Tomato;
+ background-color:rgba(100, 100, 100);
+}
+
+/*# sourceMappingURL=DupFileManager.css.map */
diff --git a/plugins/DupFileManager/DupFileManager.css.map b/plugins/DupFileManager/DupFileManager.css.map
new file mode 100644
index 00000000..a4afe07b
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.css.map
@@ -0,0 +1 @@
+{"version":3,"sourceRoot":"","sources":["../src/DupFileManager.scss"],"names":[],"mappings":"AAAA;EACE;EACA;;;AAGF;EACE;EACA;EACA;;AAEA;EACE;;;AAIJ;AAAA;EAEE;EACA;EACA;EACA;;AAEA;AAAA;EACE;EACA;;;AAIJ;EACE;;;AAGF;EACE","file":"DupFileManager.css"}
\ No newline at end of file
diff --git a/plugins/DupFileManager/DupFileManager.dev.py b/plugins/DupFileManager/DupFileManager.dev.py
new file mode 100644
index 00000000..630e16e2
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.dev.py
@@ -0,0 +1,1440 @@
+# Description: This is a Stash plugin which manages duplicate files.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+# Note: To call this script outside of Stash, pass argument --url
+# Example: python DupFileManager.py --url http://localhost:9999 -a
+try:
+ import ModulesValidate
+ ModulesValidate.modulesInstalled(["send2trash", "requests"], silent=True)
+except Exception as e:
+ import traceback, sys
+ tb = traceback.format_exc()
+ print(f"ModulesValidate Exception. Error: {e}\nTraceBack={tb}", file=sys.stderr)
+import os, sys, time, pathlib, argparse, platform, shutil, traceback, logging, requests
+from datetime import datetime
+from StashPluginHelper import StashPluginHelper
+from stashapi.stash_types import PhashDistance
+from DupFileManager_config import config # Import config from DupFileManager_config.py
+from DupFileManager_report_config import report_config
+
+# Merge the report settings into the main plugin config. Both are assumed to be
+# dicts here (the original `config += report_config` would raise TypeError on dicts).
+config.update(report_config)
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
+parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
+parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.')
+parser.add_argument('--clear_dup_tag', '-c', dest='clear_tag', action='store_true', help='Clear duplicate tags from scenes.')
+parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.')
+parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.')
+parse_args = parser.parse_args()
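+# Example invocations (flags as defined above; the URL is an assumption):
+#   python DupFileManager.py --url http://localhost:9999 --add_dup_tag
+#   python DupFileManager.py -u http://localhost:9999 --del_tag_dup --remove_dup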
+
+settings = {
+ "matchDupDistance": 0,
+ "mergeDupFilename": False,
+ "whitelistDelDupInSameFolder": False,
+ "zvWhitelist": "",
+ "zwGraylist": "",
+ "zxBlacklist": "",
+ "zyMaxDupToProcess": 0,
+ "zySwapHighRes": False,
+ "zySwapLongLength": False,
+ "zySwapBetterBitRate": False,
+ "zySwapCodec": False,
+ "zySwapBetterFrameRate": False,
+ "zzDebug": False,
+ "zzTracing": False,
+
+ "zzObsoleteSettingsCheckVer2": False, # This is a hidden variable that is NOT displayed in the UI
+
+ # Obsolete setting names
+ "zWhitelist": "",
+ "zxGraylist": "",
+ "zyBlacklist": "",
+ "zyMatchDupDistance": 0,
+ "zSwapHighRes": False,
+ "zSwapLongLength": False,
+ "zSwapBetterBitRate": False,
+ "zSwapCodec": False,
+ "zSwapBetterFrameRate": False,
+}
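+# Note (assumption): Stash lists plugin settings alphabetically, so the zv/zw/zx/zy/zz
+# prefixes above appear to exist purely to control the display order in the settings UI.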
+stash = StashPluginHelper(
+ stash_url=parse_args.stash_url,
+ debugTracing=parse_args.trace,
+ settings=settings,
+ config=config,
+ maxbytes=10*1024*1024,
+ DebugTraceFieldName="zzTracing",
+ DebugFieldName="zzDebug",
+ )
+stash.convertToAscii = True
+
+advanceMenuOptions = [ "applyCombo", "applyComboBlacklist", "pathToDelete", "pathToDeleteBlacklist", "sizeToDeleteLess", "sizeToDeleteGreater", "sizeToDeleteBlacklistLess", "sizeToDeleteBlacklistGreater", "durationToDeleteLess", "durationToDeleteGreater", "durationToDeleteBlacklistLess", "durationToDeleteBlacklistGreater",
+ "commonResToDeleteLess", "commonResToDeleteEq", "commonResToDeleteGreater", "commonResToDeleteBlacklistLess", "commonResToDeleteBlacklistEq", "commonResToDeleteBlacklistGreater", "resolutionToDeleteLess", "resolutionToDeleteEq", "resolutionToDeleteGreater",
+ "resolutionToDeleteBlacklistLess", "resolutionToDeleteBlacklistEq", "resolutionToDeleteBlacklistGreater", "ratingToDeleteLess", "ratingToDeleteEq", "ratingToDeleteGreater", "ratingToDeleteBlacklistLess", "ratingToDeleteBlacklistEq", "ratingToDeleteBlacklistGreater",
+ "tagToDelete", "tagToDeleteBlacklist", "titleToDelete", "titleToDeleteBlacklist", "pathStrToDelete", "pathStrToDeleteBlacklist"]
+
+doJsonReturnModeTypes = ["tag_duplicates_task", "removeDupTag", "addExcludeTag", "removeExcludeTag", "mergeTags", "getLocalDupReportPath",
+ "createDuplicateReportWithoutTagging", "deleteLocalDupReportHtmlFiles", "clear_duplicate_tags_task",
+ "deleteAllDupFileManagerTags", "deleteBlackListTaggedDuplicatesTask", "deleteTaggedDuplicatesLwrResOrLwrDuration",
+ "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"]
+# Extend with the bare list so each advanced menu option becomes a member; wrapping it
+# in another list would make the membership test below always fail for these options.
+doJsonReturnModeTypes += advanceMenuOptions
+doJsonReturn = False
+if len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in doJsonReturnModeTypes:
+ doJsonReturn = True
+ stash.log_to_norm = stash.LogTo.FILE
+elif stash.PLUGIN_TASK_NAME == "doEarlyExit":
+ time.sleep(3)
+ stash.Log("Doing early exit because of task name")
+ time.sleep(3)
+ exit(0)
+
+stash.Log("******************* Starting *******************")
+if len(sys.argv) > 1:
+ stash.Log(f"argv = {sys.argv}")
+else:
+ stash.Debug(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}; PLUGIN_TASK_NAME = {stash.PLUGIN_TASK_NAME}; argv = {sys.argv}")
+stash.status(logLevel=logging.DEBUG)
+
+obsoleteSettingsToConvert = {"zWhitelist" : "zvWhitelist", "zxGraylist" : "zwGraylist", "zyBlacklist" : "zxBlacklist", "zyMatchDupDistance" : "matchDupDistance", "zSwapHighRes" : "zySwapHighRes", "zSwapLongLength" : "zySwapLongLength", "zSwapBetterBitRate" : "zySwapBetterBitRate", "zSwapCodec" : "zySwapCodec", "zSwapBetterFrameRate" : "zySwapBetterFrameRate"}
+stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "zzObsoleteSettingsCheckVer2")
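+# replaceObsoleteSettings copies values saved under the obsolete names on the left to
+# the new names on the right, using the hidden zzObsoleteSettingsCheckVer2 flag,
+# presumably so the one-time migration is not repeated on every run.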
+
+
+LOG_STASH_N_PLUGIN = stash.LogTo.STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LogTo.CONSOLE + stash.LogTo.FILE
+listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
+addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
+clearAllDupfileManagerTags = stash.Setting('clearAllDupfileManagerTags')
+doGeneratePhash = stash.Setting('doGeneratePhash')
+mergeDupFilename = stash.Setting('mergeDupFilename')
+moveToTrashCan = not stash.Setting('permanentlyDelete')
+alternateTrashCanPath = stash.Setting('dup_path')
+whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
+graylistTagging = stash.Setting('graylistTagging')
+maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
+significantTimeDiff = float(stash.Setting('significantTimeDiff'))
+toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
+cleanAfterDel = stash.Setting('cleanAfterDel')
+
+swapHighRes = stash.Setting('zySwapHighRes')
+swapLongLength = stash.Setting('zySwapLongLength')
+swapBetterBitRate = stash.Setting('zySwapBetterBitRate')
+swapCodec = stash.Setting('zySwapCodec')
+swapBetterFrameRate = stash.Setting('zySwapBetterFrameRate')
+favorLongerFileName = stash.Setting('favorLongerFileName')
+favorLargerFileSize = stash.Setting('favorLargerFileSize')
+favorBitRateChange = stash.Setting('favorBitRateChange')
+favorHighBitRate = stash.Setting('favorHighBitRate')
+favorFrameRateChange = stash.Setting('favorFrameRateChange')
+favorHigherFrameRate = stash.Setting('favorHigherFrameRate')
+favorCodecRanking = stash.Setting('favorCodecRanking')
+codecRankingSetToUse = stash.Setting('codecRankingSetToUse')
+if codecRankingSetToUse == 4:
+ codecRanking = stash.Setting('codecRankingSet4')
+elif codecRankingSetToUse == 3:
+ codecRanking = stash.Setting('codecRankingSet3')
+elif codecRankingSetToUse == 2:
+ codecRanking = stash.Setting('codecRankingSet2')
+else:
+ codecRanking = stash.Setting('codecRankingSet1')
+skipIfTagged = stash.Setting('skipIfTagged')
+killScanningPostProcess = stash.Setting('killScanningPostProcess')
+tagLongDurationLowRes = stash.Setting('tagLongDurationLowRes')
+bitRateIsImporantComp = stash.Setting('bitRateIsImporantComp')
+codecIsImporantComp = stash.Setting('codecIsImporantComp')
+
+excludeFromReportIfSignificantTimeDiff = False
+
+matchDupDistance = int(stash.Setting('matchDupDistance'))
+matchPhaseDistance = PhashDistance.EXACT
+matchPhaseDistanceText = "Exact Match"
+if stash.PLUGIN_TASK_NAME == "tag_duplicates_task" and 'Target' in stash.JSON_INPUT['args']:
+ if stash.JSON_INPUT['args']['Target'].startswith("0"):
+ matchDupDistance = 0
+ elif stash.JSON_INPUT['args']['Target'].startswith("1"):
+ matchDupDistance = 1
+ elif stash.JSON_INPUT['args']['Target'].startswith("2"):
+ matchDupDistance = 2
+ elif stash.JSON_INPUT['args']['Target'].startswith("3"):
+ matchDupDistance = 3
+
+ if stash.JSON_INPUT['args']['Target'].find(":") == 1:
+ significantTimeDiff = float(stash.JSON_INPUT['args']['Target'][2:])
+ excludeFromReportIfSignificantTimeDiff = True
+
+if matchDupDistance == 1:
+ matchPhaseDistance = PhashDistance.HIGH
+ matchPhaseDistanceText = "High Match"
+elif matchDupDistance == 2:
+ matchPhaseDistance = PhashDistance.MEDIUM
+ matchPhaseDistanceText = "Medium Match"
+elif matchDupDistance == 3:
+ matchPhaseDistance = PhashDistance.LOW
+ matchPhaseDistanceText = "Low Match"
+
+# significantTimeDiff is a ratio; it cannot be higher than 1 and is clamped to a minimum of .25
+if significantTimeDiff > 1:
+    significantTimeDiff = 1.0
+if significantTimeDiff < .25:
+    significantTimeDiff = 0.25
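+# Worked example: significantTimeDiff is a duration ratio, so with a value of 0.75 a
+# 45 second file compared to a 60 second one (45/60 = 0.75) sits exactly at the
+# threshold; the clamp above forces the setting into the [0.25, 1.0] range.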
+
+
+duplicateMarkForDeletion = stash.Setting('DupFileTag')
+if duplicateMarkForDeletion == "":
+ duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
+
+base1_duplicateMarkForDeletion = duplicateMarkForDeletion
+
+duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
+if duplicateWhitelistTag == "":
+ duplicateWhitelistTag = '_DuplicateWhitelistFile'
+
+excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag')
+if excludeDupFileDeleteTag == "":
+ excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion'
+
+graylistMarkForDeletion = stash.Setting('graylistMarkForDeletion')
+if graylistMarkForDeletion == "":
+ graylistMarkForDeletion = '_GraylistMarkForDeletion'
+
+longerDurationLowerResolution = stash.Setting('longerDurationLowerResolution')
+if longerDurationLowerResolution == "":
+ longerDurationLowerResolution = '_LongerDurationLowerResolution'
+
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag]
+
+if stash.Setting('underscoreDupFileTag') and not duplicateMarkForDeletion.startswith('_'):
+ duplicateMarkForDeletionWithOutUnderscore = duplicateMarkForDeletion
+ duplicateMarkForDeletion = "_" + duplicateMarkForDeletion
+ if stash.renameTag(duplicateMarkForDeletionWithOutUnderscore, duplicateMarkForDeletion):
+ stash.Log(f"Renamed tag {duplicateMarkForDeletionWithOutUnderscore} to {duplicateMarkForDeletion}")
+ stash.Trace(f"Added underscore to {duplicateMarkForDeletionWithOutUnderscore} = {duplicateMarkForDeletion}")
+ excludeMergeTags += [duplicateMarkForDeletion]
+else:
+ stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
+
+base2_duplicateMarkForDeletion = duplicateMarkForDeletion
+
+if stash.Setting('appendMatchDupDistance'):
+ duplicateMarkForDeletion += f"_{matchDupDistance}"
+ excludeMergeTags += [duplicateMarkForDeletion]
+
+stash.initMergeMetadata(excludeMergeTags)
+
+graylist = stash.Setting('zwGraylist').split(listSeparator)
+graylist = [item.lower() for item in graylist]
+if graylist == [""] : graylist = []
+stash.Trace(f"graylist = {graylist}")
+whitelist = stash.Setting('zvWhitelist').split(listSeparator)
+whitelist = [item.lower() for item in whitelist]
+if whitelist == [""] : whitelist = []
+stash.Trace(f"whitelist = {whitelist}")
+blacklist = stash.Setting('zxBlacklist').split(listSeparator)
+blacklist = [item.lower() for item in blacklist]
+if blacklist == [""] : blacklist = []
+stash.Trace(f"blacklist = {blacklist}")
+
+def realpath(path):
+    """
+    Resolve the final target of a symbolic link (or Windows reparse point),
+    using pywin32 on Windows and generic helpers elsewhere.
+    """
+    try:
+        import win32file
+        f = win32file.CreateFile(path, win32file.GENERIC_READ,
+                                 win32file.FILE_SHARE_READ, None,
+                                 win32file.OPEN_EXISTING,
+                                 win32file.FILE_FLAG_BACKUP_SEMANTICS, None)
+        target = win32file.GetFinalPathNameByHandle(f, 0)
+        # The above gives us something like u'\\\\?\\C:\\tmp\\scalarizr\\3.3.0.7978'.
+        # Use removeprefix (Python 3.9+) rather than strip: strip('\\\\?\\') treats its
+        # argument as a character set and can also remove trailing backslashes.
+        return target.removeprefix('\\\\?\\')
+    except ImportError:
+        handle = open_dir(path)
+        target = get_symbolic_target(handle)
+        check_closed(handle)
+        return target
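+# Note (assumption): open_dir, get_symbolic_target, and check_closed in the
+# ImportError branch above are not defined or imported in this file; they are
+# assumed to be provided elsewhere when pywin32 is unavailable.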
+
+def isReparsePoint(path):
+ import win32api
+ import win32con
+ from parse_reparsepoint import Navigator
+ FinalPathname = realpath(path)
+ stash.Log(f"(path='{path}') (FinalPathname='{FinalPathname}')")
+ if FinalPathname != path:
+ stash.Log(f"Symbolic link '{path}'")
+ return True
+ if not os.path.isdir(path):
+ path = os.path.dirname(path)
+ return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT
+
+def testReparsePointAndSymLink(merge=False, deleteDup=False):
+    stash.Trace(f"Debug Tracing (platform.system()={platform.system()})")
+    myTestPaths = [
+        r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4", # not a reparse point or symbolic link
+        r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts", # reparse point
+        r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4", # symbolic link
+        r"E:\Stash\plugins\RenameFile\README.md", # symbolic link
+        r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md", # symbolic link
+        r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md", # not a reparse point
+    ]
+    for myTestPath in myTestPaths:
+        stash.Log(f"Testing '{myTestPath}'")
+        if isReparsePoint(myTestPath):
+            stash.Log(f"isSymLink '{myTestPath}'")
+        else:
+            stash.Log(f"Not isSymLink '{myTestPath}'")
+    return
+
+detailPrefix = "BaseDup="
+detailPostfix = "\n"
+
+def setTagId(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False):
+ details = ""
+ ORG_DATA_DICT = {'id' : sceneDetails['id']}
+ dataDict = ORG_DATA_DICT.copy()
+ doAddTag = True
+ if addPrimaryDupPathToDetails:
+ BaseDupStr = f"{detailPrefix}{DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n{TagReason}(matchDupDistance={matchPhaseDistanceText})\n{detailPostfix}"
+ if sceneDetails['details'] == "":
+ details = BaseDupStr
+ elif not sceneDetails['details'].startswith(detailPrefix):
+ details = f"{BaseDupStr};\n{sceneDetails['details']}"
+ for tag in sceneDetails['tags']:
+ if tag['name'] == tagName:
+ doAddTag = False
+ break
+ if doAddTag:
+ stash.addTag(sceneDetails, tagName, ignoreAutoTag=ignoreAutoTag)
+ if details != "":
+ dataDict.update({'details' : details})
+ if dataDict != ORG_DATA_DICT:
+ stash.updateScene(dataDict)
+ stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict} and tag {tagName}", toAscii=True)
+ else:
+ stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']} already has tag {tagName}.", toAscii=True)
+ return doAddTag
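+# setTagId returns True when the tag was newly added to the scene, and False when the
+# scene already carried the tag (the scene details may still be updated in either case).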
+
+def setTagId_withRetry(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+    errMsg = None
+    for i in range(0, retryCount):
+        try:
+            if errMsg != None:
+                stash.Warn(errMsg)
+            return setTagId(tagName, sceneDetails, DupFileToKeep, TagReason, ignoreAutoTag)
+        except (requests.exceptions.ConnectionError, ConnectionResetError) as e:
+            tb = traceback.format_exc()
+            errMsg = f"[setTagId] Exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+        except Exception as e:
+            tb = traceback.format_exc()
+            errMsg = f"[setTagId] Unknown exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+        time.sleep(sleepSecondsBetweenRetry)
+    if errMsg != None:
+        stash.Warn(f"[setTagId] Giving up after {retryCount} attempts. Last error: {errMsg}")
+
+def hasSameDir(path1, path2):
+    return pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent
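+# Example (hypothetical paths): hasSameDir(r"C:\media\a.mp4", r"C:\media\b.mp4") -> True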
+
+def sendToTrash(path):
+ if not os.path.isfile(path):
+ stash.Warn(f"File does not exist: {path}.", toAscii=True)
+ return False
+ try:
+ from send2trash import send2trash # Requirement: pip install Send2Trash
+ send2trash(path)
+ return True
+ except Exception as e:
+ stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True)
+ try:
+ if os.path.isfile(path):
+ os.remove(path)
+ return True
+ except Exception as e:
+ stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
+ return False
+
+# If ckTimeDiff=False: does duration2 have significantly more time than duration1?
+def significantTimeDiffCheck(duration1, duration2, ckTimeDiff = False): # If ckTimeDiff=True: is the time difference significant in either direction?
+    if not isinstance(duration1, int) and 'files' in duration1:
+        duration1 = int(duration1['files'][0]['duration'])
+        duration2 = int(duration2['files'][0]['duration'])
+    timeDiff = getTimeDif(duration1, duration2)
+    if ckTimeDiff and timeDiff > 1:
+        timeDiff = getTimeDif(duration2, duration1)
+    if timeDiff < significantTimeDiff:
+        return True
+    return False
+
+def getTimeDif(duration1, duration2): # Where duration1 is expected to be smaller than duration2, e.g. 45/60 = .75
+    return duration1 / duration2
+
+def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better than scene1
+    # Prioritize higher resolution over codec, bit rate, and frame rate
+ if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+ return False
+ if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate):
+ if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])):
+ stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}")
+ return True
+ if (favorCodecRanking and swapCandidateCk == False) or (swapCandidateCk and swapCodec):
+ scene1CodecRank = stash.indexStartsWithInList(codecRanking, scene1['files'][0]['video_codec'])
+ scene2CodecRank = stash.indexStartsWithInList(codecRanking, scene2['files'][0]['video_codec'])
+ if scene2CodecRank < scene1CodecRank:
+ stash.Trace(f"[isBetterVideo] Better codec. {scene1['files'][0]['path']}={scene1['files'][0]['video_codec']}:Rank={scene1CodecRank} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['video_codec']}:Rank={scene2CodecRank}")
+ return True
+ if (favorFrameRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterFrameRate):
+ if (favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) > int(scene1['files'][0]['frame_rate'])) or (not favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) < int(scene1['files'][0]['frame_rate'])):
+ stash.Trace(f"[isBetterVideo]:[favorHigherFrameRate={favorHigherFrameRate}] Better frame rate. {scene1['files'][0]['path']}={scene1['files'][0]['frame_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}")
+ return True
+ return False
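+# Comparison order implemented above: higher resolution on scene1 wins outright;
+# otherwise bit rate, codec ranking, and frame rate are compared in that order,
+# each check gated by its favor* setting (or the matching swap* setting when
+# called as a swap-candidate check).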
+
+def significantMoreTimeCompareToBetterVideo(scene1, scene2): # is scene2 better than scene1
+ if isinstance(scene1, int):
+ scene1 = stash.find_scene(scene1)
+ scene2 = stash.find_scene(scene2)
+ if int(scene1['files'][0]['duration']) >= int(scene2['files'][0]['duration']):
+ return False
+ if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+ if significantTimeDiffCheck(scene1, scene2):
+ if tagLongDurationLowRes:
+                didAddTag = setTagId_withRetry(longerDurationLowerResolution, scene2, scene1, ignoreAutoTag=True)
+                stash.Log(f"Tagged scene2 with tag {longerDurationLowerResolution}, because scene1 is the better video but has significantly less time (duration ratio {getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}) compared to scene2; scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene2['files'][0]['duration']}); didAddTag={didAddTag}")
+            else:
+                stash.Warn(f"Scene1 is the better video, but it has significantly less time (duration ratio {getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}) compared to scene2; Scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); Scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene2['files'][0]['duration']})")
+ return False
+ return True
+
+def allThingsEqual(scene1, scene2): # If all important things are equal, return true
+ if int(scene1['files'][0]['duration']) != int(scene2['files'][0]['duration']):
+ return False
+ if scene1['files'][0]['width'] != scene2['files'][0]['width']:
+ return False
+ if scene1['files'][0]['height'] != scene2['files'][0]['height']:
+ return False
+ if bitRateIsImporantComp and scene1['files'][0]['bit_rate'] != scene2['files'][0]['bit_rate']:
+ return False
+ if codecIsImporantComp and scene1['files'][0]['video_codec'] != scene2['files'][0]['video_codec']:
+ return False
+ return True
+
+def isSwapCandidate(DupFileToKeep, DupFile):
+ # Don't move if both are in whitelist
+ if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
+ return False
+ if swapHighRes and int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
+ if not significantTimeDiffCheck(DupFileToKeep, DupFile):
+ return True
+ else:
+ stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
+ if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
+ if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
+ return True
+ if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True):
+ if not significantTimeDiffCheck(DupFileToKeep, DupFile):
+ return True
+ else:
+ stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter; DupFileToKeep-ID={DupFileToKeep['id']};DupFile-ID={DupFile['id']};BitRate {DupFileToKeep['files'][0]['bit_rate']} vs {DupFile['files'][0]['bit_rate']};Codec {DupFileToKeep['files'][0]['video_codec']} vs {DupFile['files'][0]['video_codec']};FrameRate {DupFileToKeep['files'][0]['frame_rate']} vs {DupFile['files'][0]['frame_rate']};", toAscii=True)
+ return False
+
+dupWhitelistTagId = None
+def addDupWhitelistTag():
+ global dupWhitelistTagId
+ stash.Trace(f"Adding tag duplicateWhitelistTag = {duplicateWhitelistTag}")
+ descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
+ dupWhitelistTagId = stash.createTagId(duplicateWhitelistTag, descp, ignoreAutoTag=True)
+ stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+
+excludeDupFileDeleteTagId = None
+def addExcludeDupTag():
+ global excludeDupFileDeleteTagId
+ stash.Trace(f"Adding tag excludeDupFileDeleteTag = {excludeDupFileDeleteTag}")
+ descp = 'Excludes duplicate scene from DupFileManager tagging and deletion process. A scene having this tag will not get deleted by DupFileManager'
+ excludeDupFileDeleteTagId = stash.createTagId(excludeDupFileDeleteTag, descp, ignoreAutoTag=True)
+ stash.Trace(f"dupWhitelistTagId={excludeDupFileDeleteTagId} name={excludeDupFileDeleteTag}")
+
+def isTaggedExcluded(Scene):
+ for tag in Scene['tags']:
+ if tag['name'] == excludeDupFileDeleteTag:
+ return True
+ return False
+
+def isWorseKeepCandidate(DupFileToKeep, Scene):
+ if not stash.startsWithInList(whitelist, Scene['files'][0]['path']) and stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']):
+ return True
+ if not stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']):
+ return True
+ if not stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']):
+ return True
+
+ if stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(graylist, Scene['files'][0]['path']):
+ return True
+ if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']):
+ return True
+ return False
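+# Note: the checks above rank DupFileToKeep against Scene by whitelist, graylist,
+# and blacklist membership; when both paths fall in the same list, the path whose
+# prefix appears earlier in that list takes precedence.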
+
+def killScanningJobs():
+ try:
+ if killScanningPostProcess:
+ stash.stopJobs(1, "Scanning...")
+ except Exception as e:
+ tb = traceback.format_exc()
+ stash.Error(f"Exception while trying to kill scan jobs; Error: {e}\nTraceBack={tb}")
+
+def getPath(Scene, getParent = False):
+ path = stash.asc2(Scene['files'][0]['path'])
+ path = path.replace("'", "")
+ path = path.replace("\\\\", "\\")
+ if getParent:
+ return pathlib.Path(path).resolve().parent
+ return path
+
+def getHtmlReportTableRow(qtyResults, tagDuplicates):
+ htmlReportPrefix = stash.Setting('htmlReportPrefix')
+ htmlReportPrefix = htmlReportPrefix.replace('http://127.0.0.1:9999/graphql', stash.url)
+ htmlReportPrefix = htmlReportPrefix.replace('http://localhost:9999/graphql', stash.url)
+ if tagDuplicates == False:
+ htmlReportPrefix = htmlReportPrefix.replace('