diff --git a/plugins/DupFileManager/DupFileManager.css b/plugins/DupFileManager/DupFileManager.css
new file mode 100644
index 00000000..05f75f14
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.css
@@ -0,0 +1,67 @@
+.scene-card__date {
+ color: #bfccd6;
+ font-size: 0.85em;
+}
+
+.scene-card__performer {
+ display: inline-block;
+ font-weight: 500;
+ margin-right: 0.5em;
+}
+.scene-card__performer a {
+ color: #137cbd;
+}
+
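+/* Clamp performer and tag lists to a single line, expanding them on hover (WebKit line-clamp pattern). */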
+.scene-card__performers,
+.scene-card__tags {
+ -webkit-box-orient: vertical;
+ display: -webkit-box;
+ -webkit-line-clamp: 1;
+ overflow: hidden;
+}
+.scene-card__performers:hover,
+.scene-card__tags:hover {
+ -webkit-line-clamp: unset;
+ overflow: visible;
+}
+
+.scene-card__tags .tag-item {
+ margin-left: 0;
+}
+
+.scene-performer-popover .image-thumbnail {
+ margin: 1em;
+}
+
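+/* The hr and h3 helper classes below are presumably consumed by the plugin's generated HTML report pages. */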
+/* Dashed border */
+hr.dashed {
+ border-top: 3px dashed #bbb;
+}
+
+/* Dotted border */
+hr.dotted {
+ border-top: 3px dotted #bbb;
+}
+
+/* Solid border */
+hr.solid {
+ border-top: 3px solid #bbb;
+}
+
+/* Rounded border */
+hr.rounded {
+ border-top: 8px solid #bbb;
+ border-radius: 5px;
+}
+
+h3.under_construction {
+ color: red;
+ background-color: yellow;
+}
+
+h3.submenu {
+ color: tomato;
+ background-color: rgb(100, 100, 100);
+}
+
+/*# sourceMappingURL=DupFileManager.css.map */
diff --git a/plugins/DupFileManager/DupFileManager.css.map b/plugins/DupFileManager/DupFileManager.css.map
new file mode 100644
index 00000000..a4afe07b
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.css.map
@@ -0,0 +1 @@
+{"version":3,"sourceRoot":"","sources":["../src/DupFileManager.scss"],"names":[],"mappings":"AAAA;EACE;EACA;;;AAGF;EACE;EACA;EACA;;AAEA;EACE;;;AAIJ;AAAA;EAEE;EACA;EACA;EACA;;AAEA;AAAA;EACE;EACA;;;AAIJ;EACE;;;AAGF;EACE","file":"DupFileManager.css"}
\ No newline at end of file
diff --git a/plugins/DupFileManager/DupFileManager.js b/plugins/DupFileManager/DupFileManager.js
new file mode 100644
index 00000000..c4d6b67c
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.js
@@ -0,0 +1,695 @@
+(function () {
+ /*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */
+ // prettier-ignore
+ !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return k.apply(n,e.getElementsByClassName(i)),n}if(!(h[t+" "]||d&&d.test(t))){if(c=t,f=e,1===p&&(x.test(t)||m.test(t))){(f=H.test(t)&&U(e.parentNode)||e)==e&&le.scope||((s=e.getAttribute("id"))?s=ce.escapeSelector(s):e.setAttribute("id",s=S)),o=(l=Y(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+Q(l[o]);c=l.join(",")}try{return k.apply(n,f.querySelectorAll(c)),n}catch(e){h(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return re(t.replace(ve,"$1"),e,n,r)}function W(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function F(e){return e[S]=!0,e}function $(e){var 
t=T.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function B(t){return function(e){return fe(e,"input")&&e.type===t}}function _(t){return function(e){return(fe(e,"input")||fe(e,"button"))&&e.type===t}}function z(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&R(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function X(a){return F(function(o){return o=+o,F(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function U(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}function V(e){var t,n=e?e.ownerDocument||e:ye;return n!=T&&9===n.nodeType&&n.documentElement&&(r=(T=n).documentElement,C=!ce.isXMLDoc(T),i=r.matches||r.webkitMatchesSelector||r.msMatchesSelector,r.msMatchesSelector&&ye!=T&&(t=T.defaultView)&&t.top!==t&&t.addEventListener("unload",M),le.getById=$(function(e){return r.appendChild(e).id=ce.expando,!T.getElementsByName||!T.getElementsByName(ce.expando).length}),le.disconnectedMatch=$(function(e){return i.call(e,"*")}),le.scope=$(function(){return T.querySelectorAll(":scope")}),le.cssHas=$(function(){try{return T.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),le.getById?(b.filter.ID=function(e){var t=e.replace(O,P);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(O,P);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&C){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},b.find.CLASS=function(e,t){if("undefined"!=typeof t.getElementsByClassName&&C)return t.getElementsByClassName(e)},d=[],$(function(e){var t;r.appendChild(e).innerHTML="",e.querySelectorAll("[selected]").length||d.push("\\["+ge+"*(?:value|"+f+")"),e.querySelectorAll("[id~="+S+"-]").length||d.push("~="),e.querySelectorAll("a#"+S+"+*").length||d.push(".#.+[+~]"),e.querySelectorAll(":checked").length||d.push(":checked"),(t=T.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),r.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&d.push(":enabled",":disabled"),(t=T.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||d.push("\\["+ge+"*name"+ge+"*="+ge+"*(?:''|\"\")")}),le.cssHas||d.push(":has"),d=d.length&&new RegExp(d.join("|")),l=function(e,t){if(e===t)return a=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!le.sortDetached&&t.compareDocumentPosition(e)===n?e===T||e.ownerDocument==ye&&I.contains(ye,e)?-1:t===T||t.ownerDocument==ye&&I.contains(ye,t)?1:o?se.call(o,e)-se.call(o,t):0:4&n?-1:1)}),T}for(e in I.matches=function(e,t){return I(e,null,null,t)},I.matchesSelector=function(e,t){if(V(e),C&&!h[t+" "]&&(!d||!d.test(t)))try{var 
n=i.call(e,t);if(n||le.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){h(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(O,P),e[3]=(e[3]||e[4]||e[5]||"").replace(O,P),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||I.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&I.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return D.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&j.test(n)&&(t=Y(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(O,P).toLowerCase();return"*"===e?function(){return!0}:function(e){return fe(e,t)}},CLASS:function(e){var t=s[e+" "];return t||(t=new RegExp("(^|"+ge+")"+e+"("+ge+"|$)"))&&s(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=I.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function T(e,n,r){return v(n)?ce.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?ce.grep(e,function(e){return e===n!==r}):"string"!=typeof n?ce.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(ce.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||k,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:S.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof ce?t[0]:t,ce.merge(this,ce.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:C,!0)),w.test(r[1])&&ce.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=C.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(ce):ce.makeArray(e,this)}).prototype=ce.fn,k=ce(C);var E=/^(?:parents|prev(?:Until|All))/,j={children:!0,contents:!0,next:!0,prev:!0};function A(e,t){while((e=e[t])&&1!==e.nodeType);return e}ce.fn.extend({has:function(e){var t=ce(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,Ce=/^$|^module$|\/(?:java|ecma)script/i;xe=C.createDocumentFragment().appendChild(C.createElement("div")),(be=C.createElement("input")).setAttribute("type","radio"),be.setAttribute("checked","checked"),be.setAttribute("name","t"),xe.appendChild(be),le.checkClone=xe.cloneNode(!0).cloneNode(!0).lastChild.checked,xe.innerHTML="",le.noCloneChecked=!!xe.cloneNode(!0).lastChild.defaultValue,xe.innerHTML="",le.option=!!xe.lastChild;var ke={thead:[1,""],col:[2,""],tr:[2,""],td:[3,""],_default:[0,"",""]};function Se(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&fe(e,t)?ce.merge([e],n):n}function Ee(e,t){for(var n=0,r=e.length;n",""]);var je=/<|?\w+;/;function Ae(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function Re(e,t){return fe(e,"table")&&fe(11!==t.nodeType?t:t.firstChild,"tr")&&ce(e).children("tbody")[0]||e}function Ie(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function 
We(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Fe(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(_.hasData(e)&&(s=_.get(e).events))for(i in _.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return 
e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 0 {
+ var LocalDuplicateReportPath = GetLocalDuplicateReportPath();
+ console.log(LocalDupReportExist);
+ var MyHeader = React.createElement(
+ "h1",
+ null,
+ "DupFileManager Report Menu"
+ );
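+ // If a local report already exists, offer to show it alongside the rebuild options; otherwise only offer report creation.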
+ if (LocalDupReportExist)
+ return React.createElement(
+ "center",
+ null,
+ MyHeader,
+ GetShowReportButton(
+ LocalDuplicateReportPath,
+ "Show Duplicate-File Report"
+ ),
+ React.createElement("p", null),
+ GetAdvanceMenuButton(),
+ React.createElement("p", null),
+ GetCreateReportNoTagButton("Create New Report (NO Tagging)"),
+ React.createElement("p", null),
+ GetCreateReportButton("Create New Report with Tagging"),
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ return React.createElement(
+ "center",
+ null,
+ MyHeader,
+ GetCreateReportNoTagButton("Create Duplicate-File Report (NO Tagging)"),
+ React.createElement("p", null),
+ GetCreateReportButton("Create Duplicate-File Report with Tagging"),
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
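+ // Runs the tagging report task on render, then links to the finished report.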
+ const CreateReport = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to create report. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("tag_duplicates_task");
+ return React.createElement(
+ "center",
+ null,
+ React.createElement(
+ "h1",
+ null,
+ "Report complete. Click [Show Report] to view report."
+ ),
+ GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"),
+ React.createElement("p", null),
+ GetAdvanceMenuButton(),
+ React.createElement("p", null),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
+ const CreateReportWithNoTagging = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message: "Running task to create report. Please standby.",
+ });
+ RunPluginDupFileManager("createDuplicateReportWithoutTagging");
+ return React.createElement(
+ "center",
+ null,
+ React.createElement(
+ "h1",
+ null,
+ "Created HTML report without tagging. Click [Show Report] to view report."
+ ),
+ GetShowReportButton(GetLocalDuplicateReportPath(), "Show Report"),
+ React.createElement("p", null),
+ GetAdvanceMenuButton(),
+ React.createElement("p", null),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
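+ // Static menu of report, tagged-duplicate, tag-management, and miscellaneous task links.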
+ const ToolsAndUtilities = () => {
+ return React.createElement(
+ "center",
+ null,
+ React.createElement("h1", null, "DupFileManager Tools and Utilities"),
+ React.createElement("p", null),
+
+ React.createElement("h3", { class: "submenu" }, "Report Options"),
+ React.createElement("p", null),
+ GetCreateReportNoTagButton("Create Report (NO Tagging)"),
+ React.createElement("p", null),
+ GetCreateReportButton("Create Report (Tagging)"),
+ React.createElement("p", null),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ GetShowReportButton(
+ GetLocalDuplicateReportPath(),
+ "Show Duplicate-File Report"
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_deleteLocalDupReportHtmlFiles",
+ title: "Delete local HTML duplicate file report.",
+ },
+ React.createElement(
+ Button,
+ null,
+ "Delete Duplicate-File Report HTML Files"
+ )
+ ),
+ React.createElement("hr", { class: "dotted" }),
+
+ React.createElement(
+ "h3",
+ { class: "submenu" },
+ "Tagged Duplicates Options"
+ ),
+ React.createElement("p", null),
+ GetAdvanceMenuButton(),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_deleteTaggedDuplicatesTask",
+ title:
+ "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion).",
+ },
+ React.createElement(Button, null, "Delete Tagged Duplicates")
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask",
+ title:
+ "Delete scenes only in blacklist which where previously given duplicate tag (_DuplicateMarkForDeletion).",
+ },
+ React.createElement(
+ Button,
+ null,
+ "Delete Tagged Duplicates in Blacklist Only"
+ )
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration",
+ title:
+ "Delete scenes previously given duplicate tag (_DuplicateMarkForDeletion) and lower resultion or duration compare to primary (ToKeep) duplicate.",
+ },
+ React.createElement(
+ Button,
+ null,
+ "Delete Low Res/Dur Tagged Duplicates"
+ )
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration",
+ title:
+ "Delete scenes only in blacklist which where previously given duplicate tag (_DuplicateMarkForDeletion) and lower resultion or duration compare to primary (ToKeep) duplicate.",
+ },
+ React.createElement(
+ Button,
+ null,
+ "Delete Low Res/Dur Tagged Duplicates in Blacklist Only"
+ )
+ ),
+ React.createElement("p", null),
+ React.createElement("hr", { class: "dotted" }),
+
+ React.createElement(
+ "h3",
+ { class: "submenu" },
+ "Tagged Management Options"
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_ClearAllDuplicateTags",
+ title:
+ "Remove duplicate tag from all scenes. This task may take some time to complete.",
+ },
+ React.createElement(Button, null, "Clear All Duplicate Tags")
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_deleteAllDupFileManagerTags",
+ title: "Delete all DupFileManager tags from stash.",
+ },
+ React.createElement(Button, null, "Delete All DupFileManager Tags")
+ ),
+ React.createElement("p", null),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_tagGrayList",
+ title:
+ "Set tag _GraylistMarkForDeletion to scenes having DuplicateMarkForDeletion tag and that are in the Graylist.",
+ },
+ React.createElement(Button, null, "Tag Graylist")
+ ),
+ React.createElement("hr", { class: "dotted" }),
+
+ React.createElement("h3", { class: "submenu" }, "Miscellaneous Options"),
+ React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_generatePHASH_Matching",
+ title:
+ "Generate PHASH (Perceptual hashes) matching. Used for file comparisons.",
+ },
+ React.createElement(
+ Button,
+ null,
+ "Generate PHASH (Perceptual hashes) Matching"
+ )
+ ),
+ React.createElement("p", null),
+ React.createElement("p", null),
+ React.createElement("p", null),
+ React.createElement("p", null)
+ );
+ };
+ const ClearAllDuplicateTags = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running clear duplicate tags in background. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("clear_duplicate_tags_task");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement(
+ "h1",
+ null,
+ "Removed duplicate tags from all scenes."
+ ),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
+ const deleteLocalDupReportHtmlFiles = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message: "Running task to delete HTML files. Please standby.",
+ });
+ RunPluginDupFileManager("deleteLocalDupReportHtmlFiles");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement(
+ "h2",
+ null,
+ "Deleted the HTML duplicate file report from local files."
+ ),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
+ const deleteAllDupFileManagerTags = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to delete all DupFileManager tags in background. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("deleteAllDupFileManagerTags");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement("h1", null, "Deleted all DupFileManager tags."),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
+ const generatePHASH_Matching = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task generate PHASH (Perceptual hashes) matching in background. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("generate_phash_task");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement("h1", null, "PHASH (Perceptual hashes) complete."),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
+ const tagGrayList = () => {
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to tag _GraylistMarkForDeletion to scenes having DuplicateMarkForDeletion tag and that are in the Graylist. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("graylist_tag_task");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement("h1", null, "Gray list tagging complete."),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ };
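+ // The four delete tasks below ask for confirmation first, since they remove files from disk and scenes from stash.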
+ const deleteTaggedDuplicatesTask = () => {
+ // Hooks must run on every render, so load components before the confirmation prompt.
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ let result = confirm(
+ "Are you sure you want to delete all scenes having _DuplicateMarkForDeletion tags? This will delete the files, and remove them from stash."
+ );
+ if (result) {
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to delete all scenes with _DuplicateMarkForDeletion tag. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("delete_tagged_duplicates_task");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement("h1", null, "Scenes with dup tag deleted."),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ }
+ return ToolsAndUtilities();
+ };
+ const deleteBlackListTaggedDuplicatesTask = () => {
+ // Hooks must run on every render, so load components before the confirmation prompt.
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ let result = confirm(
+ "Are you sure you want to delete all scenes in blacklist having _DuplicateMarkForDeletion tags? This will delete the files, and remove them from stash."
+ );
+ if (result) {
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("deleteBlackListTaggedDuplicatesTask");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement(
+ "h1",
+ null,
+ "Blacklist scenes with dup tag deleted."
+ ),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ }
+ return ToolsAndUtilities();
+ };
+ const deleteTaggedDuplicatesLwrResOrLwrDuration = () => {
+ // Hooks must run on every render, so load components before the confirmation prompt.
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ let result = confirm(
+ "Are you sure you want to delete scenes having _DuplicateMarkForDeletion tags and lower resolution or duration? This will delete the files, and remove them from stash."
+ );
+ if (result) {
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to delete all scenes with _DuplicateMarkForDeletion tag and lower resultion or duration. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager("deleteTaggedDuplicatesLwrResOrLwrDuration");
+ return React.createElement(
+ "div",
+ null,
+ React.createElement(
+ "h1",
+ null,
+ "Scenes with dup tag and lower resultion or duration deleted."
+ ),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ }
+ return ToolsAndUtilities();
+ };
+ const deleteBlackListTaggedDuplicatesLwrResOrLwrDuration = () => {
+ // Hooks must run on every render, so load components before the confirmation prompt.
+ const componentsLoading = PluginApi.hooks.useLoadComponents([
+ PluginApi.loadableComponents.SceneCard,
+ ]);
+ let result = confirm(
+ "Are you sure you want to delete scenes in blacklist having _DuplicateMarkForDeletion tags and lower resolution or duration? This will delete the files, and remove them from stash."
+ );
+ if (result) {
+ if (componentsLoading)
+ return React.createElement(LoadingIndicator, {
+ message:
+ "Running task to delete all scenes in blacklist with _DuplicateMarkForDeletion tag and lower resultion or duration. This may take a while. Please standby.",
+ });
+ RunPluginDupFileManager(
+ "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"
+ );
+ return React.createElement(
+ "div",
+ null,
+ React.createElement(
+ "h1",
+ null,
+ "Blacklist scenes with dup tag and lower resultion or duration deleted."
+ ),
+ DupFileManagerReportMenuButton,
+ React.createElement("p", null),
+ ToolsMenuOptionButton
+ );
+ }
+ return ToolsAndUtilities();
+ };
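+ // Register a client-side route for each page defined above.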
+ PluginApi.register.route("/plugin/DupFileManager", HomePage);
+ PluginApi.register.route("/plugin/DupFileManager_CreateReport", CreateReport);
+ PluginApi.register.route(
+ "/plugin/DupFileManager_CreateReportWithNoTagging",
+ CreateReportWithNoTagging
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_ToolsAndUtilities",
+ ToolsAndUtilities
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_ClearAllDuplicateTags",
+ ClearAllDuplicateTags
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_deleteLocalDupReportHtmlFiles",
+ deleteLocalDupReportHtmlFiles
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_deleteAllDupFileManagerTags",
+ deleteAllDupFileManagerTags
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_generatePHASH_Matching",
+ generatePHASH_Matching
+ );
+ PluginApi.register.route("/plugin/DupFileManager_tagGrayList", tagGrayList);
+ PluginApi.register.route(
+ "/plugin/DupFileManager_deleteTaggedDuplicatesTask",
+ deleteTaggedDuplicatesTask
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesTask",
+ deleteBlackListTaggedDuplicatesTask
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_deleteTaggedDuplicatesLwrResOrLwrDuration",
+ deleteTaggedDuplicatesLwrResOrLwrDuration
+ );
+ PluginApi.register.route(
+ "/plugin/DupFileManager_deleteBlackListTaggedDuplicatesLwrResOrLwrDuration",
+ deleteBlackListTaggedDuplicatesLwrResOrLwrDuration
+ );
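+ // Add buttons for the report menu and the tools page to the Settings > Tools section.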
+ PluginApi.patch.before("SettingsToolsSection", function (props) {
+ const { Setting } = PluginApi.components;
+ return [
+ {
+ children: React.createElement(
+ React.Fragment,
+ null,
+ props.children,
+ React.createElement(Setting, {
+ heading: React.createElement(
+ Link,
+ { to: "/plugin/DupFileManager", title: ReportMenuButtonToolTip },
+ React.createElement(
+ Button,
+ null,
+ "Duplicate File Report (DupFileManager)"
+ )
+ ),
+ }),
+ React.createElement(Setting, {
+ heading: React.createElement(
+ Link,
+ {
+ to: "/plugin/DupFileManager_ToolsAndUtilities",
+ title: ToolsMenuToolTip,
+ },
+ React.createElement(
+ Button,
+ null,
+ "DupFileManager Tools and Utilities"
+ )
+ ),
+ })
+ ),
+ },
+ ];
+ });
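+ // Add a nav-bar utility button (ethernet icon) that links to the report menu.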
+ PluginApi.patch.before("MainNavBar.UtilityItems", function (props) {
+ const { Icon } = PluginApi.components;
+ return [
+ {
+ children: React.createElement(
+ React.Fragment,
+ null,
+ props.children,
+ React.createElement(
+ NavLink,
+ {
+ className: "nav-utility",
+ exact: true,
+ to: "/plugin/DupFileManager",
+ },
+ React.createElement(
+ Button,
+ {
+ className: "minimal d-flex align-items-center h-100",
+ title: ReportMenuButtonToolTip,
+ },
+ React.createElement(Icon, { icon: faEthernet })
+ )
+ )
+ ),
+ },
+ ];
+ });
+})();
diff --git a/plugins/DupFileManager/DupFileManager.js.map b/plugins/DupFileManager/DupFileManager.js.map
new file mode 100644
index 00000000..5fdfda50
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"DupFileManager.js","sourceRoot":"","sources":["../src/DupFileManager.tsx"],"names":[],"mappings":";AA0CA,CAAC;IACC,MAAM,SAAS,GAAI,MAAc,CAAC,SAAuB,CAAC;IAC1D,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,CAAC;IAC9B,MAAM,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC;IAE1B,MAAM,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC;IACjD,MAAM,EAAE,UAAU,EAAE,GAAG,SAAS,CAAC,SAAS,CAAC,gBAAgB,CAAC;IAC5D,MAAM,EACJ,IAAI,EACJ,OAAO,GACR,GAAG,SAAS,CAAC,SAAS,CAAC,cAAc,CAAC;IAEvC,MAAM,EACJ,QAAQ,EACT,GAAG,SAAS,CAAC,KAAK,CAAC;IAEpB,SAAS,CAAC,KAAK,CAAC,gBAAgB,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAA;IAEtJ,MAAM,cAAc,GAEf,CAAC,EAAE,SAAS,EAAE,EAAE,EAAE;QACrB,8EAA8E;QAC9E,yDAAyD;QACzD,MAAM,EACJ,YAAY,GACb,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,MAAM,cAAc,GAAG,KAAK,CAAC,OAAO,CAClC,GAAG,EAAE;;YAAC,OAAA,CACJ,6BAAK,SAAS,EAAC,yBAAyB;gBACtC,oBAAC,IAAI,IAAC,EAAE,EAAE,eAAe,SAAS,CAAC,EAAE,EAAE;oBACrC,6BACE,SAAS,EAAC,iBAAiB,EAC3B,GAAG,EAAE,MAAA,SAAS,CAAC,IAAI,mCAAI,EAAE,EACzB,GAAG,EAAE,MAAA,SAAS,CAAC,UAAU,mCAAI,EAAE,GAC/B,CACG,CACH,CACP,CAAA;SAAA,EACD,CAAC,SAAS,CAAC,CACZ,CAAC;QAEF,OAAO,CACL,oBAAC,YAAY,IACX,SAAS,EAAC,uBAAuB,EACjC,SAAS,EAAC,KAAK,EACf,OAAO,EAAE,cAAc,EACvB,UAAU,EAAE,GAAG;YAEf,2BAAG,IAAI,EAAE,QAAQ,CAAC,sBAAsB,CAAC,SAAS,CAAC,IAAG,SAAS,CAAC,IAAI,CAAK,CAC5D,CAChB,CAAC;IACJ,CAAC,CAAC;IAEF,SAAS,YAAY,CAAC,KAAU;QAC9B,MAAM,EACJ,OAAO,GACR,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,SAAS,qBAAqB;YAC5B,IAAI,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,IAAI,CAAC;gBAAE,OAAO;YAE/C,OAAO,CACL,6BAAK,SAAS,EAAC,wBAAwB,IACpC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,SAAc,EAAE,EAAE,CAAC,CAC9C,oBAAC,cAAc,IAAC,SAAS,EAAE,SAAS,EAAE,GAAG,EAAE,SAAS,CAAC,EAAE,GAAI,CAC5D,CAAC,CACE,CACP,CAAC;QACJ,CAAC;QAED,SAAS,eAAe;YACtB,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC;gBAAE,OAAO;YAEzC,OAAO,CACL,6BAAK,SAAS,EAAC,kBAAkB,IAC9B,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAQ,EAAE,EAAE,CAAC,CAClC,oBAAC,OAAO,IAAC,GAAG,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,EAAE,GAAG,GAAI,CACnC,CAAC,CACE,CACP,CAAC;QACJ,CAAC;QAED,OAAO,CACL,6BAAK,SAAS,EAAC,qBAAqB;YAClC,8BAAM,SAAS,EAAC,kBAAkB,IAAE,KAAK,CAAC,KAAK,CAAC,IAAI,CAAQ;YAC3D,qBAAqB,EAAE;YACvB,eAAe,EAAE,CACd,CACP,CAAC;IACJ,CAAC;IAED,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,mBAAmB,EAAE,UAAU,KAAU,EAAE,CAAM,EAAE,QAAa;QACtF,OAAO,oBAAC,YAAY,OAAK,KAAK,GAAI,CAAC;IACrC,CAAC,CAAC,CAAC;IAEH,MAAM,QAAQ,GAAa,GAAG,EAAE;QAC9B,MAAM,iBAAiB,GAAG,SAAS,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC,SAAS,CAAC,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;QAEtG,MAAM,EACJ,SAAS,EACT,gBAAgB,GACjB,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,mDAAmD;QACnD,MAAM,EAAE,IAAI,EAAE,GAAG,GAAG,CAAC,kBAAkB,CAAC;YACtC,SAAS,EAAE;gBACT,MAAM,EAAE;oBACN,QAAQ,EAAE,CAAC;oBACX,IAAI,EAAE,QAAQ;iBACf;aACF;SACF,CAAC,CAAC;QAEH,MAAM,KAAK,GAAG,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QAEzC,IAAI,iBAAiB;YAAE,OAAO,CAC5B,oBAAC,gBAAgB,OAAG,CACrB,CAAC;QAEF,OAAO,CACL;YACE,wDAA+B;YAC9B,CAAC,CAAC,KAAK,IAAI,oBAAC,SAAS,IAAC,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,GAAI,CACvD,CACP,CAAC;IACJ,CAAC,CAAC;IAEF,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,oBAAoB,EAAE,QAAQ,CAAC,CAAC;IAEzD,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,sBAAsB,EAAE,UAAU,KAAU;QACjE,MAAM,EACJ,OAAO,GACR,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,OAAO;YACL;gBACE,QAAQ,EAAE,CACR;oBACG,KAAK,CAAC,QAAQ;oBACf,oBAAC,OAAO,IACN,OAAO,EACL,oBAAC,IAAI,IAAC,EAAE,EAAC,oBAAoB;4BAC3B,oBAAC,MAAM,oBAEE,CACJ,GAET,CACD,CACJ;aACF;SACF,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,yBAAyB,EAAE,UAAU,KAAU;QACpE,MAAM,EACJ,IAAI,GACL,GAAG,SAAS,CAAC,UAAU,CAAC;QAEzB,OAAO;YACL;gBACE,QAAQ,EAA
E,CACR;oBACG,KAAK,CAAC,QAAQ;oBACf,oBAAC,OAAO,IACN,SAAS,EAAC,aAAa,EACvB,KAAK,QACL,EAAE,EAAC,oBAAoB;wBAEvB,oBAAC,MAAM,IACL,SAAS,EAAC,yCAAyC,EACnD,KAAK,EAAC,WAAW;4BAEjB,oBAAC,IAAI,IAAC,IAAI,EAAE,UAAU,GAAI,CACnB,CACD,CACT,CACJ;aACF;SACF,CAAA;IACH,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,EAAE,CAAC"}
\ No newline at end of file
diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py
index c9ef4a16..16625534 100644
--- a/plugins/DupFileManager/DupFileManager.py
+++ b/plugins/DupFileManager/DupFileManager.py
@@ -3,31 +3,60 @@
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
# Note: To call this script outside of Stash, pass argument --url
# Example: python DupFileManager.py --url http://localhost:9999 -a
-import os, sys, time, pathlib, argparse, platform, shutil, logging
+try:
+ import ModulesValidate
+ ModulesValidate.modulesInstalled(["send2trash", "requests"], silent=True)
+except Exception as e:
+ import traceback, sys
+ tb = traceback.format_exc()
+ print(f"ModulesValidate Exception. Error: {e}\nTraceBack={tb}", file=sys.stderr)
+import os, sys, time, pathlib, argparse, platform, shutil, traceback, logging, requests
+from datetime import datetime
from StashPluginHelper import StashPluginHelper
+from stashapi.stash_types import PhashDistance
from DupFileManager_config import config # Import config from DupFileManager_config.py
+from DupFileManager_report_config import report_config
+
+# ToDo: make sure the following line of code works
+config |= report_config
parser = argparse.ArgumentParser()
parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.')
+parser.add_argument('--clear_dup_tag', '-c', dest='clear_tag', action='store_true', help='Clear duplicate tags from scenes.')
parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.')
parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.')
parse_args = parser.parse_args()
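+# Example standalone run (URL is illustrative; point it at any reachable Stash instance):
+#   python DupFileManager.py --url http://localhost:9999 --add_dup_tag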
settings = {
+ "matchDupDistance": 0,
"mergeDupFilename": False,
- "permanentlyDelete": False,
"whitelistDelDupInSameFolder": False,
- "whitelistDoTagLowResDup": False,
- "zCleanAfterDel": False,
- "zSwapHighRes": False,
- "zSwapLongLength": False,
+ "zvWhitelist": "",
+ "zwGraylist": "",
+ "zxBlacklist": "",
+ "zyMaxDupToProcess": 0,
+ "zySwapHighRes": False,
+ "zySwapLongLength": False,
+ "zySwapBetterBitRate": False,
+ "zySwapCodec": False,
+ "zySwapBetterFrameRate": False,
+ "zzDebug": False,
+ "zzTracing": False,
+
+ "zzObsoleteSettingsCheckVer2": False, # This is a hidden variable that is NOT displayed in the UI
+
+ # Obsolete setting names
"zWhitelist": "",
"zxGraylist": "",
"zyBlacklist": "",
- "zyMaxDupToProcess": 0,
- "zzdebugTracing": False,
+ "zyMatchDupDistance": 0,
+ "zSwapHighRes": False,
+ "zSwapLongLength": False,
+ "zSwapBetterBitRate": False,
+ "zSwapCodec": False,
+ "zSwapBetterFrameRate": False,
}
stash = StashPluginHelper(
stash_url=parse_args.stash_url,
@@ -35,64 +64,172 @@
settings=settings,
config=config,
maxbytes=10*1024*1024,
+ DebugTraceFieldName="zzTracing",
+ DebugFieldName="zzDebug",
)
+stash.convertToAscii = True
+
+advanceMenuOptions = [ "applyCombo", "applyComboBlacklist", "pathToDelete", "pathToDeleteBlacklist", "sizeToDeleteLess", "sizeToDeleteGreater", "sizeToDeleteBlacklistLess", "sizeToDeleteBlacklistGreater", "durationToDeleteLess", "durationToDeleteGreater", "durationToDeleteBlacklistLess", "durationToDeleteBlacklistGreater",
+ "commonResToDeleteLess", "commonResToDeleteEq", "commonResToDeleteGreater", "commonResToDeleteBlacklistLess", "commonResToDeleteBlacklistEq", "commonResToDeleteBlacklistGreater", "resolutionToDeleteLess", "resolutionToDeleteEq", "resolutionToDeleteGreater",
+ "resolutionToDeleteBlacklistLess", "resolutionToDeleteBlacklistEq", "resolutionToDeleteBlacklistGreater", "ratingToDeleteLess", "ratingToDeleteEq", "ratingToDeleteGreater", "ratingToDeleteBlacklistLess", "ratingToDeleteBlacklistEq", "ratingToDeleteBlacklistGreater",
+ "tagToDelete", "tagToDeleteBlacklist", "titleToDelete", "titleToDeleteBlacklist", "pathStrToDelete", "pathStrToDeleteBlacklist"]
+
+doJsonReturnModeTypes = ["tag_duplicates_task", "removeDupTag", "addExcludeTag", "removeExcludeTag", "mergeTags", "getLocalDupReportPath",
+ "createDuplicateReportWithoutTagging", "deleteLocalDupReportHtmlFiles", "clear_duplicate_tags_task",
+ "deleteAllDupFileManagerTags", "deleteBlackListTaggedDuplicatesTask", "deleteTaggedDuplicatesLwrResOrLwrDuration",
+ "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", "create_duplicate_report_task"]
+doJsonReturnModeTypes += advanceMenuOptions
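+# When invoked as a plugin task with no CLI arguments, these tasks log to file only, presumably keeping stdout clean for their JSON response.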
+doJsonReturn = False
+if len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in doJsonReturnModeTypes:
+ doJsonReturn = True
+ stash.log_to_norm = stash.LogTo.FILE
+elif stash.PLUGIN_TASK_NAME == "doEarlyExit":
+ time.sleep(3)
+ stash.Log("Doing early exit because of task name")
+ time.sleep(3)
+ exit(0)
+
+stash.Log("******************* Starting *******************")
if len(sys.argv) > 1:
stash.Log(f"argv = {sys.argv}")
else:
- stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
-stash.Status(logLevel=logging.DEBUG)
+ stash.Debug(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}; PLUGIN_TASK_NAME = {stash.PLUGIN_TASK_NAME}; argv = {sys.argv}")
+stash.status(logLevel=logging.DEBUG)
-# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
-# stash.encodeToUtf8 = True
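+# One-time migration of pre-v2 setting names to their current equivalents, tracked by the hidden zzObsoleteSettingsCheckVer2 flag.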
+obsoleteSettingsToConvert = {"zWhitelist" : "zvWhitelist", "zxGraylist" : "zwGraylist", "zyBlacklist" : "zxBlacklist", "zyMatchDupDistance" : "matchDupDistance", "zSwapHighRes" : "zySwapHighRes", "zSwapLongLength" : "zySwapLongLength", "zSwapBetterBitRate" : "zySwapBetterBitRate", "zSwapCodec" : "zySwapCodec", "zSwapBetterFrameRate" : "zySwapBetterFrameRate"}
+stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "zzObsoleteSettingsCheckVer2")
-LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
+LOG_STASH_N_PLUGIN = stash.LogTo.STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LogTo.CONSOLE + stash.LogTo.FILE
listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
+clearAllDupfileManagerTags = stash.Setting('clearAllDupfileManagerTags')
+doGeneratePhash = stash.Setting('doGeneratePhash')
mergeDupFilename = stash.Setting('mergeDupFilename')
moveToTrashCan = False if stash.Setting('permanentlyDelete') else True
alternateTrashCanPath = stash.Setting('dup_path')
whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
-whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup')
+graylistTagging = stash.Setting('graylistTagging')
maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
-swapHighRes = stash.Setting('zSwapHighRes')
-swapLongLength = stash.Setting('zSwapLongLength')
-significantTimeDiff = stash.Setting('significantTimeDiff')
+significantTimeDiff = float(stash.Setting('significantTimeDiff'))
toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
-cleanAfterDel = stash.Setting('zCleanAfterDel')
-duration_diff = float(stash.Setting('duration_diff'))
-if duration_diff > 10:
- duration_diff = 10
-elif duration_diff < 1:
- duration_diff = 1
+cleanAfterDel = stash.Setting('cleanAfterDel')
+
+swapHighRes = stash.Setting('zySwapHighRes')
+swapLongLength = stash.Setting('zySwapLongLength')
+swapBetterBitRate = stash.Setting('zySwapBetterBitRate')
+swapCodec = stash.Setting('zySwapCodec')
+swapBetterFrameRate = stash.Setting('zySwapBetterFrameRate')
+favorLongerFileName = stash.Setting('favorLongerFileName')
+favorLargerFileSize = stash.Setting('favorLargerFileSize')
+favorBitRateChange = stash.Setting('favorBitRateChange')
+favorHighBitRate = stash.Setting('favorHighBitRate')
+favorFrameRateChange = stash.Setting('favorFrameRateChange')
+favorHigherFrameRate = stash.Setting('favorHigherFrameRate')
+favorCodecRanking = stash.Setting('favorCodecRanking')
+codecRankingSetToUse = stash.Setting('codecRankingSetToUse')
+if codecRankingSetToUse == 4:
+ codecRanking = stash.Setting('codecRankingSet4')
+elif codecRankingSetToUse == 3:
+ codecRanking = stash.Setting('codecRankingSet3')
+elif codecRankingSetToUse == 2:
+ codecRanking = stash.Setting('codecRankingSet2')
+else:
+ codecRanking = stash.Setting('codecRankingSet1')
+skipIfTagged = stash.Setting('skipIfTagged')
+killScanningPostProcess = stash.Setting('killScanningPostProcess')
+tagLongDurationLowRes = stash.Setting('tagLongDurationLowRes')
+bitRateIsImporantComp = stash.Setting('bitRateIsImporantComp')
+codecIsImporantComp = stash.Setting('codecIsImporantComp')
+
+excludeFromReportIfSignificantTimeDiff = False
+
+matchDupDistance = int(stash.Setting('matchDupDistance'))
+matchPhaseDistance = PhashDistance.EXACT
+matchPhaseDistanceText = "Exact Match"
+if (stash.PLUGIN_TASK_NAME == "tag_duplicates_task" or stash.PLUGIN_TASK_NAME == "create_duplicate_report_task") and 'Target' in stash.JSON_INPUT['args']:
+ stash.enableProgressBar(False)
+ if stash.JSON_INPUT['args']['Target'].startswith("0"):
+ matchDupDistance = 0
+ elif stash.JSON_INPUT['args']['Target'].startswith("1"):
+ matchDupDistance = 1
+ elif stash.JSON_INPUT['args']['Target'].startswith("2"):
+ matchDupDistance = 2
+ elif stash.JSON_INPUT['args']['Target'].startswith("3"):
+ matchDupDistance = 3
+
+ if stash.JSON_INPUT['args']['Target'].find(":") == 1:
+ significantTimeDiff = float(stash.JSON_INPUT['args']['Target'][2:])
+ excludeFromReportIfSignificantTimeDiff = True
+
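+# matchDupDistance 0-3 maps to PhashDistance EXACT/HIGH/MEDIUM/LOW; a task's Target argument (above) can override the configured value.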
+if matchDupDistance == 1:
+ matchPhaseDistance = PhashDistance.HIGH
+ matchPhaseDistanceText = "High Match"
+elif matchDupDistance == 2:
+ matchPhaseDistance = PhashDistance.MEDIUM
+ matchPhaseDistanceText = "Medium Match"
+elif matchDupDistance == 3:
+ matchPhaseDistance = PhashDistance.LOW
+ matchPhaseDistanceText = "Low Match"
-# significantTimeDiff can not be higher than 1 and shouldn't be lower than .5
+# significantTimeDiff cannot be higher than 1 and shouldn't be lower than .25
if significantTimeDiff > 1:
- significantTimeDiff = 1
-if significantTimeDiff < .5:
- significantTimeDiff = .5
+ significantTimeDiff = float(1.00)
+if significantTimeDiff < .25:
+ significantTimeDiff = float(0.25)
duplicateMarkForDeletion = stash.Setting('DupFileTag')
if duplicateMarkForDeletion == "":
duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
+base1_duplicateMarkForDeletion = duplicateMarkForDeletion
+
duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
if duplicateWhitelistTag == "":
- duplicateWhitelistTag = 'DuplicateWhitelistFile'
+ duplicateWhitelistTag = '_DuplicateWhitelistFile'
+
+excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag')
+if excludeDupFileDeleteTag == "":
+ excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion'
+
+graylistMarkForDeletion = stash.Setting('graylistMarkForDeletion')
+if graylistMarkForDeletion == "":
+ graylistMarkForDeletion = '_GraylistMarkForDeletion'
+
+longerDurationLowerResolution = stash.Setting('longerDurationLowerResolution')
+if longerDurationLowerResolution == "":
+ longerDurationLowerResolution = '_LongerDurationLowerResolution'
-excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag]
-stash.init_mergeMetadata(excludeMergeTags)
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag]
-graylist = stash.Setting('zxGraylist').split(listSeparator)
+if stash.Setting('underscoreDupFileTag') and not duplicateMarkForDeletion.startswith('_'):
+ duplicateMarkForDeletionWithOutUnderscore = duplicateMarkForDeletion
+ duplicateMarkForDeletion = "_" + duplicateMarkForDeletion
+ if stash.renameTag(duplicateMarkForDeletionWithOutUnderscore, duplicateMarkForDeletion):
+ stash.Log(f"Renamed tag {duplicateMarkForDeletionWithOutUnderscore} to {duplicateMarkForDeletion}")
+ stash.Trace(f"Added underscore to {duplicateMarkForDeletionWithOutUnderscore} = {duplicateMarkForDeletion}")
+ excludeMergeTags += [duplicateMarkForDeletion]
+else:
+ stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
+
+base2_duplicateMarkForDeletion = duplicateMarkForDeletion
+
+if stash.Setting('appendMatchDupDistance'):
+ duplicateMarkForDeletion += f"_{matchDupDistance}"
+ excludeMergeTags += [duplicateMarkForDeletion]
+
+stash.initMergeMetadata(excludeMergeTags)
+
+graylist = stash.Setting('zwGraylist').split(listSeparator)
graylist = [item.lower() for item in graylist]
if graylist == [""] : graylist = []
stash.Trace(f"graylist = {graylist}")
-whitelist = stash.Setting('zWhitelist').split(listSeparator)
+whitelist = stash.Setting('zvWhitelist').split(listSeparator)
whitelist = [item.lower() for item in whitelist]
if whitelist == [""] : whitelist = []
stash.Trace(f"whitelist = {whitelist}")
-blacklist = stash.Setting('zyBlacklist').split(listSeparator)
+blacklist = stash.Setting('zxBlacklist').split(listSeparator)
blacklist = [item.lower() for item in blacklist]
if blacklist == [""] : blacklist = []
stash.Trace(f"blacklist = {blacklist}")
@@ -169,51 +306,49 @@ def testReparsePointAndSymLink(merge=False, deleteDup=False):
stash.Log(f"Not isSymLink '{myTestPath6}'")
return
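+# detailPrefix marks details text written by this plugin, so setTagId can tell whether a scene's details were already stamped.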
+detailPrefix = "BaseDup="
+detailPostfix = "\n"
-def createTagId(tagName, tagName_descp, deleteIfExist = False):
- tagId = stash.find_tags(q=tagName)
- if len(tagId):
- tagId = tagId[0]
- if deleteIfExist:
- stash.destroy_tag(int(tagId['id']))
- else:
- return tagId['id']
- tagId = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True})
- stash.Log(f"Dup-tagId={tagId['id']}")
- return tagId['id']
-
-def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
+def setTagId(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False):
details = ""
ORG_DATA_DICT = {'id' : sceneDetails['id']}
dataDict = ORG_DATA_DICT.copy()
doAddTag = True
if addPrimaryDupPathToDetails:
- BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n"
+ BaseDupStr = f"{detailPrefix}{DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n{TagReason}(matchDupDistance={matchPhaseDistanceText})\n{detailPostfix}"
if sceneDetails['details'] == "":
details = BaseDupStr
- elif not sceneDetails['details'].startswith(BaseDupStr):
+ elif not sceneDetails['details'].startswith(detailPrefix):
details = f"{BaseDupStr};\n{sceneDetails['details']}"
for tag in sceneDetails['tags']:
if tag['name'] == tagName:
doAddTag = False
break
if doAddTag:
- dataDict.update({'tag_ids' : tagId})
+ stash.addTag(sceneDetails, tagName, ignoreAutoTag=ignoreAutoTag)
if details != "":
dataDict.update({'details' : details})
if dataDict != ORG_DATA_DICT:
- stash.update_scene(dataDict)
- stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True)
+ stash.updateScene(dataDict)
+ stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict} and tag {tagName}", toAscii=True)
else:
- stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
-
+ stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']} already has tag {tagName}.", toAscii=True)
+ return doAddTag
-def isInList(listToCk, pathToCk):
- pathToCk = pathToCk.lower()
- for item in listToCk:
- if pathToCk.startswith(item):
- return True
- return False
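+# Retry wrapper around setTagId: connection errors are logged at the start of the next attempt, with a sleep between tries.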
+def setTagId_withRetry(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ stash.Warn(errMsg)
+ return setTagId(tagName, sceneDetails, DupFileToKeep, TagReason, ignoreAutoTag)
+ except (requests.exceptions.ConnectionError, ConnectionResetError) as e:
+ tb = traceback.format_exc()
+ errMsg = f"[setTagId] Exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"[setTagId] Unknown exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
def hasSameDir(path1, path2):
if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent:
@@ -237,39 +372,284 @@ def sendToTrash(path):
except Exception as e:
stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
return False
-
-def significantLessTime(durrationToKeep, durrationOther):
- timeDiff = durrationToKeep / durrationOther
+# If ckTimeDiff=False: does durration2 have significantly more time than durration1?
+def significantTimeDiffCheck(durration1, durration2, ckTimeDiff = False): # If ckTimeDiff=True: is the time difference significant in either direction?
+ if not isinstance(durration1, int) and 'files' in durration1:
+ durration1 = int(durration1['files'][0]['duration'])
+ durration2 = int(durration2['files'][0]['duration'])
+ timeDiff = getTimeDif(durration1, durration2)
+ if ckTimeDiff and timeDiff > 1:
+ timeDiff = getTimeDif(durration2, durration1)
if timeDiff < significantTimeDiff:
return True
return False
+def getTimeDif(durration1, durration2): # durration1 is expected to be smaller than durration2, e.g. 45/60 = 0.75
+ return durration1 / durration2
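+# Example: getTimeDif(45, 60) returns 0.75; significantTimeDiffCheck flags the pair when this ratio falls below significantTimeDiff.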
+
+def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better than scene1
+ # Prioritize higher resolution over codec, bit rate, and frame rate
+ if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+ return False
+ if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate):
+ if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])):
+ stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}")
+ return True
+ if (favorCodecRanking and swapCandidateCk == False) or (swapCandidateCk and swapCodec):
+ scene1CodecRank = stash.indexStartsWithInList(codecRanking, scene1['files'][0]['video_codec'])
+ scene2CodecRank = stash.indexStartsWithInList(codecRanking, scene2['files'][0]['video_codec'])
+ if scene2CodecRank < scene1CodecRank:
+ stash.Trace(f"[isBetterVideo] Better codec. {scene1['files'][0]['path']}={scene1['files'][0]['video_codec']}:Rank={scene1CodecRank} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['video_codec']}:Rank={scene2CodecRank}")
+ return True
+ if (favorFrameRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterFrameRate):
+ if (favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) > int(scene1['files'][0]['frame_rate'])) or (not favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) < int(scene1['files'][0]['frame_rate'])):
+ stash.Trace(f"[isBetterVideo]:[favorHigherFrameRate={favorHigherFrameRate}] Better frame rate. {scene1['files'][0]['path']}={scene1['files'][0]['frame_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}")
+ return True
+ return False
+
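+ # Restating the comparison order in isBetterVideo (no new rules): a lower
+ # resolution scene2 can never win; otherwise bit rate, codec ranking, and
+ # frame rate are consulted in that order, each only when its favor*/swap*
+ # setting applies to the current mode (normal vs. swap-candidate check).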
+def significantMoreTimeCompareToBetterVideo(scene1, scene2): # is scene2 better than scene1
+ if isinstance(scene1, int):
+ scene1 = stash.find_scene(scene1)
+ scene2 = stash.find_scene(scene2)
+ if int(scene1['files'][0]['duration']) >= int(scene2['files'][0]['duration']):
+ return False
+ if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
+ if significantTimeDiffCheck(scene1, scene2):
+ if tagLongDurationLowRes:
+ didAddTag = setTagId_withRetry(longerDurationLowerResolution, scene2, scene1, ignoreAutoTag=True)
+ stash.Log(f"Tagged sene2 with tag {longerDurationLowerResolution}, because scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']}); didAddTag={didAddTag}")
+ else:
+ stash.Warn(f"Scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; Scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); Scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']})")
+ return False
+ return True
+
+def allThingsEqual(scene1, scene2): # If all important things are equal, return true
+ if int(scene1['files'][0]['duration']) != int(scene2['files'][0]['duration']):
+ return False
+ if scene1['files'][0]['width'] != scene2['files'][0]['width']:
+ return False
+ if scene1['files'][0]['height'] != scene2['files'][0]['height']:
+ return False
+ if bitRateIsImporantComp and scene1['files'][0]['bit_rate'] != scene2['files'][0]['bit_rate']:
+ return False
+ if codecIsImporantComp and scene1['files'][0]['video_codec'] != scene2['files'][0]['video_codec']:
+ return False
+ return True
+
def isSwapCandidate(DupFileToKeep, DupFile):
# Don't move if both are in whitelist
- if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']):
+ if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
return False
- if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
- if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
+ if swapHighRes and int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
+ if not significantTimeDiffCheck(DupFileToKeep, DupFile):
return True
else:
stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
return True
+ if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True):
+ if not significantTimeDiffCheck(DupFileToKeep, DupFile):
+ return True
+ else:
+ stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter; DupFileToKeep-ID={DupFileToKeep['id']};DupFile-ID={DupFile['id']};BitRate {DupFileToKeep['files'][0]['bit_rate']} vs {DupFile['files'][0]['bit_rate']};Codec {DupFileToKeep['files'][0]['video_codec']} vs {DupFile['files'][0]['video_codec']};FrameRate {DupFileToKeep['files'][0]['frame_rate']} vs {DupFile['files'][0]['frame_rate']};", toAscii=True)
+ return False
+
+dupWhitelistTagId = None
+def addDupWhitelistTag():
+ global dupWhitelistTagId
+ stash.Trace(f"Adding tag duplicateWhitelistTag = {duplicateWhitelistTag}")
+ descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
+ dupWhitelistTagId = stash.createTagId(duplicateWhitelistTag, descp, ignoreAutoTag=True)
+ stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+
+excludeDupFileDeleteTagId = None
+def addExcludeDupTag():
+ global excludeDupFileDeleteTagId
+ stash.Trace(f"Adding tag excludeDupFileDeleteTag = {excludeDupFileDeleteTag}")
+ descp = 'Excludes duplicate scene from the DupFileManager tagging and deletion process. A scene with this tag will not be deleted by DupFileManager.'
+ excludeDupFileDeleteTagId = stash.createTagId(excludeDupFileDeleteTag, descp, ignoreAutoTag=True)
+ stash.Trace(f"dupWhitelistTagId={excludeDupFileDeleteTagId} name={excludeDupFileDeleteTag}")
+
+def isTaggedExcluded(Scene):
+ for tag in Scene['tags']:
+ if tag['name'] == excludeDupFileDeleteTag:
+ return True
+ return False
+
+def isWorseKeepCandidate(DupFileToKeep, Scene):
+ if not stash.startsWithInList(whitelist, Scene['files'][0]['path']) and stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']):
+ return True
+ if not stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']):
+ return True
+ if not stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']):
+ return True
+
+ if stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(graylist, Scene['files'][0]['path']):
+ return True
+ if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) < stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']):
+ return True
return False
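+
+ # Restating the precedence isWorseKeepCandidate encodes (no new rules): the
+ # candidate Scene is a worse keeper when DupFileToKeep is on the whitelist or
+ # graylist and Scene is not, when Scene is blacklisted and DupFileToKeep is
+ # not, or when both match the same gray/black list and DupFileToKeep matches
+ # an earlier entry in that list.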
-def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
+def killScanningJobs():
+ try:
+ if killScanningPostProcess:
+ stash.stopJobs(1, "Scanning...")
+ except Exception as e:
+ tb = traceback.format_exc()
+ stash.Error(f"Exception while trying to kill scan jobs; Error: {e}\nTraceBack={tb}")
+
+def getPath(Scene, getParent = False):
+ path = stash.asc2(Scene['files'][0]['path'])
+ path = path.replace("'", "")
+ path = path.replace("\\\\", "\\")
+ if getParent:
+ return pathlib.Path(path).resolve().parent
+ return path
+
+def getHtmlReportTableRow(qtyResults, tagDuplicates):
+ htmlReportPrefix = stash.Setting('htmlReportPrefix')
+ htmlReportPrefix = htmlReportPrefix.replace('http://127.0.0.1:9999/graphql', stash.url)
+ htmlReportPrefix = htmlReportPrefix.replace('http://localhost:9999/graphql', stash.url)
+ if tagDuplicates == False:
+ htmlReportPrefix = htmlReportPrefix.replace(' | ")
+ fileHtmlReport.write(f"{getSceneID(DupFile)}{getPath(DupFile)}")
+ fileHtmlReport.write(f"Res | Durration | BitRate | Codec | FrameRate | size | ID | index |
")
+ fileHtmlReport.write(f"{DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} | {DupFile['files'][0]['duration']} | {DupFile['files'][0]['bit_rate']} | {DupFile['files'][0]['video_codec']} | {DupFile['files'][0]['frame_rate']} | {DupFile['files'][0]['size']} | {DupFile['id']} | {QtyTagForDel} |
")
+
+ if DupFile['id'] in reasonDict:
+ fileHtmlReport.write(f"Reason: {reasonDict[DupFile['id']]} |
")
+ # elif DupFileToKeep['id'] in reasonDict:
+ # fileHtmlReport.write(f"Reason: {reasonDict[DupFileToKeep['id']]} |
")
+ elif int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
+ fileHtmlReport.write(f"Reason: Resolution {DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} < {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} |
")
+ elif significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep):
+ if significantTimeDiffCheck(DupFile, DupFileToKeep):
+ theReason = f"Significant-Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']}"
+ else:
+ theReason = f"Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']}"
+ fileHtmlReport.write(f"Reason: {theReason} |
")
+ elif isBetterVideo(DupFile, DupFileToKeep):
+ fileHtmlReport.write(f"Reason: Better Video |
")
+ elif stash.startsWithInList(DupFileToKeep, DupFile['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
+ fileHtmlReport.write(f"Reason: not whitelist vs whitelist |
")
+ elif isTaggedExcluded(DupFileToKeep) and not isTaggedExcluded(DupFile):
+ fileHtmlReport.write(f"Reason: not ExcludeTag vs ExcludeTag |
")
+
+ fileHtmlReport.write("
")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ # ToDo: Add following buttons:
+ # rename file
+ if dupFileExist and tagDuplicates:
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ if dupFileExist:
+ fileHtmlReport.write(f"[Folder]")
+ fileHtmlReport.write(f"[Play]")
+ else:
+ fileHtmlReport.write("[File NOT Exist]")
+ fileHtmlReport.write("
")
+
+ videoPreview = f""
+ if htmlIncludeImagePreview:
+ imagePreview = f"
"
+ fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{videoPreview} | {imagePreview} |
")
+ else:
+ fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{videoPreview}")
+ fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{getPath(DupFileToKeep)}")
+ fileHtmlReport.write(f"Res | Durration | BitRate | Codec | FrameRate | size | ID |
")
+ fileHtmlReport.write(f"{DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} | {DupFileToKeep['files'][0]['duration']} | {DupFileToKeep['files'][0]['bit_rate']} | {DupFileToKeep['files'][0]['video_codec']} | {DupFileToKeep['files'][0]['frame_rate']} | {DupFileToKeep['files'][0]['size']} | {DupFileToKeep['id']} |
")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"")
+ if isTaggedExcluded(DupFileToKeep):
+ fileHtmlReport.write(f"")
+ fileHtmlReport.write(f"[Folder]")
+ if toKeepFileExist:
+ fileHtmlReport.write(f"[Play]")
+ else:
+ fileHtmlReport.write("[File NOT Exist]")
+ fileHtmlReport.write(f"")
+ # ToDo: Add following buttons:
+ # rename file
+ fileHtmlReport.write(f"")
+
+ fileHtmlReport.write("\n")
+
+def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlacklistOnly=False, deleteLowerResAndDuration=False):
+ global reasonDict
duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
- dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp)
+ dupTagId = stash.createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp, ignoreAutoTag=True)
stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
+ createHtmlReport = stash.Setting('createHtmlReport')
+ htmlReportNameHomePage = htmlReportName
+ htmlReportPaginate = stash.Setting('htmlReportPaginate')
+
- dupWhitelistTagId = None
- if whitelistDoTagLowResDup:
- stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
- duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
- dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp)
- stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+ addDupWhitelistTag()
+ addExcludeDupTag()
QtyDupSet = 0
QtyDup = 0
@@ -277,187 +657,897 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
QtyAlmostDup = 0
QtyRealTimeDiff = 0
QtyTagForDel = 0
+ QtyTagForDelPaginate = 0
+ PaginateId = 0
+ QtyNewlyTag = 0
QtySkipForDel = 0
+ QtyExcludeForDel = 0
QtySwap = 0
QtyMerge = 0
QtyDeleted = 0
stash.Log("#########################################################################")
stash.Trace("#########################################################################")
- stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
- DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
+ stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; matchDupDistance={matchPhaseDistanceText}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+ stash.startSpinningProcessBar()
+ htmlFileData = " paths {screenshot sprite " + htmlPreviewOrStream + "} " if createHtmlReport else ""
+ mergeFieldData = " code director title rating100 date studio {id} movies {movie {id} } galleries {id} performers {id} urls " if merge else ""
+ DupFileSets = stash.find_duplicate_scenes(matchPhaseDistance, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details ' + mergeFieldData + htmlFileData)
+ stash.stopSpinningProcessBar()
qtyResults = len(DupFileSets)
+ stash.setProgressBarIter(qtyResults)
stash.Trace("#########################################################################")
+ stash.Log(f"Found {qtyResults} duplicate sets...")
+ fileHtmlReport = None
+ if createHtmlReport:
+ if not os.path.isdir(htmlReportNameFolder):
+ os.mkdir(htmlReportNameFolder)
+ if not os.path.isdir(htmlReportNameFolder):
+ stash.Error(f"Failed to create report directory {htmlReportNameFolder}.")
+ return
+ deleteLocalDupReportHtmlFiles(False)
+ fileHtmlReport = open(htmlReportName, "w")
+ fileHtmlReport.write(f"{getHtmlReportTableRow(qtyResults, tagDuplicates)}\n")
+ fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n")
+ htmlReportTableHeader = stash.Setting('htmlReportTableHeader')
+ fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n")
+
for DupFileSet in DupFileSets:
- stash.Trace(f"DupFileSet={DupFileSet}")
+ # stash.Trace(f"DupFileSet={DupFileSet}", toAscii=True)
QtyDupSet+=1
- stash.Progress(QtyDupSet, qtyResults)
+ stash.progressBar(QtyDupSet, qtyResults)
SepLine = "---------------------------"
- DupFileToKeep = ""
+ DupFileToKeep = None
DupToCopyFrom = ""
DupFileDetailList = []
for DupFile in DupFileSet:
QtyDup+=1
- stash.log.sl.progress(f"Scene ID = {DupFile['id']}")
- time.sleep(2)
- Scene = stash.find_scene(DupFile['id'])
- sceneData = f"Scene = {Scene}"
- stash.Trace(sceneData, toAscii=True)
+ Scene = DupFile
+ if skipIfTagged and createHtmlReport == False and duplicateMarkForDeletion in [tag['name'] for tag in Scene['tags']]:
+ stash.Trace(f"Skipping scene '{Scene['files'][0]['path']}' because already tagged with {duplicateMarkForDeletion}")
+ continue
+ stash.TraceOnce(f"Scene = {Scene}", toAscii=True)
DupFileDetailList = DupFileDetailList + [Scene]
- if DupFileToKeep != "":
- if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference
- QtyExactDup+=1
+ if os.path.isfile(Scene['files'][0]['path']):
+ if DupFileToKeep != None:
+ if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference
+ QtyExactDup+=1
+ else:
+ QtyAlmostDup+=1
+ SepLine = "***************************"
+ if significantTimeDiffCheck(DupFileToKeep, Scene):
+ QtyRealTimeDiff += 1
+
+ if int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['width']) * int(Scene['files'][0]['height']):
+ logReason(DupFileToKeep, Scene, f"resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}")
+ DupFileToKeep = Scene
+ elif significantMoreTimeCompareToBetterVideo(DupFileToKeep, Scene):
+ if significantTimeDiffCheck(DupFileToKeep, Scene):
+ theReason = f"significant-duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}"
+ else:
+ theReason = f"duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}"
+ reasonKeyword = "significant-duration" if significantTimeDiffCheck(DupFileToKeep, Scene) else "duration"
+ logReason(DupFileToKeep, Scene, theReason)
+ DupFileToKeep = Scene
+ elif isBetterVideo(DupFileToKeep, Scene):
+ logReason(DupFileToKeep, Scene, f"codec,bit_rate, or frame_rate: {DupFileToKeep['files'][0]['video_codec']}, {DupFileToKeep['files'][0]['bit_rate']}, {DupFileToKeep['files'][0]['frame_rate']} : {Scene['files'][0]['video_codec']}, {Scene['files'][0]['bit_rate']}, {Scene['files'][0]['frame_rate']}")
+ DupFileToKeep = Scene
+ elif stash.startsWithInList(whitelist, Scene['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']):
+ logReason(DupFileToKeep, Scene, f"not whitelist vs whitelist")
+ DupFileToKeep = Scene
+ elif isTaggedExcluded(Scene) and not isTaggedExcluded(DupFileToKeep):
+ logReason(DupFileToKeep, Scene, f"not ExcludeTag vs ExcludeTag")
+ DupFileToKeep = Scene
+ elif allThingsEqual(DupFileToKeep, Scene):
+ # Only do the below checks if all important things are equal.
+ if stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and not stash.startsWithInList(blacklist, Scene['files'][0]['path']):
+ logReason(DupFileToKeep, Scene, f"blacklist vs not blacklist")
+ DupFileToKeep = Scene
+ elif stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']):
+ logReason(DupFileToKeep, Scene, f"blacklist-index {stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(blacklist, Scene['files'][0]['path'])}")
+ DupFileToKeep = Scene
+ elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and not stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']):
+ logReason(DupFileToKeep, Scene, f"not graylist vs graylist")
+ DupFileToKeep = Scene
+ elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(graylist, Scene['files'][0]['path']):
+ logReason(DupFileToKeep, Scene, f"graylist-index {stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(graylist, Scene['files'][0]['path'])}")
+ DupFileToKeep = Scene
+ elif favorLongerFileName and len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+ logReason(DupFileToKeep, Scene, f"path-len {len(DupFileToKeep['files'][0]['path'])} < {len(Scene['files'][0]['path'])}")
+ DupFileToKeep = Scene
+ elif favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+ logReason(DupFileToKeep, Scene, f"size {DupFileToKeep['files'][0]['size']} < {Scene['files'][0]['size']}")
+ DupFileToKeep = Scene
+ elif not favorLongerFileName and len(DupFileToKeep['files'][0]['path']) > len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+ logReason(DupFileToKeep, Scene, f"path-len {len(DupFileToKeep['files'][0]['path'])} > {len(Scene['files'][0]['path'])}")
+ DupFileToKeep = Scene
+ elif not favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) > int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+ logReason(DupFileToKeep, Scene, f"size {DupFileToKeep['files'][0]['size']} > {Scene['files'][0]['size']}")
+ DupFileToKeep = Scene
else:
- QtyAlmostDup+=1
- SepLine = "***************************"
- if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])):
- QtyRealTimeDiff += 1
- if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']):
- DupFileToKeep = Scene
- elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']):
- DupFileToKeep = Scene
- elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']):
- DupFileToKeep = Scene
- elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']):
- DupFileToKeep = Scene
- elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']):
- DupFileToKeep = Scene
- elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']):
- DupFileToKeep = Scene
- elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']):
DupFileToKeep = Scene
+ # stash.Trace(f"DupFileToKeep = {DupFileToKeep}")
+ stash.Debug(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True)
else:
- DupFileToKeep = Scene
- # stash.Trace(f"DupFileToKeep = {DupFileToKeep}")
- stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True)
+ stash.Error(f"Scene does NOT exist; path={Scene['files'][0]['path']}; ID={Scene['id']}")
for DupFile in DupFileDetailList:
- if DupFile['id'] != DupFileToKeep['id']:
+ if DupFileToKeep != None and DupFile['id'] != DupFileToKeep['id']:
if merge:
- result = stash.merge_metadata(DupFile, DupFileToKeep)
+ result = stash.mergeMetadata(DupFile, DupFileToKeep)
if result != "Nothing To Merge":
QtyMerge += 1
-
- if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])):
+ didAddTag = False
+ if stash.startsWithInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])):
+ QtySkipForDel+=1
if isSwapCandidate(DupFileToKeep, DupFile):
if merge:
- stash.merge_metadata(DupFileToKeep, DupFile)
+ stash.mergeMetadata(DupFileToKeep, DupFile)
if toRecycleBeforeSwap:
sendToTrash(DupFile['files'][0]['path'])
- shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path'])
- stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ stash.Log(f"Moving better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'; SrcID={DupFileToKeep['id']};DescID={DupFile['id']};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySwap={QtySwap};QtySkipForDel={QtySkipForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ try:
+ shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path'])
+ QtySwap+=1
+ except Exception as e:
+ tb = traceback.format_exc()
+ stash.Error(f"Exception while moving file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}; SrcID={DupFileToKeep['id']};DescID={DupFile['id']}'; Error: {e}\nTraceBack={tb}")
DupFileToKeep = DupFile
- QtySwap+=1
else:
- stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True)
if dupWhitelistTagId and tagDuplicates:
- setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep)
- QtySkipForDel+=1
+ didAddTag = setTagId_withRetry(duplicateWhitelistTag, DupFile, DupFileToKeep, ignoreAutoTag=True)
+ stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}';AddTagW={didAddTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySkipForDel={QtySkipForDel}", toAscii=True)
else:
- if deleteDup:
- DupFileName = DupFile['files'][0]['path']
- DupFileNameOnly = pathlib.Path(DupFileName).stem
- stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
- if alternateTrashCanPath != "":
- destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
- if os.path.isfile(destPath):
- destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
- shutil.move(DupFileName, destPath)
- elif moveToTrashCan:
- sendToTrash(DupFileName)
- stash.destroy_scene(DupFile['id'], delete_file=True)
- QtyDeleted += 1
- elif tagDuplicates:
- if QtyTagForDel == 0:
- stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
- else:
- stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
- setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep)
- QtyTagForDel+=1
+ if isTaggedExcluded(DupFile):
+ QtyExcludeForDel+=1
+ stash.Log(f"Excluding file {DupFile['files'][0]['path']} because tagged for exclusion via tag {excludeDupFileDeleteTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults}")
+ else:
+ # ToDo: Add merge logic here
+ if deleteDup:
+ DupFileName = DupFile['files'][0]['path']
+ if not deleteBlacklistOnly or stash.startsWithInList(blacklist, DupFile['files'][0]['path']):
+ if not deleteLowerResAndDuration or (isBetterVideo(DupFile, DupFileToKeep) and not significantMoreTimeCompareToBetterVideo(DupFileToKeep, DupFile)) or (significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep) and not isBetterVideo(DupFileToKeep, DupFile)):
+ QtyDeleted += 1
+ DupFileNameOnly = pathlib.Path(DupFileName).stem
+ stash.Warn(f"Deleting duplicate '{DupFileName}';QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtyDeleted={QtyDeleted}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ if alternateTrashCanPath != "":
+ destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+ if os.path.isfile(destPath):
+ destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+ shutil.move(DupFileName, destPath)
+ elif moveToTrashCan:
+ sendToTrash(DupFileName)
+ stash.destroyScene(DupFile['id'], delete_file=True)
+ elif tagDuplicates or fileHtmlReport != None:
+ if excludeFromReportIfSignificantTimeDiff and significantTimeDiffCheck(DupFile, DupFileToKeep, True):
+ stash.Log(f"Skipping duplicate {DupFile['files'][0]['path']} (ID={DupFile['id']}), because of time difference greater than {significantTimeDiff} for file {DupFileToKeep['files'][0]['path']}.")
+ continue
+ QtyTagForDel+=1
+ QtyTagForDelPaginate+=1
+ didAddTag = False
+ if tagDuplicates:
+ didAddTag = setTagId_withRetry(duplicateMarkForDeletion, DupFile, DupFileToKeep, ignoreAutoTag=True)
+ if fileHtmlReport != None:
+ # ToDo: Add icons using github path
+ # add copy button with copy icon
+ # add move button with r-sqr icon
+ # replace delete button with trashcan icon
+ # add rename file code and button
+ # add delete only from stash db code and button using DB delete icon
+ stash.Debug(f"Adding scene {DupFile['id']} to HTML report.")
+ writeRowToHtmlReport(fileHtmlReport, DupFile, DupFileToKeep, QtyTagForDel, tagDuplicates)
+ if QtyTagForDelPaginate >= htmlReportPaginate:
+ QtyTagForDelPaginate = 0
+ fileHtmlReport.write("\n")
+ homeHtmReportLink = f"[Home]"
+ prevHtmReportLink = ""
+ if PaginateId > 0:
+ if PaginateId > 1:
+ prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html")
+ else:
+ prevHtmReport = htmlReportNameHomePage
+ prevHtmReportLink = f"[Prev]"
+ nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html")
+ nextHtmReportLink = f"[Next]"
+ fileHtmlReport.write(f"{homeHtmReportLink} | {prevHtmReportLink} | {nextHtmReportLink} |
")
+ fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}")
+ fileHtmlReport.close()
+ PaginateId+=1
+ fileHtmlReport = open(nextHtmReport, "w")
+ fileHtmlReport.write(f"{getHtmlReportTableRow(qtyResults, tagDuplicates)}\n")
+ if PaginateId > 1:
+ prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html")
+ else:
+ prevHtmReport = htmlReportNameHomePage
+ prevHtmReportLink = f"[Prev]"
+ if len(DupFileSets) > (QtyTagForDel + htmlReportPaginate):
+ nextHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId+1}.html")
+ nextHtmReportLink = f"[Next]"
+ fileHtmlReport.write(f"{homeHtmReportLink} | {prevHtmReportLink} | {nextHtmReportLink} |
")
+ else:
+ stash.Debug(f"DupFileSets Qty = {len(DupFileSets)}; DupFileDetailList Qty = {len(DupFileDetailList)}; QtyTagForDel = {QtyTagForDel}; htmlReportPaginate = {htmlReportPaginate}; QtyTagForDel + htmlReportPaginate = {QtyTagForDel+htmlReportPaginate}")
+ fileHtmlReport.write(f"{homeHtmReportLink} | {prevHtmReportLink} |
")
+ fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n")
+ fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n")
+
+ if tagDuplicates and graylistTagging and stash.startsWithInList(graylist, DupFile['files'][0]['path']):
+ stash.addTag(DupFile, graylistMarkForDeletion, ignoreAutoTag=True)
+ if didAddTag:
+ QtyNewlyTag+=1
+ if QtyTagForDel == 1:
+ stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ else:
+ didAddTag = 1 if didAddTag else 0
+ stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion;AddTag={didAddTag};Qty={QtyDup};Set={QtyDupSet} of {qtyResults};NewlyTag={QtyNewlyTag};isTag={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
stash.Trace(SepLine)
- if maxDupToProcess > 0 and QtyDup > maxDupToProcess:
+ if maxDupToProcess > 0 and ((QtyTagForDel > maxDupToProcess) or (QtyTagForDel == 0 and QtyDup > maxDupToProcess)):
break
- stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
- if cleanAfterDel:
+ if fileHtmlReport != None:
+ fileHtmlReport.write("\n")
+ if PaginateId > 0:
+ homeHtmReportLink = f"[Home]"
+ if PaginateId > 1:
+ prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html")
+ else:
+ prevHtmReport = htmlReportNameHomePage
+ prevHtmReportLink = f"[Prev]"
+ fileHtmlReport.write(f"{homeHtmReportLink} | {prevHtmReportLink} |
")
+ fileHtmlReport.write(f"Total Tagged for Deletion {QtyTagForDel}
\n")
+ fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}")
+ fileHtmlReport.close()
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+ stash.Log(f"View Stash duplicate report using Stash->Settings->Tools->[Duplicate File Report]", printTo = stash.LogTo.STASH)
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+
+
+ stash.Debug("#####################################################")
+ stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+ killScanningJobs()
+ if cleanAfterDel and deleteDup:
stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
- stash.metadata_clean(paths=stash.STASH_PATHS)
+ stash.metadata_clean()
stash.metadata_clean_generated()
stash.optimise_database()
+ if doGeneratePhash:
+ stash.metadata_generate({"phashes": True})
+ sys.stdout.write("Report complete")
-def deleteTagggedDuplicates():
- tagId = stash.find_tags(q=duplicateMarkForDeletion)
- if len(tagId) > 0 and 'id' in tagId[0]:
- tagId = tagId[0]['id']
- else:
+def findCurrentTagId(tagNames):
+ # tagNames = [i for n, i in enumerate(tagNames) if i not in tagNames[:n]]
+ for tagName in tagNames:
+ tagId = stash.find_tags(q=tagName)
+ if len(tagId) > 0 and 'id' in tagId[0]:
+ stash.Debug(f"Using tag name {tagName} with Tag ID {tagId[0]['id']}")
+ return tagId[0]['id']
+ return "-1"
+
+def toJson(data):
+ import json
+ # data = data.replace("'", '"')
+ data = data.replace("\\", "\\\\")
+ data = data.replace("\\\\\\\\", "\\\\")
+ return json.loads(data)
+
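+ # Worked example for toJson above: a raw Windows path value like C:\dir is
+ # first doubled to C:\\dir, while an already-escaped C:\\dir becomes
+ # C:\\\\dir and is then collapsed back to C:\\dir. Either way json.loads()
+ # receives a valid escaped backslash, so both forms of input parse.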
+def getAnAdvanceMenuOptionSelected(taskName, target, isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater):
+ stash.Log(f"Processing taskName = {taskName}, target = {target}")
+ if "Blacklist" in taskName:
+ isBlackList = True
+ if "Less" in taskName:
+ compareToLess = True
+ if "Greater" in taskName:
+ compareToGreater = True
+
+ if "pathToDelete" in taskName:
+ pathToDelete = target.lower()
+ elif "sizeToDelete" in taskName:
+ sizeToDelete = int(target)
+ elif "durationToDelete" in taskName:
+ durationToDelete = int(target)
+ elif "commonResToDelete" in taskName:
+ resolutionToDelete = int(target)
+ elif "resolutionToDelete" in taskName:
+ resolutionToDelete = int(target)
+ elif "ratingToDelete" in taskName:
+ ratingToDelete = int(target) * 20
+ elif "tagToDelete" in taskName:
+ tagToDelete = target.lower()
+ elif "titleToDelete" in taskName:
+ titleToDelete = target.lower()
+ elif "pathStrToDelete" in taskName:
+ pathStrToDelete = target.lower()
+ elif "fileNotExistToDelete" in taskName:
+ fileNotExistToDelete = True
+ return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater
+
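+ # Example of the substring dispatch above (this task name is hypothetical,
+ # for illustration only): a taskName such as "sizeToDeleteLessBlacklist"
+ # with target "1000000" would set isBlackList=True, compareToLess=True, and
+ # sizeToDelete=1000000, i.e. target blacklisted duplicates under ~1 MB.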
+def getAdvanceMenuOptionSelected(advanceMenuOptionSelected):
+ isBlackList = False
+ pathToDelete = ""
+ sizeToDelete = -1
+ durationToDelete = -1
+ resolutionToDelete = -1
+ ratingToDelete = -1
+ tagToDelete = ""
+ titleToDelete = ""
+ pathStrToDelete = ""
+ fileNotExistToDelete = False
+ compareToLess = False
+ compareToGreater = False
+ if advanceMenuOptionSelected:
+ stash.enableProgressBar(False)
+ if 'Target' in stash.JSON_INPUT['args']:
+ if "applyCombo" in stash.PLUGIN_TASK_NAME:
+ jsonObject = toJson(stash.JSON_INPUT['args']['Target'])
+ for taskName in jsonObject:
+ isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAnAdvanceMenuOptionSelected(taskName, jsonObject[taskName], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater)
+ else:
+ return getAnAdvanceMenuOptionSelected(stash.PLUGIN_TASK_NAME, stash.JSON_INPUT['args']['Target'], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater)
+ return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater
+
+# //////////////////////////////////////////////////////////////////////////////
+# //////////////////////////////////////////////////////////////////////////////
+def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False, tagId=-1, advanceMenuOptionSelected=False):
+ if tagId == -1:
+ tagId = findCurrentTagId([duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, 'DuplicateMarkForDeletion', '_DuplicateMarkForDeletion'])
+ if int(tagId) < 0:
stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.")
return
+
+ excludedTags = [duplicateMarkForDeletion]
+ if clearAllDupfileManagerTags:
+ excludedTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag, graylistMarkForDeletion, longerDurationLowerResolution]
+
+ isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater = getAdvanceMenuOptionSelected(advanceMenuOptionSelected)
+ if advanceMenuOptionSelected and deleteScenes and pathToDelete == "" and tagToDelete == "" and titleToDelete == "" and pathStrToDelete == "" and sizeToDelete == -1 and durationToDelete == -1 and resolutionToDelete == -1 and ratingToDelete == -1 and fileNotExistToDelete == False:
+ stash.Error("Running advance menu option with no options enabled.")
+ return
+
QtyDup = 0
QtyDeleted = 0
+ QtyClearedTags = 0
+ QtySetGraylistTag = 0
QtyFailedQuery = 0
- stash.Trace("#########################################################################")
- sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
- qtyResults = len(sceneIDs)
- stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
- for sceneID in sceneIDs:
- # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.")
+ stash.Debug("#########################################################################")
+ stash.startSpinningProcessBar()
+ scenes = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details title rating100')
+ stash.stopSpinningProcessBar()
+ qtyResults = len(scenes)
+ stash.Log(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion})")
+ stash.setProgressBarIter(qtyResults)
+ for scene in scenes:
QtyDup += 1
- prgs = QtyDup / qtyResults
- stash.Progress(QtyDup, qtyResults)
- scene = stash.find_scene(sceneID['id'])
- if scene == None or len(scene) == 0:
- stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")
- QtyFailedQuery += 1
- continue
- # stash.Log(f"scene={scene}")
- DupFileName = scene['files'][0]['path']
- DupFileNameOnly = pathlib.Path(DupFileName).stem
- stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
- if alternateTrashCanPath != "":
- destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
- if os.path.isfile(destPath):
- destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
- shutil.move(DupFileName, destPath)
- elif moveToTrashCan:
- sendToTrash(DupFileName)
- result = stash.destroy_scene(scene['id'], delete_file=True)
- stash.Trace(f"destroy_scene result={result} for file {DupFileName}", toAscii=True)
- QtyDeleted += 1
- stash.Log(f"QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
- return
+ stash.progressBar(QtyDup, qtyResults)
+ # scene = stash.find_scene(sceneID['id'])
+ # if scene == None or len(scene) == 0:
+ # stash.Warn(f"Could not get scene data for scene ID {scene['id']}.")
+ # QtyFailedQuery += 1
+ # continue
+ # stash.Trace(f"scene={scene}")
+ if clearTag:
+ QtyClearedTags += 1
+ # ToDo: Add logic to exclude graylistMarkForDeletion
+ tags = [int(item['id']) for item in scene["tags"] if item['name'] not in excludedTags]
+ # if clearAllDupfileManagerTags:
+ # tags = []
+ # for tag in scene["tags"]:
+ # if tag['name'] in excludedTags:
+ # continue
+ # tags += [int(tag['id'])]
+ stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}")
+ dataDict = {'id' : scene['id']}
+ if addPrimaryDupPathToDetails:
+ sceneDetails = scene['details']
+ if sceneDetails.find(detailPrefix) == 0 and sceneDetails.find(detailPostfix) > 1:
+ Pos1 = sceneDetails.find(detailPrefix)
+ Pos2 = sceneDetails.find(detailPostfix)
+ sceneDetails = sceneDetails[0:Pos1] + sceneDetails[Pos2 + len(detailPostfix):]
+ dataDict.update({'details' : sceneDetails})
+ dataDict.update({'tag_ids' : tags})
+ stash.Log(f"Updating scene with {dataDict};QtyClearedTags={QtyClearedTags};Count={QtyDup} of {qtyResults}")
+ stash.updateScene(dataDict)
+ # stash.removeTag(scene, duplicateMarkForDeletion)
+ elif setGrayListTag:
+ if stash.startsWithInList(graylist, scene['files'][0]['path']):
+ QtySetGraylistTag+=1
+ if stash.addTag(scene, graylistMarkForDeletion, ignoreAutoTag=True):
+ stash.Log(f"Added tag {graylistMarkForDeletion} to scene {scene['files'][0]['path']};QtySetGraylistTag={QtySetGraylistTag};Count={QtyDup} of {qtyResults}")
+ else:
+ stash.Trace(f"Scene already had tag {graylistMarkForDeletion}; {scene['files'][0]['path']}")
+ elif deleteScenes:
+ DupFileName = scene['files'][0]['path']
+ DupFileNameOnly = pathlib.Path(DupFileName).stem
+ if advanceMenuOptionSelected:
+ if isBlackList:
+ if not stash.startsWithInList(blacklist, scene['files'][0]['path']):
+ continue
+ if pathToDelete != "":
+ if not DupFileName.lower().startswith(pathToDelete):
+ stash.Debug(f"Skipping file {DupFileName} because it does not start with {pathToDelete}.")
+ continue
+ if pathStrToDelete != "":
+ if not pathStrToDelete in DupFileName.lower():
+ stash.Debug(f"Skipping file {DupFileName} because it does not contain value {pathStrToDelete}.")
+ continue
+ if sizeToDelete != -1:
+ compareTo = int(scene['files'][0]['size'])
+ if compareToLess:
+ if not (compareTo < sizeToDelete):
+ continue
+ elif compareToGreater:
+ if not (compareTo > sizeToDelete):
+ continue
+ else:
+ if not compareTo == sizeToDelete:
+ continue
+ if durationToDelete != -1:
+ compareTo = int(scene['files'][0]['duration'])
+ if compareToLess:
+ if not (compareTo < durationToDelete):
+ continue
+ elif compareToGreater:
+ if not (compareTo > durationToDelete):
+ continue
+ else:
+ if not compareTo == durationToDelete:
+ continue
+ if resolutionToDelete != -1:
+ compareTo = int(scene['files'][0]['width']) * int(scene['files'][0]['height'])
+ if compareToLess:
+ if not (compareTo < resolutionToDelete):
+ continue
+ elif compareToGreater:
+ if not (compareTo > resolutionToDelete):
+ continue
+ else:
+ if not compareTo == resolutionToDelete:
+ continue
+ if ratingToDelete != -1:
+ if scene['rating100'] == None or scene['rating100'] == "None":
+ compareTo = 0
+ else:
+ compareTo = int(scene['rating100'])
+ if compareToLess:
+ if not (compareTo < ratingToDelete):
+ continue
+ elif compareToGreater:
+ if not (compareTo > ratingToDelete):
+ continue
+ else:
+ if not compareTo == ratingToDelete:
+ continue
+ if titleToDelete != "":
+ if not titleToDelete in scene['title'].lower():
+ stash.Debug(f"Skipping file {DupFileName} because it does not contain value {titleToDelete} in title ({scene['title']}).")
+ continue
+ if tagToDelete != "":
+ doProcessThis = False
+ for tag in scene['tags']:
+ if tag['name'].lower() == tagToDelete:
+ doProcessThis = True
+ break
+ if doProcessThis == False:
+ continue
+ if fileNotExistToDelete:
+ if os.path.isfile(scene['files'][0]['path']):
+ continue
+ stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+ if alternateTrashCanPath != "":
+ destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+ if os.path.isfile(destPath):
+ destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+ shutil.move(DupFileName, destPath)
+ elif moveToTrashCan:
+ sendToTrash(DupFileName)
+ result = stash.destroyScene(scene['id'], delete_file=True)
+ QtyDeleted += 1
+ stash.Debug(f"destroyScene result={result} for file {DupFileName};QtyDeleted={QtyDeleted};Count={QtyDup} of {qtyResults}", toAscii=True)
+ else:
+ stash.Error("manageTagggedDuplicates called with invlaid input arguments. Doing early exit.")
+ return
+ stash.Debug("#####################################################")
+ stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtySetGraylistTag={QtySetGraylistTag}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
+ killScanningJobs()
+ if deleteScenes and not advanceMenuOptionSelected:
+ if cleanAfterDel:
+ stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
+ stash.metadata_clean()
+ stash.metadata_clean_generated()
+ stash.optimise_database()
-def testSetDupTagOnScene(sceneId):
- scene = stash.find_scene(sceneId)
- stash.Log(f"scene={scene}")
- stash.Log(f"scene tags={scene['tags']}")
- tag_ids = [dupTagId]
- for tag in scene['tags']:
- tag_ids = tag_ids + [tag['id']]
- stash.Log(f"tag_ids={tag_ids}")
- stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids})
-
-if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
- mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
- stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
-elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
- deleteTagggedDuplicates()
- stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
-elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
- mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
- stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
-elif parse_args.dup_tag:
- mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
- stash.Trace(f"Tag duplicate EXIT")
-elif parse_args.del_tag:
- deleteTagggedDuplicates()
- stash.Trace(f"Delete Tagged duplicates EXIT")
-elif parse_args.remove:
- mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
- stash.Trace(f"Delete duplicate EXIT")
-else:
- stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})")
+def removeDupTag():
+ if 'Target' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['Target']
+ stash.Log(f"Processing scene ID# {scene}")
+ stash.removeTag(scene, duplicateMarkForDeletion)
+ stash.Log(f"Done removing tag from scene {scene}.")
+ jsonReturn = "{'removeDupTag' : 'complete', 'id': '" + f"{scene}" + "'}"
+ stash.Log(f"Sending json value {jsonReturn}")
+ sys.stdout.write(jsonReturn)
+
+def addExcludeTag():
+ if 'Target' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['Target']
+ stash.Log(f"Processing scene ID# {scene}")
+ stash.addTag(scene, excludeDupFileDeleteTag)
+ stash.Log(f"Done adding exclude tag to scene {scene}.")
+ sys.stdout.write("{" + f"addExcludeTag : 'complete', id: '{scene}'" + "}")
+
+def removeExcludeTag():
+ if 'Target' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['Target']
+ stash.Log(f"Processing scene ID# {scene}")
+ stash.removeTag(scene, excludeDupFileDeleteTag)
+ stash.Log(f"Done removing exclude tag from scene {scene}.")
+ sys.stdout.write("{" + f"removeExcludeTag : 'complete', id: '{scene}'" + "}")
+
+def getParseData(getSceneDetails1=True, getSceneDetails2=True):
+ if 'Target' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return None, None
+ targetsSrc = stash.JSON_INPUT['args']['Target']
+ targets = targetsSrc.split(":")
+ if len(targets) < 2:
+ stash.Error(f"Could not get both targets from string {targetsSrc}")
+ return None, None
+ stash.Log(f"Parsed targets {targets[0]} and {targets[1]}")
+ target1 = targets[0]
+ target2 = targets[1]
+ if getSceneDetails1:
+ target1 = stash.find_scene(int(target1))
+ if getSceneDetails2:
+ target2 = stash.find_scene(int(target2))
+ elif len(targets) > 2:
+ target2 = target2 + targets[2]
+ return target1, target2
+
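+ # Example for getParseData above: a Target of "1234:5678" parses into
+ # target1="1234" and target2="5678"; with the default flags both are then
+ # resolved to scene dicts via stash.find_scene(). When getSceneDetails2 is
+ # False and the Target contains a second ":", the trailing piece is appended
+ # back onto target2.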
+
+def mergeTags():
+ scene1, scene2 = getParseData()
+ if scene1 == None or scene2 == None:
+ sys.stdout.write("{" + f"mergeTags : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
+ return
+ stash.mergeMetadata(scene1, scene2)
+ stash.Log(f"Done merging scenes for scene {scene1['id']} and scene {scene2['id']}")
+ sys.stdout.write("{" + f"mergeTags : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}'" + "}")
+
+def getLocalDupReportPath():
+ htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
+ localPath = htmlReportName.replace("\\", "\\\\")
+ jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{localPath}" + "'}"
+ stash.Log(f"Sending json value {jsonReturn}")
+ sys.stdout.write(jsonReturn)
+
+def deleteLocalDupReportHtmlFiles(doJsonOutput = True):
+ htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
+ x = 0
+ if os.path.isfile(htmlReportName):
+ stash.Log(f"Deleting file {htmlReportName}")
+ os.remove(htmlReportName)
+ for x in range(2, 9999):
+ fileName = htmlReportName.replace(".html", f"_{x-1}.html")
+ stash.Debug(f"Checking if file '{fileName}' exist.")
+ if not os.path.isfile(fileName):
+ break
+ stash.Log(f"Deleting file {fileName}")
+ os.remove(fileName)
+ else:
+ stash.Log(f"Report file does not exist: {htmlReportName}")
+ if doJsonOutput:
+ jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{htmlReportName}" + "', 'qty': '" + f"{x}" + "'}"
+ stash.Log(f"Sending json value {jsonReturn}")
+ sys.stdout.write(jsonReturn)
+
+def removeTagFromAllScenes(tagName, deleteTags):
+ # ToDo: Replace code with SQL code if DB version 68
+ tagId = stash.find_tags(q=tagName)
+ if len(tagId) > 0 and 'id' in tagId[0]:
+ if deleteTags:
+ stash.Debug(f"Deleting tag name {tagName} with Tag ID {tagId[0]['id']} from stash.")
+ stash.destroy_tag(int(tagId[0]['id']))
+ else:
+ stash.Debug(f"Removing tag name {tagName} with Tag ID {tagId[0]['id']} from all scenes.")
+ manageTagggedDuplicates(clearTag=True, tagId=int(tagId[0]['id']))
+ return True
+ return False
+
+def removeAllDupTagsFromAllScenes(deleteTags=False):
+ tagsToClear = [duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, graylistMarkForDeletion, longerDurationLowerResolution, duplicateWhitelistTag]
+ for x in range(0, 3):
+ tagsToClear += [base1_duplicateMarkForDeletion + f"_{x}"]
+ for x in range(0, 3):
+ tagsToClear += [base2_duplicateMarkForDeletion + f"_{x}"]
+ tagsToClear = list(set(tagsToClear)) # Remove duplicates
+ validTags = []
+ for tagToClear in tagsToClear:
+ if removeTagFromAllScenes(tagToClear, deleteTags):
+ validTags +=[tagToClear]
+ if doJsonReturn:
+ jsonReturn = "{'removeAllDupTagFromAllScenes' : " + f"{duplicateMarkForDeletion}" + ", 'OtherTags': '" + f"{validTags}" + "'}"
+ stash.Log(f"Sending json value {jsonReturn}")
+ sys.stdout.write(jsonReturn)
+ else:
+ stash.Log(f"Clear tags {tagsToClear}")
+
+def updateScenesInReport(fileName, scene):
+ stash.Log(f"Updating table rows with scene {scene} in file {fileName}")
+ scene1 = -1
+ scene2 = -1
+ strToFind = "class=\"ID_"
+ lines = None
+ with open(fileName, 'r') as file:
+ lines = file.readlines()
+ stash.Log(f"line count = {len(lines)}")
+ with open(fileName, 'w') as file:
+ for line in lines:
+ # stash.Debug(f"line = {line}")
+ if f"class=\"ID_{scene}\"" in line:
+ idx = 0
+ while line.find(strToFind, idx) > -1:
+ idx = line.find(strToFind, idx) + len(strToFind)
+ id = line[idx:]
+ stash.Debug(f"id = {id}, idx = {idx}")
+ id = id[:id.find('"')]
+ stash.Debug(f"id = {id}")
+ if scene1 == -1:
+ scene1 = int(id)
+ elif scene1 != int(id) and scene2 == -1:
+ scene2 = int(id)
+ elif scene1 != -1 and scene2 != -1:
+ break
+ if scene1 != -1 and scene2 != -1:
+ sceneDetail1 = stash.find_scene(scene1)
+ sceneDetail2 = stash.find_scene(scene2)
+ if sceneDetail1 == None or sceneDetail2 == None:
+ stash.Error("Could not get scene details for both scene1 ({scene1}) and scene2 ({scene2}); sceneDetail1={sceneDetail1}; sceneDetail2={sceneDetail2};")
+ else:
+ writeRowToHtmlReport(file, sceneDetail1, sceneDetail2)
+ else:
+ stash.Error(f"Could not get both scene ID associated with scene {scene}; scene1 = {scene1}; scene2 = {scene2}")
+ file.write(line)
+ else:
+ file.write(line)
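+
+ # Assumption (inferred from the parser above): each report row embeds its two
+ # scene IDs as CSS classes of the form class="ID_<sceneId>". The loop scans a
+ # row containing the updated scene for the first two distinct IDs, re-fetches
+ # both scenes, and rewrites that row via writeRowToHtmlReport.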
+def updateScenesInReports(scene, ReportName = htmlReportName):
+ if os.path.isfile(ReportName):
+ updateScenesInReport(ReportName, scene)
+ for x in range(2, 9999):
+ fileName = ReportName.replace(".html", f"_{x-1}.html")
+ stash.Debug(f"Checking if file '{fileName}' exist.")
+ if not os.path.isfile(fileName):
+ break
+ updateScenesInReport(fileName, scene)
+ else:
+ stash.Log(f"Report file does not exist: {ReportName}")
+def addPropertyToSceneClass(fileName, scene, property):
+ stash.Log(f"Inserting property {property} for scene {scene} in file {fileName}")
+ doStyleEndTagCheck = True
+ lines = None
+ with open(fileName, 'r') as file:
+ lines = file.readlines()
+ stash.Log(f"line count = {len(lines)}")
+ with open(fileName, 'w') as file:
+ for line in lines:
+ # stash.Debug(f"line = {line}")
+ if doStyleEndTagCheck:
+ if property == "" and line.startswith(f".ID_{scene}" + "{"):
+ continue
+ if line.startswith(""):
+ if property != "":
+ styleSetting = f".ID_{scene}{property}\n"
+ stash.Log(f"styleSetting = {styleSetting}")
+ file.write(styleSetting)
+ doStyleEndTagCheck = False
+ file.write(line)
+def addPropertyToSceneClassToAllFiles(scene, property, ReportName = htmlReportName):
+ if os.path.isfile(ReportName):
+ addPropertyToSceneClass(ReportName, scene, property)
+ for x in range(2, 9999):
+ fileName = ReportName.replace(".html", f"_{x-1}.html")
+ stash.Debug(f"Checking if file '{fileName}' exist.")
+ if not os.path.isfile(fileName):
+ break
+ addPropertyToSceneClass(fileName, scene, property)
+ else:
+ stash.Log(f"Report file does not exist: {ReportName}")
+
+def deleteScene(disableInReport=True, deleteFile=True):
+ if 'Target' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['Target']
+ stash.Log(f"Processing scene ID# {scene}")
+ result = None
+ result = stash.destroyScene(scene, delete_file=deleteFile)
+ if disableInReport:
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:gray;pointer-events:none;}")
+ stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene} with results = {result}")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id: '{scene}', result: '{result}'" + "}")
+
+def copyScene(moveScene=False):
+ scene1, scene2 = getParseData()
+ if scene1 == None or scene2 == None:
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
+ return
+ if moveScene:
+ stash.mergeMetadata(scene1, scene2)
+ result = shutil.copy(scene1['files'][0]['path'], scene2['files'][0]['path'])
+ if moveScene:
+ result = stash.destroyScene(scene1['id'], delete_file=True)
+ stash.Log(f"destroyScene for scene {scene1['id']} results = {result}")
+ stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene1['id']} and {scene2['id']}")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}', result: '{result}'" + "}")
+
+def renameFile():
+ scene, newName = getParseData(getSceneDetails2=False)
+ if scene == None or newName == None:
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', newName: '{newName}'" + "}")
+ return
+ newName = newName.strip("'")
+ ext = pathlib.Path(scene['files'][0]['path']).suffix
+ newNameFull = f"{pathlib.Path(scene['files'][0]['path']).resolve().parent}{os.sep}{newName}{ext}"
+ newNameFull = newNameFull.strip("'")
+ newNameFull = newNameFull.replace("\\\\", "\\")
+ oldNameFull = scene['files'][0]['path']
+ oldNameFull = oldNameFull.strip("'")
+ oldNameFull = oldNameFull.replace("\\\\", "\\")
+ stash.Log(f"renaming file '{stash.asc2(oldNameFull)}' to '{stash.asc2(newNameFull)}'")
+ result = os.rename(oldNameFull, newNameFull)
+ stash.renameFileNameInDB(scene['files'][0]['id'], pathlib.Path(oldNameFull).stem, f"{newName}{ext}", UpdateUsingIdOnly = True)
+ updateScenesInReports(scene['id'])
+ stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene['id']} ;renamed to {newName}; result={result}")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene['id']}', newName: '{newName}', result: '{result}'" + "}")
+
+def flagScene():
+ scene, flagType = getParseData(False, False)
+ if scene == None or flagType == None:
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', flagType: '{flagType}'" + "}")
+ return
+ if flagType == "disable-scene":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:gray;pointer-events:none;}")
+ elif flagType == "strike-through":
+ addPropertyToSceneClassToAllFiles(scene, "{text-decoration: line-through;}")
+ elif flagType == "yellow highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:yellow;}")
+ elif flagType == "green highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:#00FF00;}")
+ elif flagType == "orange highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:orange;}")
+ elif flagType == "cyan highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:cyan;}")
+ elif flagType == "pink highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:pink;}")
+ elif flagType == "red highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:red;}")
+ elif flagType == "remove all flags":
+ addPropertyToSceneClassToAllFiles(scene, "")
+ else:
+ stash.Log(f"Invalid flagType ({flagType})")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', flagType: '{flagType}'" + "}")
+ return
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene}', flagType: '{flagType}'" + "}")
+
+# ToDo: Add to UI menu
+# Remove all Dup tagged files (Just remove from stash, and leave file)
+# Clear GraylistMarkForDel tag
+# Delete GraylistMarkForDel tag
+# Remove from stash all files no longer part of stash library
+# Remove from stash all files in the Exclusion list (Not supporting regexps)
+# ToDo: Add to advance menu
+# Remove only graylist dup
+# Exclude graylist from delete
+# Include graylist in delete
+
+try:
+ if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+ mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "create_duplicate_report_task":
+ mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
+ manageTagggedDuplicates(deleteScenes=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
+ mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "clear_duplicate_tags_task":
+ removeAllDupTagsFromAllScenes()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "graylist_tag_task":
+ manageTagggedDuplicates(setGrayListTag=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
+ stash.metadata_generate({"phashes": True})
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteScene":
+ deleteScene()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "removeScene":
+ deleteScene(deleteFile=False)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "renameFile":
+ renameFile()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "flagScene":
+ flagScene()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "copyScene":
+ copyScene()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "moveScene":
+ copyScene(moveScene=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "removeDupTag":
+ removeDupTag()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "addExcludeTag":
+ addExcludeTag()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "removeExcludeTag":
+ removeExcludeTag()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "mergeTags":
+ mergeTags()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "getLocalDupReportPath":
+ getLocalDupReportPath()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteLocalDupReportHtmlFiles":
+ deleteLocalDupReportHtmlFiles()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "createDuplicateReportWithoutTagging":
+ mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteAllDupFileManagerTags":
+ removeAllDupTagsFromAllScenes(deleteTags=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesTask":
+ mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteTaggedDuplicatesLwrResOrLwrDuration":
+ mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteLowerResAndDuration=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration":
+ mangeDupFiles(deleteDup=True, merge=mergeDupFilename, deleteBlacklistOnly=True, deleteLowerResAndDuration=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif parse_args.dup_tag:
+ stash.PLUGIN_TASK_NAME = "dup_tag"
+ mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
+ stash.Debug(f"Tag duplicate EXIT")
+ elif parse_args.del_tag:
+ stash.PLUGIN_TASK_NAME = "del_tag"
+ manageTagggedDuplicates(deleteScenes=True)
+ stash.Debug(f"Delete Tagged duplicates EXIT")
+ elif parse_args.clear_tag:
+ stash.PLUGIN_TASK_NAME = "clear_tag"
+ removeAllDupTagsFromAllScenes()
+ stash.Debug(f"Clear duplicate tags EXIT")
+ elif parse_args.remove:
+ stash.PLUGIN_TASK_NAME = "remove"
+ mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
+ stash.Debug(f"Delete duplicate EXIT")
+ elif len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in advanceMenuOptions:
+ manageTagggedDuplicates(deleteScenes=True, advanceMenuOptionSelected=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ else:
+ stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})")
+except Exception as e:
+ tb = traceback.format_exc()
+
+ stash.Error(f"Exception while running DupFileManager Task({stash.PLUGIN_TASK_NAME}); Error: {e}\nTraceBack={tb}")
+ killScanningJobs()
+ stash.convertToAscii = False
+ stash.Error(f"Error: {e}\nTraceBack={tb}")
+ if doJsonReturn:
+ sys.stdout.write("{" + f"Exception : '{e}; See log file for TraceBack' " + "}")
-stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
+stash.Log("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index c75f561f..3d2f6ff1 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -1,55 +1,70 @@
name: DupFileManager
description: Manages duplicate files.
-version: 0.1.2
+version: 0.1.9
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+ui:
+ javascript:
+ - DupFileManager.js
+ css:
+ - DupFileManager.css
+ - DupFileManager.css.map
+ - DupFileManager.js.map
settings:
+ matchDupDistance:
+ displayName: Match Duplicate Distance
+ description: (Default=0) Where 0 = Exact Match, 1 = High Match, 2 = Medium Match, and 3 = Low Match.
+ type: NUMBER
mergeDupFilename:
displayName: Merge Duplicate Tags
description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
type: BOOLEAN
- permanentlyDelete:
- displayName: Permanent Delete
- description: Enable to permanently delete files, instead of moving files to trash can.
- type: BOOLEAN
whitelistDelDupInSameFolder:
displayName: Whitelist Delete In Same Folder
description: Allow whitelist deletion of duplicates within the same whitelist folder.
type: BOOLEAN
- whitelistDoTagLowResDup:
- displayName: Whitelist Duplicate Tagging
- description: Enable to tag whitelist duplicates of lower resolution or duration or same folder.
- type: BOOLEAN
- zCleanAfterDel:
- displayName: Run Clean After Delete
- description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
- type: BOOLEAN
- zSwapHighRes:
- displayName: Swap High Resolution
- description: If enabled, swap higher resolution duplicate files to preferred path.
- type: BOOLEAN
- zSwapLongLength:
- displayName: Swap Longer Duration
- description: If enabled, swap longer duration media files to preferred path. Longer is determine by significantLongerTime field.
- type: BOOLEAN
- zWhitelist:
+ zvWhitelist:
displayName: White List
description: A comma seperated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
type: STRING
- zxGraylist:
+ zwGraylist:
displayName: Gray List
- description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+ description: Preferential paths to determine which duplicate should be kept. E.g. C:\2nd_Fav,C:\3rd_Fav,C:\4th_Fav,H:\ShouldKeep
type: STRING
- zyBlacklist:
+ zxBlacklist:
displayName: Black List
- description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
+ description: Least preferential paths; determines primary deletion candidates. E.g. C:\Downloads,C:\DelMe-3rd,C:\DelMe-2nd,C:\DeleteMeFirst
type: STRING
zyMaxDupToProcess:
displayName: Max Dup Process
- description: Maximum number of duplicates to process. If 0, infinity
+ description: (Default=0) Maximum number of duplicates to process. If 0, there is no limit.
type: NUMBER
- zzdebugTracing:
- displayName: Debug Tracing
- description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
+ zySwapBetterBitRate:
+ displayName: Swap Better Bit Rate
+ description: Swap better bit rate for duplicate files. Use with the DupFileManager_config.py option favorHighBitRate.
+ type: BOOLEAN
+ zySwapBetterFrameRate:
+ displayName: Swap Better Frame Rate
+ description: Swap better frame rate for duplicates. Use with the DupFileManager_config.py option favorHigherFrameRate.
+ type: BOOLEAN
+ zySwapCodec:
+ displayName: Swap Better Codec
+ description: If enabled, swap better codec duplicate files to preferred path.
+ type: BOOLEAN
+ zySwapHighRes:
+ displayName: Swap Higher Resolution
+ description: If enabled, swap higher resolution duplicate files to preferred path.
+ type: BOOLEAN
+ zySwapLongLength:
+ displayName: Swap Longer Duration
+ description: If enabled, swap longer duration media files to preferred path. Longer is determined by the significantLongerTime field.
+ type: BOOLEAN
+ zzDebug:
+ displayName: Debug
+ description: Enable debug so as to add additional debug logging in Stash\plugins\DupFileManager\DupFileManager.log
+ type: BOOLEAN
+ zzTracing:
+ displayName: Tracing
+ description: Enable tracing and debug so as to add additional tracing and debug logging in Stash\plugins\DupFileManager\DupFileManager.log
type: BOOLEAN
exec:
- python
@@ -60,7 +75,11 @@ tasks:
description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, or black list path.
defaultArgs:
mode: tag_duplicates_task
- - name: Delete Tagged Duplicates
+ - name: Clear Tags
+ description: Clear tag DuplicateMarkForDeletion. Remove the tag from all files.
+ defaultArgs:
+ mode: clear_duplicate_tags_task
+ - name: Delete Tagged Scenes
description: Only delete scenes having DuplicateMarkForDeletion tag.
defaultArgs:
mode: delete_tagged_duplicates_task
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
index ab5b8178..65ee067c 100644
--- a/plugins/DupFileManager/DupFileManager_config.py
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -8,19 +8,85 @@
"dup_path": "", #Example: "C:\\TempDeleteFolder"
# The threshold as to what percentage is consider a significant shorter time.
"significantTimeDiff" : .90, # 90% threshold
- # Valued passed to stash API function FindDuplicateScenes.
- "duration_diff" : 10, # (default=10) A value from 1 to 10.
# If enabled, moves destination file to recycle bin before swapping Hi-Res file.
"toRecycleBeforeSwap" : True,
# Character used to seperate items on the whitelist, blacklist, and graylist
"listSeparator" : ",",
+ # Enable to permanently delete files, instead of moving files to trash can.
+ "permanentlyDelete" : False,
+ # After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
+ "cleanAfterDel" : True,
+ # Generate PHASH after tag or delete task.
+ "doGeneratePhash" : False,
+ # If enabled, skip processing tagged scenes. This option is ignored if createHtmlReport is True
+ "skipIfTagged" : False,
+ # If enabled, stop multiple scanning jobs after processing duplicates
+ "killScanningPostProcess" : True,
+ # If enabled, tag scenes which have longer duration, but lower resolution
+ "tagLongDurationLowRes" : True,
+ # If enabled, bit-rate is used in important comparisons for function allThingsEqual
+ "bitRateIsImporantComp" : True,
+ # If enabled, codec is used in important comparisons for function allThingsEqual
+ "codecIsImporantComp" : True,
+
+ # Tag names **************************************************
# Tag used to tag duplicates with lower resolution, duration, and file name length.
"DupFileTag" : "DuplicateMarkForDeletion",
- # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
- "DupWhiteListTag" : "DuplicateWhitelistFile",
+ # Tag name used to tag duplicates in the whitelist. E.g. _DuplicateWhitelistFile
+ "DupWhiteListTag" : "_DuplicateWhitelistFile",
+ # Tag name used to exclude duplicate from deletion
+ "excludeDupFileDeleteTag" : "_ExcludeDuplicateMarkForDeletion",
+ # Tag name used to tag scenes with existing tag DuplicateMarkForDeletion, and that are in the graylist
+ "graylistMarkForDeletion" : "_GraylistMarkForDeletion",
+ # Tag name for scenes with significant longer duration but lower resolution
+ "longerDurationLowerResolution" : "_LongerDurationLowerResolution",
+
+ # Other tag related options **************************************************
+ # If enabled, when adding tag DuplicateMarkForDeletion to graylist scene, also add tag _GraylistMarkForDeletion.
+ "graylistTagging" : True,
+ # If enabled, the Clear Tags task clears scenes of all tags (DuplicateMarkForDeletion, _DuplicateWhite..., _ExcludeDup..., _Graylist..., _LongerDur...)
+ "clearAllDupfileManagerTags" : True,
+ # If enabled, append dup tag name with match duplicate distance number. I.E. (DuplicateMarkForDeletion_0) or (DuplicateMarkForDeletion_1)
+ "appendMatchDupDistance" : True,
+ # If enabled, start dup tag name with an underscore. I.E. (_DuplicateMarkForDeletion). Places tag at the end of tag list.
+ "underscoreDupFileTag" : True,
+
+ # Favor settings *********************************************
+ # If enabled, favor longer file name over shorter. If disabled, favor shorter file name.
+ "favorLongerFileName" : True,
+ # If enabled, favor larger file size over smaller. If disabled, favor smaller file size.
+ "favorLargerFileSize" : True,
+ # If enabled, favor videos with a different bit rate value. If favorHighBitRate is true, favor higher rate. If favorHighBitRate is false, favor lower rate
+ "favorBitRateChange" : True,
+ # If enabled, favor videos with higher bit rate. Used with either favorBitRateChange option or UI [Swap Bit Rate Change] option.
+ "favorHighBitRate" : True,
+ # If enabled, favor videos with a different frame rate value. If favorHigherFrameRate is true, favor higher rate. If favorHigherFrameRate is false, favor lower rate
+ "favorFrameRateChange" : True,
+ # If enabled, favor videos with higher frame rate. Used with either favorFrameRateChange option or UI [Swap Better Frame Rate] option.
+ "favorHigherFrameRate" : True,
+ # If enabled, favor videos with better codec according to codecRanking
+ "favorCodecRanking" : True,
+ # Codec Ranking in order of preference (default (codecRankingSet1) is order of ranking based on maximum potential efficiency)
+ "codecRankingSet1" : ["h266", "vvc", "av1", "vvdec", "shvc", "h265", "hevc", "xvc", "vp9", "h264", "avc", "mvc", "msmpeg4v10", "vp8", "vcb", "msmpeg4v3", "h263", "h263i", "msmpeg4v2", "msmpeg4v1", "mpeg4", "mpeg-4", "mpeg4video", "theora", "vc3", "vc-3", "vp7", "vp6f", "vp6", "vc1", "vc-1", "mpeg2", "mpeg-2", "mpeg2video", "h262", "h222", "h261", "vp5", "vp4", "vp3", "wmv3", "mpeg1", "mpeg-1", "mpeg1video", "vp3", "wmv2", "wmv1", "wmv", "flv1", "png", "gif", "jpeg", "m-jpeg", "mjpeg"],
+ # codecRankingSet2 is in order of least potential efficiency
+ "codecRankingSet2" : ["gif", "png", "flv1", "mpeg1video", "mpeg1", "wmv1", "wmv2", "wmv3", "mpeg2video", "mpeg2", "AVC", "vc1", "vc-1", "msmpeg4v1", "msmpeg4v2", "msmpeg4v3", "mpeg4", "vp6f", "vp8", "h263i", "h263", "h264", "h265", "av1", "vp9", "h266"],
+ # codecRankingSet3 is in order of quality
+ "codecRankingSet3" : ["h266", "vp9", "av1", "h265", "h264", "h263", "h263i", "vp8", "vp6f", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"],
+ # codecRankingSet4 is in order of compatibility
+ "codecRankingSet4" : ["h264", "vp8", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "h266", "vp9", "av1", "h265", "h263", "h263i", "vp6f", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"],
+ # Determines which codecRankingSet to use when ranking codec. Default is 1 for codecRankingSet1
+ "codecRankingSetToUse" : 1,
# The following fields are ONLY used when running DupFileManager in script mode
"endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
"endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
"endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
}
+
+# Codec ranking research source:
+ # https://imagekit.io/blog/video-encoding/
+ # https://support.spinetix.com/wiki/Video_decoding
+ # https://en.wikipedia.org/wiki/Comparison_of_video_codecs
+ # https://en.wikipedia.org/wiki/List_of_open-source_codecs
+ # https://en.wikipedia.org/wiki/List_of_codecs
+ # https://en.wikipedia.org/wiki/Comparison_of_video_container_formats
diff --git a/plugins/DupFileManager/DupFileManager_report_config.py b/plugins/DupFileManager/DupFileManager_report_config.py
new file mode 100644
index 00000000..81151229
--- /dev/null
+++ b/plugins/DupFileManager/DupFileManager_report_config.py
@@ -0,0 +1,212 @@
+# Description: This is a Stash plugin which manages duplicate files.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from following link:
+# https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
+
+# HTML Report Options **************************************************
+report_config = {
+ # Paginate HTML report. Maximum number of results to display on one page, before adding (paginating) an additional page.
+ "htmlReportPaginate" : 100,
+ # Name of the HTML file to create
+ "htmlReportName" : "DuplicateTagScenes.html",
+ # If enabled, report displays an image preview similar to sceneDuplicateChecker
+ "htmlIncludeImagePreview" : False,
+ "htmlImagePreviewPopupSize" : 600,
+ # HTML report prefix, before table listing
+ "htmlReportPrefix" : """
+
+
+Stash Duplicate Report
+
+
+
+
+
+
+
+
+Report Info | Report Options |
+
+
+Found (QtyPlaceHolder) duplice sets |
+Date Created: (DateCreatedPlaceHolder) |
+
|
+ |
+
+Stash Duplicate Scenes Report (MatchTypePlaceHolder)
\n""",
+ # HTML report postfix, after table listing
+ "htmlReportPostfix" : "\n",
+ # HTML report table
+ "htmlReportTable" : "",
+ # HTML report table row
+ "htmlReportTableRow" : "",
+ # HTML report table header
+ "htmlReportTableHeader" : "",
+ # HTML report table data
+ "htmlReportTableData" : " | ",
+ # HTML report video preview
+ "htmlReportVideoPreview" : "width='160' height='120' controls", # Alternative option "autoplay loop controls" or "autoplay controls"
+ # The number of seconds in time difference for super highlight on the htmlReport
+ "htmlHighlightTimeDiff" : 3,
+ # Super highlight for details with higher resolution or duration
+ "htmlSupperHighlight" : "yellow",
+ # Lower highlight for details with slightly higher duration
+ "htmlLowerHighlight" : "nyanza",
+ # Text color for details with different resolution, duration, size, bitrate,codec, or framerate
+ "htmlDetailDiffTextColor" : "red",
+ # If enabled, create an HTML report when tagging duplicate files
+ "createHtmlReport" : True,
+ # If enabled, report displays stream instead of preview for video
+ "streamOverPreview" : False, # This option works in Chrome, but does not work very well on firefox.
+}
diff --git a/plugins/DupFileManager/ModulesValidate.py b/plugins/DupFileManager/ModulesValidate.py
new file mode 100644
index 00000000..4de2f3a4
--- /dev/null
+++ b/plugins/DupFileManager/ModulesValidate.py
@@ -0,0 +1,126 @@
+# ModulesValidate (By David Maisonave aka Axter)
+# Description:
+# Checks if packages are installed, and optionally installs packages if missing.
+# The example usage code below should be placed at the very top of the source code, before any other imports.
+# Example Usage:
+# import ModulesValidate
+# ModulesValidate.modulesInstalled(["watchdog", "schedule", "requests"])
+# Testing:
+# To test, uninstall packages via command line: pip uninstall -y watchdog schedule requests
+import sys, os, pathlib, platform, traceback
+# ToDo: Add logic to optionally pull package requirements from requirements.txt file.
+
+def modulesInstalled(moduleNames, install=True, silent=False):
+ returnValue = True
+ for moduleName in moduleNames:
+ try: # Try the Python 3.3+ way
+ import importlib
+ import importlib.util
+ if moduleName in sys.modules:
+ if not silent: print(f"{moduleName!r} already in sys.modules")
+ elif isModuleInstalled(moduleName):
+ if not silent: print(f"Module {moduleName!r} is available.")
+ else:
+ if install and (results:=installModule(moduleName)) > 0:
+ if results == 1:
+ print(f"Module {moduleName!r} has been installed")
+ else:
+ if not silent: print(f"Module {moduleName!r} is already installed")
+ continue
+ else:
+ if install:
+ print(f"Can't find the {moduleName!r} module")
+ returnValue = False
+ except Exception as e:
+ try:
+ i = importlib.import_module(moduleName)
+ except ImportError as e:
+ if install and (results:=installModule(moduleName)) > 0:
+ if results == 1:
+ print(f"Module {moduleName!r} has been installed")
+ else:
+ if not silent: print(f"Module {moduleName!r} is already installed")
+ continue
+ else:
+ if install:
+ tb = traceback.format_exc()
+ print(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
+ returnValue = False
+ return returnValue
+
+def isModuleInstalled(moduleName):
+ try:
+ __import__(moduleName)
+ return True
+ except Exception as e:
+ pass
+ return False
+
+def installModule(moduleName):
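+ # Return codes: 1 = module installed; 2 = already installed; 0 = failure; -1 = pip unavailable (Linux).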
+ try:
+ if isLinux():
+ # Note: Linux may first need : sudo apt install python3-pip
+ # if error starts with "Command 'pip' not found"
+ # or includes "No module named pip"
+ results = os.popen(f"pip --disable-pip-version-check --version").read()
+ if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
+ results = os.popen(f"sudo apt install python3-pip").read()
+ results = os.popen(f"pip --disable-pip-version-check --version").read()
+ if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
+ return -1
+ if isFreeBSD():
+ print("Warning: installModule may NOT work on freebsd")
+ pipArg = " --disable-pip-version-check"
+ if isDocker():
+ pipArg += " --break-system-packages"
+ results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
+ results = results.strip("\n")
+ if results.find("Requirement already satisfied:") > -1:
+ return 2
+ elif results.find("Successfully installed") > -1:
+ return 1
+ elif modulesInstalled(moduleNames=[moduleName], install=False):
+ return 1
+ except Exception as e:
+ pass
+ return 0
+
+def installPackage(package): # Should delete this. It doesn't work consistently
+ try:
+ import pip
+ if hasattr(pip, 'main'):
+ pip.main(['install', package])
+ else:
+ pip._internal.main(['install', package])
+ except Exception as e:
+ return False
+ return True
+
+def isDocker():
+ cgroup = pathlib.Path('/proc/self/cgroup')
+ return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()
+
+def isWindows():
+ if any(platform.win32_ver()):
+ return True
+ return False
+
+def isLinux():
+ if platform.system().lower().startswith("linux"):
+ return True
+ return False
+
+def isFreeBSD():
+ if platform.system().lower().startswith("freebsd"):
+ return True
+ return False
+
+def isMacOS():
+ if sys.platform == "darwin":
+ return True
+ return False
+
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index 7d0cf052..0a90703c 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -1,11 +1,40 @@
-# DupFileManager: Ver 0.1.2 (By David Maisonave)
+# DupFileManager: Ver 0.1.9 (By David Maisonave)
-DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate file in the Stash system.
+DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate files in the Stash system.
+It has both **task** and **tools-UI** components.
### Features
+- Creates a duplicate file report which can be accessed from the Settings->Tools menu options. The report is created as an HTML file and stored locally under plugins\DupFileManager\report\DuplicateTagScenes.html.
+ - See screenshot at the bottom of this page for example report.
+ - Items on the left side of the report are the primary duplicates designated for deletion. By default, these duplicates are given a special \_duplicate tag.
+ - Items on the right side of the report are the duplicates designated to keep. They usually have higher resolution, longer duration, and/or preferred paths.
+ - The report has the following options:
+ - Delete: Delete file and remove from Stash library.
+ - Remove: Remove from Stash library.
+ - Rename: Rename file.
+ - Copy: Copy file from left (source) to right (to-keep).
+ - Move: Copy file and metadata from left to right, then delete the left (source) file.
+ - Cpy-Name: Copy file name left to right.
+ - Add-Exclude: Add exclude tag to scene, so that the scene is excluded from deletion.
+ - Remove-Tag: Remove duplicate tag from scene.
+ - Flag-Scene: Flag (mark) scene in report as reviewed (or as requiring further review). Optional flags (yellow, green, orange, cyan, pink, red, strike-through, & disable-scene).
+ - Merge: Copy metadata (tags, performers, & studios) from left to right.
- Can merge potential source in the duplicate file names for tag names, performers, and studios.
- Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
+- Advanced menu (for specially tagged duplicates)
+ ![Screenshot 2024-11-22 145139](https://github.com/user-attachments/assets/d76646f0-c5a8-4069-ad0f-a6e5e96e7ed0)
+ - Delete only specially tagged duplicates in blacklist path.
+ - Delete duplicates with specified file path.
+ - Delete duplicates with a specific string in the file name.
+ - Delete duplicates with specified file size range.
+ - Delete duplicates with specified duration range.
+ - Delete duplicates with specified resolution range.
+ - Delete duplicates having specified tags.
+ - Delete duplicates with specified rating.
+ - Delete duplicates with any of the above combinations.
+- Bottom extended portion of the Advanced Menu screen.
+ - ![Screenshot 2024-11-22 232005](https://github.com/user-attachments/assets/9a0d2e9d-783b-4ea2-8fa5-3805b40af4eb)
- Delete duplicate file task with the following options:
- Tasks (Settings->Task->[Plugin Tasks]->DupFileManager)
- **Tag Duplicates** - Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
@@ -13,11 +42,11 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
- **Delete Duplicates** - Deletes duplicate files. Performs deletion without first tagging.
- Plugin UI options (Settings->Plugins->Plugins->[DupFileManager])
- Has a 3 tier path selection to determine which duplicates to keep, and which should be candidates for deletions.
- - **Whitelist** - List of paths NOT to be deleted.
+ - **Whitelist** - List of paths NOT to be deleted.
- E.g. C:\Favorite\,E:\MustKeep\
- - **Gray-List** - List of preferential paths to determine which duplicate should be the primary.
+ - **Gray-List** - List of preferential paths to determine which duplicate should be the primary.
- E.g. C:\2nd_Favorite\,H:\ShouldKeep\
- - **Blacklist** - List of LEAST preferential paths to determine primary candidates for deletion.
+ - **Blacklist** - List of LEAST preferential paths to determine primary candidates for deletion.
- E.g. C:\Downloads\,F:\DeleteMeFirst\
- **Permanent Delete** - Enable to permanently delete files, instead of moving files to trash can.
- **Max Dup Process** - Use to limit the maximum files to process. Can be used to do a limited test run.
@@ -28,12 +57,15 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
- **dup_path** - Alternate path to move deleted files to. Example: "C:\TempDeleteFolder"
- **toRecycleBeforeSwap** - When enabled, moves destination file to recycle bin before swapping files.
- **addPrimaryDupPathToDetails** - If enabled, adds the primary duplicate path to the scene detail.
+- Tools UI Menu
+ ![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323)
+ - Can access either **Duplicate File Report (DupFileManager)** or **DupFileManager Tools and Utilities** menu options.
### Requirements
-`pip install --upgrade stashapp-tools`
-`pip install pyYAML`
-`pip install Send2Trash`
+- `pip install --upgrade stashapp-tools`
+- `pip install requests`
+- `pip install Send2Trash`
### Installation
@@ -48,3 +80,31 @@ That's it!!!
- Options are accessible in the GUI via Settings->Plugins->Plugins->[DupFileManager].
- More options available in DupFileManager_config.py.
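+- For example, a few commonly changed entries in **DupFileManager_config.py** (an illustrative excerpt of the settings dictionary; the values shown are only examples):
+
+```python
+# Illustrative excerpt of the settings dictionary in DupFileManager_config.py:
+{
+    # Alternate path to move deleted files to. Example: "C:\\TempDeleteFolder"
+    "dup_path" : "C:\\TempDeleteFolder",
+    # Enable to permanently delete files, instead of moving files to trash can.
+    "permanentlyDelete" : False,
+    # After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
+    "cleanAfterDel" : True,
+}
+```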
+
+### Screenshots
+
+- Example DupFileManager duplicate report (file names have been edited to be PG).
+ - The report displays playable preview videos, which play a few-second sample of the video. This requires the scan setting **[Generate animated image previews]** to be enabled when scanning all files.
+ - ![Screenshot 2024-11-22 225359](https://github.com/user-attachments/assets/dc705b24-e2d7-4663-92fd-1516aa7aacf5)
+ - If there's a scene on the left side that has a higher resolution or duration, it gets a yellow highlight on the report.
+ - There's an optional setting that allows both preview videos and preview images to be displayed on the report. See settings **htmlIncludeImagePreview** in the **DupFileManager_report_config.py** file.
+ - There are many more options available for how the report is created, targeted at more advanced users. They are all in the **DupFileManager_report_config.py** file in the DupFileManager plugin folder, and each setting has a commented description preceding it.
+- Tools UI Menu
+ ![Screenshot 2024-11-22 145512](https://github.com/user-attachments/assets/03e166eb-ddaa-4eb8-8160-4c9180ca1323)
+ - Can access either **Duplicate File Report (DupFileManager)** or **DupFileManager Tools and Utilities** menu options.
+- DupFileManager Report Menu
+ - ![Screenshot 2024-11-22 151630](https://github.com/user-attachments/assets/834ee60f-1a4a-4a3e-bbf7-23aeca2bda1f)
+- DupFileManager Tools and Utilities
+ - ![Screenshot 2024-11-22 152023](https://github.com/user-attachments/assets/4daaea9e-f603-4619-b536-e6609135bab1)
+- Full bottom extended portion of the Advanced Menu screen.
+ - ![Screenshot 2024-11-22 232208](https://github.com/user-attachments/assets/bf1f3021-3a8c-4875-9737-60ee3d7fe675)
+
+### Future Planned Features
+
+- Currently, the report and advanced menu do not work with Stash settings requiring a password. Additional logic will be added to have them use the API Key. Planned for 1.0.0 Version.
+- Add an advanced menu that will work with non-tagged reports. It will iterate through the existing report file(s) to apply deletions, instead of searching the Stash DB for tagged files. Planned for 1.1.0 Version.
+- Graylist deletion option will be added to the advanced menu. Planned for 1.0.5 Version.
+- Add advanced menu directly to the Settings->Tools menu. Planned for 1.5.0 Version.
+- Add report directly to the Settings->Tools menu. Planned for 1.5.0 Version.
+- Remove all flags from all scenes option. Planned for 1.0.5 Version.
+- Transfer option settings **[Disable Complete Confirmation]** and **[Disable Delete Confirmation]** when paginating. Planned for 1.0.5 Version.
diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py
index 6f0d3d15..a9be414e 100644
--- a/plugins/DupFileManager/StashPluginHelper.py
+++ b/plugins/DupFileManager/StashPluginHelper.py
@@ -1,12 +1,3 @@
-from stashapi.stashapp import StashInterface
-from logging.handlers import RotatingFileHandler
-import re, inspect, sys, os, pathlib, logging, json
-import concurrent.futures
-from stashapi.stash_types import PhashDistance
-import __main__
-
-_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
-
# StashPluginHelper (By David Maisonave aka Axter)
# See end of this file for example usage
# Log Features:
@@ -24,6 +15,14 @@
# Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
# Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
# Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+from enum import Enum, IntEnum
+import __main__
+
class StashPluginHelper(StashInterface):
# Primary Members for external reference
PLUGIN_TASK_NAME = None
@@ -45,15 +44,44 @@ class StashPluginHelper(StashInterface):
API_KEY = None
excludeMergeTags = None
+ # class EnumInt(IntEnum):
+ # def __repr__(self) -> str:
+ # return f"{self.__class__.__name__}.{self.name}"
+ # def __str__(self) -> str:
+ # return str(self.value)
+ # def serialize(self):
+ # return self.value
+
+ class EnumValue(Enum):
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}.{self.name}"
+ def __str__(self) -> str:
+ return str(self.value)
+ def __add__(self, other):
+ return self.value + other.value
+ def serialize(self):
+ return self.value
+
# printTo argument
- LOG_TO_FILE = 1
- LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
- LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
- LOG_TO_STASH = 8
- LOG_TO_WARN = 16
- LOG_TO_ERROR = 32
- LOG_TO_CRITICAL = 64
- LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+ class LogTo(IntEnum):
+ FILE = 1
+ CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+ STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+ STASH = 8
+ WARN = 16
+ ERROR = 32
+ CRITICAL = 64
+ ALL = FILE + CONSOLE + STDERR + STASH
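+ # Illustrative usage: LogTo members are additive bit flags, so multiple targets
+ # can be combined with +, e.g. stash.Log("message", printTo = stash.LogTo.FILE + stash.LogTo.STASH)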
+
+ class DbgLevel(IntEnum):
+ TRACE = 1
+ DBG = 2
+ INF = 3
+ WRN = 4
+ ERR = 5
+ CRITICAL = 6
+
+ DBG_LEVEL = DbgLevel.INF
# Misc class variables
MAIN_SCRIPT_NAME = None
@@ -61,6 +89,25 @@ class StashPluginHelper(StashInterface):
LOG_FILE_DIR = None
LOG_FILE_NAME = None
STDIN_READ = None
+ stopProcessBarSpin = True
+ updateProgressbarOnIter = 0
+ currentProgressbarIteration = 0
+
+ class OS_Type(IntEnum):
+ WINDOWS = 1
+ LINUX = 2
+ MAC_OS = 3
+ FREEBSD = 4
+ UNKNOWN_OS = 5
+
+ OS_TYPE = OS_Type.UNKNOWN_OS
+
+ IS_DOCKER = False
+ IS_WINDOWS = False
+ IS_LINUX = False
+ IS_FREEBSD = False
+ IS_MAC_OS = False
+
pluginLog = None
logLinePreviousHits = []
thredPool = None
@@ -68,45 +115,76 @@ class StashPluginHelper(StashInterface):
_mergeMetadata = None
encodeToUtf8 = False
convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+ progressBarIsEnabled = True
# Prefix message value
- LEV_TRACE = "TRACE: "
- LEV_DBG = "DBG: "
- LEV_INF = "INF: "
- LEV_WRN = "WRN: "
- LEV_ERR = "ERR: "
- LEV_CRITICAL = "CRITICAL: "
-
- # Default format
- LOG_FORMAT = "[%(asctime)s] %(message)s"
+ class Level(EnumValue):
+ TRACE = "TRACE: "
+ DBG = "DBG: "
+ INF = "INF: "
+ WRN = "WRN: "
+ ERR = "ERR: "
+ CRITICAL = "CRITICAL: "
+ class Constant(EnumValue):
+ # Default format
+ LOG_FORMAT = "[%(asctime)s] %(message)s"
+ ARGUMENT_UNSPECIFIED = "_ARGUMENT_UNSPECIFIED_"
+ NOT_IN_LIST = 2147483646
+
# Externally modifiable variables
- log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
- log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging
+ log_to_err_set = LogTo.FILE + LogTo.STDERR # This can be changed by the calling source in order to customize what targets get error messages
+ log_to_norm = LogTo.FILE + LogTo.CONSOLE # Can be changed so as to set the target output for normal logging
# Warn message goes to both plugin log file and stash when sent to Stash log file.
- log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+ log_to_wrn_set = LogTo.STASH # This can be changed by the calling source in order to customize what targets get warning messages
def __init__(self,
- debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
- logFormat = LOG_FORMAT, # Plugin log line format
- dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
- maxbytes = 8*1024*1024, # Max size of plugin log file
- backupcount = 2, # Backup counts when log file size reaches max size
- logToWrnSet = 0, # Customize the target output set which will get warning logging
- logToErrSet = 0, # Customize the target output set which will get error logging
- logToNormSet = 0, # Customize the target output set which will get normal logging
- logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
- mainScriptName = "", # The main plugin script file name (full path)
- pluginID = "",
- settings = None, # Default settings for UI fields
- config = None, # From pluginName_config.py or pluginName_setting.py
- fragmentServer = None,
- stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
- apiKey = None, # API Key only needed when username and password set while running script via command line
+ debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
+ logFormat = Constant.LOG_FORMAT.value, # Plugin log line format
+ dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+ maxbytes = 8*1024*1024, # Max size of plugin log file
+ backupcount = 2, # Backup counts when log file size reaches max size
+ logToWrnSet = 0, # Customize the target output set which will get warning logging
+ logToErrSet = 0, # Customize the target output set which will get error logging
+ logToNormSet = 0, # Customize the target output set which will get normal logging
+ logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
+ mainScriptName = "", # The main plugin script file name (full path)
+ pluginID = "",
+ settings = None, # Default settings for UI fields
+ config = None, # From pluginName_config.py or pluginName_setting.py
+ fragmentServer = None,
+ stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
+ apiKey = None, # API Key only needed when username and password set while running script via command line
DebugTraceFieldName = "zzdebugTracing",
+ DebugFieldName = "zzDebug",
DryRunFieldName = "zzdryRun",
- setStashLoggerAsPluginLogger = False):
+ setStashLoggerAsPluginLogger = False,
+ DBG_LEVEL = DbgLevel.INF):
+ if DBG_LEVEL in list(self.DbgLevel):
+ self.DBG_LEVEL = DBG_LEVEL
+ if debugTracing:
+ self.DEBUG_TRACING = debugTracing
+ if self.DBG_LEVEL > self.DbgLevel.DBG:
+ self.DBG_LEVEL = self.DbgLevel.TRACE
+ elif self.DBG_LEVEL < self.DbgLevel.INF:
+ self.DEBUG_TRACING = True
self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+ if self.isWindows():
+ self.IS_WINDOWS = True
+ self.OS_TYPE = self.OS_Type.WINDOWS
+ elif self.isLinux():
+ self.IS_LINUX = True
+ self.OS_TYPE = self.OS_Type.LINUX
+ if self.isDocker():
+ self.IS_DOCKER = True
+ elif self.isFreeBSD():
+ self.IS_FREEBSD = True
+ self.OS_TYPE = self.OS_Type.FREEBSD
+ if self.isDocker():
+ self.IS_DOCKER = True
+ elif self.isMacOS():
+ self.IS_MAC_OS = True
+ self.OS_TYPE = self.OS_Type.MAC_OS
if logToWrnSet: self.log_to_wrn_set = logToWrnSet
if logToErrSet: self.log_to_err_set = logToErrSet
if logToNormSet: self.log_to_norm = logToNormSet
@@ -129,7 +207,6 @@ def __init__(self,
else:
self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
- if debugTracing: self.DEBUG_TRACING = debugTracing
if config:
self.pluginConfig = config
if self.Setting('apiKey', "") != "":
@@ -191,8 +268,14 @@ def __init__(self,
self.API_KEY = self.STASH_CONFIGURATION['apiKey']
self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
- self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
- if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+ if self.Setting(DebugTraceFieldName, self.DEBUG_TRACING):
+ self.DEBUG_TRACING = True
+ self.LOG_LEVEL = logging.TRACE
+ self.DBG_LEVEL = self.DbgLevel.TRACE
+ elif self.Setting(DebugFieldName, self.DEBUG_TRACING):
+ self.DEBUG_TRACING = True
+ self.LOG_LEVEL = logging.DEBUG
+ self.DBG_LEVEL = self.DbgLevel.DBG
logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
@@ -202,74 +285,104 @@ def __init__(self,
def __del__(self):
self.thredPool.shutdown(wait=False)
- def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+ def Setting(self, name, default=Constant.ARGUMENT_UNSPECIFIED.value, raiseEx=True, notEmpty=False):
if self.pluginSettings != None and name in self.pluginSettings:
if notEmpty == False or self.pluginSettings[name] != "":
return self.pluginSettings[name]
if self.pluginConfig != None and name in self.pluginConfig:
if notEmpty == False or self.pluginConfig[name] != "":
return self.pluginConfig[name]
- if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+ if default == self.Constant.ARGUMENT_UNSPECIFIED.value and raiseEx:
raise Exception(f"Missing {name} from both UI settings and config file settings.")
return default
- def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
- if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
- logMsg = self.asc2(logMsg)
- else:
- logMsg = logMsg
- if printTo == 0:
- printTo = self.log_to_norm
- elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
- logLevel = logging.ERROR
- printTo = self.log_to_err_set
- elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
- logLevel = logging.CRITICAL
- printTo = self.log_to_err_set
- elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
- logLevel = logging.WARN
- printTo = self.log_to_wrn_set
+ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None, printLogException = False):
+ try:
+ if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
+ logMsg = self.asc2(logMsg)
+ else:
+ logMsg = logMsg
+ if printTo == 0:
+ printTo = self.log_to_norm
+ elif printTo == self.LogTo.ERROR and logLevel == logging.INFO:
+ logLevel = logging.ERROR
+ printTo = self.log_to_err_set
+ elif printTo == self.LogTo.CRITICAL and logLevel == logging.INFO:
+ logLevel = logging.CRITICAL
+ printTo = self.log_to_err_set
+ elif printTo == self.LogTo.WARN and logLevel == logging.INFO:
+ logLevel = logging.WARN
+ printTo = self.log_to_wrn_set
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ LN_Str = f"[LN:{lineNo}]"
+ # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
+ if logLevel == logging.TRACE and (logAlways == False or self.LOG_LEVEL == logging.TRACE):
+ if levelStr == "": levelStr = self.Level.DBG
+ if printTo & self.LogTo.FILE: self.pluginLog.trace(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.trace(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG or self.LOG_LEVEL == logging.TRACE):
+ if levelStr == "": levelStr = self.Level.DBG
+ if printTo & self.LogTo.FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.INFO or logLevel == logging.DEBUG:
+ if levelStr == "": levelStr = self.Level.INF if logLevel == logging.INFO else self.Level.DBG
+ if printTo & self.LogTo.FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.WARN:
+ if levelStr == "": levelStr = self.Level.WRN
+ if printTo & self.LogTo.FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.ERROR:
+ if levelStr == "": levelStr = self.Level.ERR
+ if printTo & self.LogTo.FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.CRITICAL:
+ if levelStr == "": levelStr = self.Level.CRITICAL
+ if printTo & self.LogTo.FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LogTo.CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LogTo.STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+ except Exception as e:
+ if printLogException:
+ tb = traceback.format_exc()
+ print(f"Exception calling [Log]; Error: {e}\nTraceBack={tb}")
+ pass
+
+ def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+ if printTo == 0: printTo = self.LogTo.FILE
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
- LN_Str = f"[LN:{lineNo}]"
- # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
- if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
- if levelStr == "": levelStr = self.LEV_DBG
- if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.INFO or logLevel == logging.DEBUG:
- if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
- if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.WARN:
- if levelStr == "": levelStr = self.LEV_WRN
- if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.ERROR:
- if levelStr == "": levelStr = self.LEV_ERR
- if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.CRITICAL:
- if levelStr == "": levelStr = self.LEV_CRITICAL
- if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
- if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
- print(f"{LN_Str} {levelStr}{logMsg}")
- if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
- print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+ logLev = logging.INFO if logAlways else logging.TRACE
+ if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+ if logMsg == "":
+ logMsg = f"Line number {lineNo}..."
+ self.Log(logMsg, printTo, logLev, lineNo, self.Level.TRACE, logAlways, toAscii=toAscii)
- def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
- if printTo == 0: printTo = self.LOG_TO_FILE
+ # Log once per session. Only logs the first time called from a particular line number in the code.
+ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+ lineNo = inspect.currentframe().f_back.f_lineno
+ if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ return
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+ def Debug(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+ if printTo == 0: printTo = self.LogTo.FILE
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
logLev = logging.INFO if logAlways else logging.DEBUG
if self.DEBUG_TRACING or logAlways:
if logMsg == "":
logMsg = f"Line number {lineNo}..."
- self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+ self.Log(logMsg, printTo, logLev, lineNo, self.Level.DBG, logAlways, toAscii=toAscii)
# Log once per session. Only logs the first time called from a particular line number in the code.
- def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+ def DebugOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
lineNo = inspect.currentframe().f_back.f_lineno
if self.DEBUG_TRACING or logAlways:
FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
@@ -279,8 +392,8 @@ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None)
self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
# Log INFO on first call, then do Trace on remaining calls.
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
- if printTo == 0: printTo = self.LOG_TO_FILE
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None, printLogException = False):
+ if printTo == 0: printTo = self.LogTo.FILE
lineNo = inspect.currentframe().f_back.f_lineno
FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
if FuncAndLineNo in self.logLinePreviousHits:
@@ -288,49 +401,97 @@ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingC
self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
else:
self.logLinePreviousHits.append(FuncAndLineNo)
- self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+ self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii, printLogException=printLogException)
- def Warn(self, logMsg, printTo = 0, toAscii = None):
+ def Warn(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
if printTo == 0: printTo = self.log_to_wrn_set
lineNo = inspect.currentframe().f_back.f_lineno
- self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+ self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii, printLogException=printLogException)
- def Error(self, logMsg, printTo = 0, toAscii = None):
+ def Error(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
if printTo == 0: printTo = self.log_to_err_set
lineNo = inspect.currentframe().f_back.f_lineno
- self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+ self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii, printLogException=printLogException)
- def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+ # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
+ # The non-logging functions below use (lower) camelCase naming convention.
+ def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
if printTo == 0: printTo = self.log_to_norm
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
printTo, logLevel, lineNo)
- def ExecuteProcess(self, args, ExecDetach=False):
- import platform, subprocess
- is_windows = any(platform.win32_ver())
+ # Replaces obsolete UI settings variable with new name. Only use this with strings and numbers.
+ # Example usage:
+ # obsoleteSettingsToConvert = {"OldVariableName" : "NewVariableName", "AnotherOldVarName" : "NewName2"}
+ # stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "ObsoleteSettingsCheckVer2")
+ def replaceObsoleteSettings(self, settingSet:dict, SettingToCheckFirst="", init_defaults=False):
+ if SettingToCheckFirst == "" or self.Setting(SettingToCheckFirst) == False:
+ for key in settingSet:
+ obsoleteVar = self.Setting(key)
+ if isinstance(obsoleteVar, bool):
+ if obsoleteVar:
+ if self.Setting(settingSet[key]) == False:
+ self.Log(f"Detected obsolete (bool) settings ({key}). Moving obsolete settings to new setting name {settingSet[key]}.")
+ results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : False}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ else:
+ self.Log(f"Detected obsolete (bool) settings ({key}), and deleting it's content because new setting name ({settingSet[key]}) is already populated.")
+ results = self.configure_plugin(self.PLUGIN_ID, {key : False}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ elif isinstance(obsoleteVar, int): # Note: isinstance(x, int) is also True for bool, which is why the bool check comes first
+ if obsoleteVar > 0:
+ if self.Setting(settingSet[key]) > 0:
+ self.Log(f"Detected obsolete (int) settings ({key}), and deleting it's content because new setting name ({settingSet[key]}) is already populated.")
+ results = self.configure_plugin(self.PLUGIN_ID, {key : 0}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ else:
+ self.Log(f"Detected obsolete (int) settings ({key}). Moving obsolete settings to new setting name {settingSet[key]}.")
+ results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : 0}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ elif obsoleteVar != "":
+ if self.Setting(settingSet[key]) == "":
+ self.Log(f"Detected obsolete (str) settings ({key}). Moving obsolete settings to new setting name {settingSet[key]}.")
+ results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : ""}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ else:
+ self.Log(f"Detected obsolete (str) settings ({key}), and deleting it's content because new setting name ({settingSet[key]}) is already populated.")
+ results = self.configure_plugin(self.PLUGIN_ID, {key : ""}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ if SettingToCheckFirst != "":
+ results = self.configure_plugin(self.PLUGIN_ID, {SettingToCheckFirst : True}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+
+
+ def executeProcess(self, args, ExecDetach=False):
pid = None
- self.Trace(f"is_windows={is_windows} args={args}")
- if is_windows:
+ self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}")
+ if self.IS_WINDOWS:
if ExecDetach:
- self.Trace("Executing process using Windows DETACHED_PROCESS")
+ self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})")
DETACHED_PROCESS = 0x00000008
pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
else:
pid = subprocess.Popen(args, shell=True).pid
else:
- self.Trace("Executing process using normal Popen")
- pid = subprocess.Popen(args).pid
+ if ExecDetach:
+ # For a detached process on Linux, prepend nohup, e.g. subprocess.Popen(["nohup", "python", "test.py"])
+ if self.IS_LINUX:
+ args = ["nohup"] + args
+ self.Trace(f"Executing detached process using Popen({args})")
+ else:
+ self.Trace(f"Executing process using normal Popen({args})")
+ pid = subprocess.Popen(args).pid # On detach, macOS may need subprocess.Popen(args, shell=True, start_new_session=True)
self.Trace(f"pid={pid}")
return pid
- def ExecutePythonScript(self, args, ExecDetach=True):
+ def executePythonScript(self, args, ExecDetach=True):
PythonExe = f"{sys.executable}"
argsWithPython = [f"{PythonExe}"] + args
- return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
+ return self.executeProcess(argsWithPython,ExecDetach=ExecDetach)
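+ # Example usage (illustrative; scriptPath and the flag are hypothetical):
+ #     pid = stash.executePythonScript([scriptPath, "--dry-run"], ExecDetach=True)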
- def Submit(self, *args, **kwargs):
+ def submit(self, *args, **kwargs):
return self.thredPool.submit(*args, **kwargs)
def asc2(self, data, convertToAscii=None):
@@ -340,24 +501,282 @@ def asc2(self, data, convertToAscii=None):
# data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
# return str(data)[2:-1] # strip out b'str'
- def init_mergeMetadata(self, excludeMergeTags=None):
+ def initMergeMetadata(self, excludeMergeTags=None):
self.excludeMergeTags = excludeMergeTags
self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
- # Must call init_mergeMetadata, before calling merge_metadata
- def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
- if type(SrcData) is int:
- SrcData = self.find_scene(SrcData)
- DestData = self.find_scene(DestData)
- return self._mergeMetadata.merge(SrcData, DestData)
+ def mergeMetadata(self, SrcData, DestData, retryCount = 12, sleepSecondsBetweenRetry = 5, excludeMergeTags=None): # Input arguments can be scene ID or scene metadata
+ import requests
+ if self._mergeMetadata == None:
+ self.initMergeMetadata(excludeMergeTags)
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ if type(SrcData) is int:
+ SrcData = self.find_scene(SrcData)
+ DestData = self.find_scene(DestData)
+ return self._mergeMetadata.merge(SrcData, DestData)
+ except (requests.exceptions.ConnectionError, ConnectionResetError) as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+ def getUpdateProgressBarIter(self, qtyResults):
+ if qtyResults > 40000:
+ return 100
+ if qtyResults > 20000:
+ return 80
+ if qtyResults > 10000:
+ return 40
+ if qtyResults > 5000:
+ return 20
+ if qtyResults > 2000:
+ return 10
+ if qtyResults > 1000:
+ return 5
+ if qtyResults > 500:
+ return 3
+ if qtyResults > 200:
+ return 2
+ return 1
+
+ def enableProgressBar(self, enable=True):
+ self.progressBarIsEnabled = enable
+
+ # Use setProgressBarIter to reduce traffic to the server by only updating the progress bar every updateProgressbarOnIter iterations.
+ def setProgressBarIter(self, qtyResults):
+ if self.progressBarIsEnabled:
+ self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+ self.currentProgressbarIteration = 0
+
+ def progressBar(self, currentIndex, maxCount):
+ if self.progressBarIsEnabled:
+ if self.updateProgressbarOnIter > 0:
+ self.currentProgressbarIteration+=1
+ if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+ self.currentProgressbarIteration = 0
+ else:
+ return
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ try:
+ self.log.progress(progress)
+ except Exception as e:
+ pass
+
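+ # Example usage (illustrative; `scenes` here is any list of query results):
+ #     stash.enableProgressBar()
+ #     stash.setProgressBarIter(len(scenes))
+ #     for i, scene in enumerate(scenes):
+ #         stash.progressBar(i, len(scenes))
+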
+ def isDocker(self):
+ cgroup = pathlib.Path('/proc/self/cgroup')
+ return pathlib.Path('/.dockerenv').is_file() or (cgroup.is_file() and 'docker' in cgroup.read_text())
+
+ def isWindows(self):
+ if any(platform.win32_ver()):
+ return True
+ return False
+
+ def isLinux(self):
+ if platform.system().lower().startswith("linux"):
+ return True
+ return False
+
+ def isFreeBSD(self):
+ if platform.system().lower().startswith("freebsd"):
+ return True
+ return False
+
+ def isMacOS(self):
+ if sys.platform == "darwin":
+ return True
+ return False
- def Progress(self, currentIndex, maxCount):
- progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
- self.log.progress(progress)
+
+ def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
+ if trace:
+ self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
+ pos = 1
+ while self.stopProcessBarSpin == False:
+ if trace:
+ self.Trace(f"progressBar({pos}, {maxPos})")
+ self.progressBar(pos, maxPos)
+ pos +=1
+ if pos > maxPos:
+ pos = 1
+ time.sleep(sleepSeconds)
+
+ def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
+ self.stopProcessBarSpin = False
+ if trace:
+ self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}")
+ self.submit(self.spinProcessBar, sleepSeconds, maxPos, trace)
+
+ def stopSpinningProcessBar(self, sleepSeconds = 1):
+ self.stopProcessBarSpin = True
+ time.sleep(sleepSeconds)
+
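+ # Example usage (illustrative; sceneFilter is hypothetical): keep the UI progress bar moving during a long blocking call.
+ #     stash.startSpinningProcessBar()
+ #     scenes = stash.find_scenes(f=sceneFilter)
+ #     stash.stopSpinningProcessBar()
+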
+ def startsWithInList(self, listToCk, itemToCk):
+ itemToCk = itemToCk.lower()
+ for listItem in listToCk:
+ if itemToCk.startswith(listItem.lower()):
+ return True
+ return False
+
+ def indexStartsWithInList(self, listToCk, itemToCk):
+ itemToCk = itemToCk.lower()
+ index = -1
+ lenItemMatch = 0
+ returnValue = self.Constant.NOT_IN_LIST.value
+ for listItem in listToCk:
+ index += 1
+ if itemToCk.startswith(listItem.lower()):
+ if len(listItem) > lenItemMatch: # Make sure the best match is selected by getting match with longest string.
+ lenItemMatch = len(listItem)
+ returnValue = index
+ return returnValue
+
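+ # Example (illustrative): returns the index of the longest matching prefix,
+ # or Constant.NOT_IN_LIST.value when no list item is a prefix of itemToCk.
+ #     stash.indexStartsWithInList(["C:\\Media", "C:\\Media\\Archive"], "C:\\Media\\Archive\\clip.mp4") # returns 1
+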
+ def checkIfTagInlist(self, somelist, tagName, trace=False):
+ tagId = self.find_tags(q=tagName)
+ if len(tagId) > 0 and 'id' in tagId[0]:
+ tagId = tagId[0]['id']
+ else:
+ self.Warn(f"Could not find tag ID for tag '{tagName}'.")
+ return
+ somelist = somelist.split(",")
+ if trace:
+ self.Trace("#########################################################################")
+ scenes = self.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details')
+ qtyResults = len(scenes)
+ self.Log(f"Found {qtyResults} scenes with tag ({tagName})")
+ Qty = 0
+ for scene in scenes:
+ Qty+=1
+ if self.startsWithInList(somelist, scene['files'][0]['path']):
+ self.Log(f"Found scene part of list; {scene['files'][0]['path']}")
+ elif trace:
+ self.Trace(f"Not part of list; {scene['files'][0]['path']}")
- def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
+ def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False):
+ tagId = self.find_tags(q=tagName)
+ if len(tagId):
+ tagId = tagId[0]
+ if deleteIfExist:
+ self.destroy_tag(int(tagId['id']))
+ else:
+ return tagId['id']
+ tagId = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag})
+ self.Log(f"Dup-tagId={tagId['id']}")
+ return tagId['id']
+
+ def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata
+ scene_details = scene
+ if isinstance(scene, int) or 'id' not in scene:
+ scene_details = self.find_scene(scene)
+ tagIds = []
+ doesHaveTagName = False
+ for tag in scene_details['tags']:
+ if tag['name'] != tagName:
+ tagIds += [tag['id']]
+ else:
+ doesHaveTagName = True
+ if doesHaveTagName:
+ dataDict = {'id' : scene_details['id']}
+ dataDict.update({'tag_ids' : tagIds})
+ self.update_scene(dataDict)
+ return doesHaveTagName
+
+ def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): # scene can be scene ID or scene metadata
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ scene_details = scene
+ if isinstance(scene, int) or 'id' not in scene:
+ scene_details = self.find_scene(scene)
+ tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)]
+ for tag in scene_details['tags']:
+ if tag['name'] == tagName:
+ return False
+ else:
+ tagIds += [tag['id']]
+ dataDict = {'id' : scene_details['id']}
+ dataDict.update({'tag_ids' : tagIds})
+ self.update_scene(dataDict)
+ return True
+ except ConnectionResetError as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
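+ # Example usage (illustrative; sceneId is a hypothetical scene ID):
+ #     if stash.addTag(sceneId, "_DuplicateMarkForDeletion_0"):
+ #         stash.Log(f"Tagged scene {sceneId}")
+ #     else:
+ #         stash.Trace("Scene already had the tag")
+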
+ def copyFields(self, srcData, fieldsToCpy):
+ destData = {}
+ for key in srcData:
+ if key in fieldsToCpy:
+ destData.update({key : srcData[key]})
+ return destData
+
+ def renameTag(self,oldTagName, newTagName):
+ tagMetadata = self.find_tags(q=oldTagName)
+ if len(tagMetadata) > 0 and 'id' in tagMetadata[0]:
+ if tagMetadata[0]['name'] == newTagName:
+ return False
+ tagMetadata[0]['name'] = newTagName
+ fieldsToCpy = ["id", "name", "description", "aliases", "ignore_auto_tag", "favorite", "image", "parent_ids", "child_ids"]
+ tagUpdateInput = self.copyFields(tagMetadata[0], fieldsToCpy)
+ self.Trace(f"Renaming tag using tagUpdateInput = {tagUpdateInput}")
+ self.update_tag(tagUpdateInput)
+ return True
+ return False
+
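+ # Example usage (illustrative; tag names are hypothetical):
+ #     stash.renameTag("DuplicateMarkForDeletion", "_DuplicateMarkForDeletion")
+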
+ def updateScene(self, update_input, create=False, retryCount = 24, sleepSecondsBetweenRetry = 5):
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ return self.update_scene(update_input, create)
+ except ConnectionResetError as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+ def destroyScene(self, scene_id, delete_file=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ if i > 0:
+ # Check if file still exist
+ scene = self.find_scene(scene_id)
+ if scene == None or len(scene) == 0:
+ self.Warn(f"Scene {scene_id} not found in Stash.")
+ return False
+ return self.destroy_scene(scene_id, delete_file)
+ except ConnectionResetError as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [destroyScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+ def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
"""Runs a plugin operation.
The operation is run immediately and does not use the job queue.
+ This is a blocking call, and it does not return until the plugin completes.
Args:
plugin_id (ID): plugin_id
task_mode (str, optional): Plugin task to perform
@@ -375,30 +794,73 @@ def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
"args": args,
}
if asyn:
- self.Submit(self.call_GQL, query, variables)
+ self.submit(self.call_GQL, query, variables)
return f"Made asynchronous call for plugin {plugin_id}"
else:
return self.call_GQL(query, variables)
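+ # Example usage (illustrative; the task mode name is hypothetical):
+ #     stash.runPlugin("DupFileManager", "tag_duplicates_task", asyn=True)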
-
- def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
- query = """
- query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
- findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
- ...SceneSlim
- }
- }
- """
- if fragment:
- query = re.sub(r'\.\.\.SceneSlim', fragment, query)
- else:
- query += "fragment SceneSlim on Scene { id }"
-
- variables = { "distance": distance, "duration_diff": duration_diff }
- result = self.call_GQL(query, variables)
- return result['findDuplicateScenes']
- # #################################################################################################
- # The below functions extends class StashInterface with functions which are not yet in the class
+ def stopJobs(self, startPos = 0, startsWith = ""):
+ taskQue = self.job_queue()
+ if taskQue != None:
+ count = 0
+ for jobDetails in taskQue:
+ count+=1
+ if count > startPos:
+ if startsWith == "" or jobDetails['description'].startswith(startsWith):
+ self.Log(f"Killing Job ID({jobDetails['id']}); description={jobDetails['description']}")
+ self.stop_job(jobDetails['id'])
+ else:
+ self.Log(f"Excluding Job ID({jobDetails['id']}); description={jobDetails['description']}; {jobDetails})")
+ else:
+ self.Log(f"Skipping Job ID({jobDetails['id']}); description={jobDetails['description']}; {jobDetails})")
+
+ def toJson(self, data, replaceSingleQuote=False):
+ if replaceSingleQuote:
+ data = data.replace("'", '"')
+ data = data.replace("\\", "\\\\")
+ data = data.replace("\\\\\\\\", "\\\\")
+ return json.loads(data)
+
+ def isCorrectDbVersion(self, verNumber = 68):
+ results = self.sql_query("select version from schema_migrations")
+ # self.Log(results)
+ if len(results['rows']) == 0 or len(results['rows'][0]) == 0:
+ return False
+ return int(results['rows'][0][0]) == verNumber
+
+ def renameFileNameInDB(self, fileId, oldName, newName, UpdateUsingIdOnly = False):
+ if self.isCorrectDbVersion():
+ query = f'update files set basename = "{newName}" where basename = "{oldName}" and id = {fileId};'
+ if UpdateUsingIdOnly:
+ query = f'update files set basename = "{newName}" where id = {fileId};'
+ self.Trace(f"Executing query ({query})")
+ results = self.sql_commit(query)
+ if 'rows_affected' in results and results['rows_affected'] == 1:
+ return True
+ return False
+
+ def getFileNameFromDB(self, id):
+ results = self.sql_query(f'select basename from files where id = {id};')
+ self.Trace(f"results = ({results})")
+ if len(results['rows']) == 0 or len(results['rows'][0]) == 0:
+ return None
+ return results['rows'][0][0]
+
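+ # Example usage (illustrative; fileId is a hypothetical file ID):
+ #     if stash.renameFileNameInDB(fileId, "old.mp4", "new.mp4"):
+ #         stash.Trace(f"Basename is now {stash.getFileNameFromDB(fileId)}")
+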
+ # ############################################################################################################
+ # Functions which are candidates to be added to parent class use snake_case naming convention.
+ # ############################################################################################################
+ # The below functions extend class StashInterface with functions which are not yet in the class,
+ # or fix functions which have not yet made it into the official class.
+ def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan
+ query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }"
+ scan_metadata_input = {"paths": paths}
+ if flags:
+ scan_metadata_input.update(flags)
+ elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"):
+ scan_metadata_input.update(scan_config)
+ result = self.call_GQL(query, {"input": scan_metadata_input})
+ return result["metadataScan"]
+
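+ # Example usage (illustrative; the path is hypothetical):
+ #     stash.metadata_scan(paths=["/media/videos"])
+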
def get_all_scenes(self):
query_all_scenes = """
query AllScenes {
@@ -451,6 +913,43 @@ def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails
def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
+
+ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
+ query = """
+ query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
+ findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+ ...SceneSlim
+ }
+ }
+ """
+ if fragment:
+ query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+ else:
+ query += "fragment SceneSlim on Scene { id }"
+
+ variables = { "distance": distance, "duration_diff": duration_diff }
+ result = self.call_GQL(query, variables)
+ return result['findDuplicateScenes']
+
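+ # Example usage (illustrative): fetch duplicate groups at exact-match distance, including file paths.
+ #     dupGroups = stash.find_duplicate_scenes_diff(distance=PhashDistance.EXACT,
+ #         fragment='id files {path}', duration_diff=10.00)
+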
+ # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ # Direct SQL associated functions
+ def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata
+ results = None
+ if data == None:
+ return results
+ if isinstance(data, dict) and 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]:
+ results = self.sql_query(f"select * from files where id = {data['files'][0]['id']}")
+ else:
+ results = self.sql_query(f"select * from files where id = {data}")
+ if raw_data:
+ return results
+ if 'rows' in results:
+ return results['rows'][0]
+ self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.")
+ return None
+
+ def set_file_basename(self, id, basename):
+ return self.sql_commit(f"update files set basename = '{basename}' where id = {id}")
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
srcData = None
@@ -471,7 +970,8 @@ def merge(self, SrcData, DestData):
self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags)
self.mergeItems('performers', 'performer_ids', [])
self.mergeItems('galleries', 'gallery_ids', [])
- self.mergeItems('movies', 'movies', [])
+ # The movies field appears to have been removed in newer Stash versions
+ # self.mergeItems('movies', 'movies', [])
self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL)
self.mergeItem('studio', 'studio_id', 'id')
self.mergeItem('title')
@@ -524,3 +1024,54 @@ def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith
listToAdd += [item['id']]
self.dataDict.update({ updateFieldName : listToAdd})
# self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
+
+class taskQueue:
+ taskqueue = None
+ def __init__(self, taskqueue):
+ self.taskqueue = taskqueue
+
+ def tooManyScanOnTaskQueue(self, tooManyQty = 5):
+ count = 0
+ if self.taskqueue == None:
+ return False
+ for jobDetails in self.taskqueue:
+ if jobDetails['description'] == "Scanning...":
+ count += 1
+ if count < tooManyQty:
+ return False
+ return True
+
+ def cleanJobOnTaskQueue(self):
+ for jobDetails in self.taskqueue:
+ if jobDetails['description'] == "Cleaning...":
+ return True
+ return False
+
+ def cleanGeneratedJobOnTaskQueue(self):
+ for jobDetails in self.taskqueue:
+ if jobDetails['description'] == "Cleaning generated files...":
+ return True
+ return False
+
+ def isRunningPluginTaskJobOnTaskQueue(self, taskName):
+ for jobDetails in self.taskqueue:
+ if jobDetails['description'] == "Running plugin task: {taskName}":
+ return True
+ return False
+
+ def tagDuplicatesJobOnTaskQueue(self):
+ return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")
+
+ def clearDupTagsJobOnTaskQueue(self):
+ return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")
+
+ def generatePhashMatchingJobOnTaskQueue(self):
+ return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")
+
+ def deleteDuplicatesJobOnTaskQueue(self):
+ return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")
+
+ def deleteTaggedScenesJobOnTaskQueue(self):
+ return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")
+
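+# Example usage (illustrative; assumes `stash` is an initialized StashPluginHelper):
+#     tq = taskQueue(stash.job_queue())
+#     if tq.tagDuplicatesJobOnTaskQueue():
+#         stash.Log("A [Tag Duplicates] task is already on the queue")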
+
diff --git a/plugins/DupFileManager/advance_options.html b/plugins/DupFileManager/advance_options.html
new file mode 100644
index 00000000..1f5e5135
--- /dev/null
+++ b/plugins/DupFileManager/advance_options.html
@@ -0,0 +1,2708 @@
+
+ DupFileManager Advance Menus
+
+ Create report with different
+ [Match Duplicate Distance] options
+ Overrides user [Match Duplicate Distance] and
+ [significantTimeDiff] settings
+
+ Create Report with Tagging
+
+ Create Report without Tagging
+
+ Details:
+
+ - Match Duplicate Distance Number Details
+
+ - Exact Match
+
+ - Safest and most reliable option
+ - Uses tag name _DuplicateMarkForDeletion_0
+ -
+ Has the fewest results, and it's very rare to have false
+ matches.
+
+
+ - High Match
+
+ - Recommended Setting
+ - Safe and usually reliable
+ - Uses tag name _DuplicateMarkForDeletion_1
+ -
+ Scenes tagged by Exact Match will have both tags
+ (_DuplicateMarkForDeletion_0 and
+ _DuplicateMarkForDeletion_1)
+
+
+ - Medium Match
+
+ - Not so safe. Some false matches
+ -
+ To reduce false matches use a time difference of .96 or
+ higher.
+
+ - Uses tag name _DuplicateMarkForDeletion_2
+ - Scenes tagged by 0 and 1 will have three tags.
+
+ - Low Match
+
+ - Unsafe, and many false matches
+ -
+ To reduce false matches use a time difference of .98 or
+ higher.
+
+ - Uses tag name _DuplicateMarkForDeletion_3
+ - Scenes tagged by 0, 1, and 2 will have four tags.
+ - Has the most results, but with many false matches.
+
+
+ - Time Difference
+
+ -
+ Significant time difference setting, where 1 equals 100% and
+ (.9) equals 90%.
+
+ -
+ This setting overrides the setting in
+ DupFileManager_config.py.
+
+
+ -
+ See setting significantTimeDiff in
+ DupFileManager_config.py
+
+
+ -
+ This setting is generally not useful for
+ [Exact Match] reports.
+
+ -
+ This is an important setting when creating Low or Medium match
+ reports. It will reduce false matches.
+
+
+ - Report with tagging
+
+ -
+ Reports with tagging will work with the above
+ DupFileManager Advance Menu.
+
+ - The report can take several minutes to complete.
+ -
+ It takes much more time to produce a report with tagging
+ compared to creating a report without tagging.
+
+
+ - Report WITHOUT tagging
+
+ -
+ Reports with no tagging can NOT be
+ used with the above DupFileManager Advance Menu.
+
+ -
+ The report is created much faster. It usually takes a few
+ seconds to complete.
+
+ -
+ This is the recommended report type to create if the
+ DupFileManager Advance Menu is not needed or desired.
+
+
+
+
diff --git a/plugins/DupFileManager/requirements.txt b/plugins/DupFileManager/requirements.txt
index d503550d..19069845 100644
--- a/plugins/DupFileManager/requirements.txt
+++ b/plugins/DupFileManager/requirements.txt
@@ -1,4 +1,3 @@
stashapp-tools >= 0.2.50
-pyYAML
-watchdog
+requests
Send2Trash
\ No newline at end of file
|