Commit 8eb34b2

misc fixes

westsurname authored Dec 22, 2024
1 parent 5513e0c commit 8eb34b2
Showing 4 changed files with 54 additions and 47 deletions.
43 changes: 32 additions & 11 deletions blackhole.py
@@ -48,12 +48,29 @@ def __init__(self, isTorrentOrMagnet, isDotTorrentFile) -> None:
     def __init__(self, filename, isRadarr) -> None:
         print('filename:', filename)
         baseBath = getPath(isRadarr)
-        uniqueId = str(uuid.uuid4())[:8] # Generate a unique identifier
+        uniqueId = str(uuid.uuid4())[:8]
         isDotTorrentFile = filename.casefold().endswith('.torrent')
         isTorrentOrMagnet = isDotTorrentFile or filename.casefold().endswith('.magnet')
         filenameWithoutExt, ext = os.path.splitext(filename)
         filePath = os.path.join(baseBath, filename)
-        filePathProcessing = os.path.join(baseBath, 'processing', f"{filenameWithoutExt}_{uniqueId}{ext}")
+
+        # Get the maximum filename length for the target directory
+        try:
+            maxNameBytes = os.pathconf(baseBath, 'PC_NAME_MAX')
+        except (AttributeError, ValueError, OSError):
+            maxNameBytes = 255
+
+        # Calculate space needed for uniqueId, separator, and extension
+        extraBytes = len(f"_{uniqueId}{ext}".encode())
+
+        # Truncate the filename if needed
+        if len(filenameWithoutExt.encode()) > maxNameBytes - extraBytes:
+            processingName = truncateBytes(filenameWithoutExt, maxNameBytes - extraBytes)
+            print(f"Truncated filename from {len(filenameWithoutExt.encode())} to {len(processingName.encode())} bytes")
+        else:
+            processingName = filenameWithoutExt
+
+        filePathProcessing = os.path.join(baseBath, 'processing', f"{processingName}_{uniqueId}{ext}")
         folderPathCompleted = os.path.join(baseBath, 'completed', filenameWithoutExt)

         self.fileInfo = self.FileInfo(filename, filenameWithoutExt, filePath, filePathProcessing, folderPathCompleted)
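
The added block keeps the processing filename inside the filesystem's per-name byte limit even after the `_<uniqueId>` suffix and extension are appended. A minimal standalone sketch of the same budget calculation; buildProcessingName is a hypothetical helper for illustration, not part of this commit:

import uuid

def buildProcessingName(filenameWithoutExt: str, ext: str, maxNameBytes: int = 255) -> str:
    # Reserve room for "_12345678" plus the extension, measured in UTF-8 bytes
    uniqueId = str(uuid.uuid4())[:8]
    extraBytes = len(f"_{uniqueId}{ext}".encode())
    budget = maxNameBytes - extraBytes
    stem = filenameWithoutExt.encode()[:budget].decode(errors='ignore')
    return f"{stem}_{uniqueId}{ext}"

# A 300-character stem still yields a name within the 255-byte default budget:
print(len(buildProcessingName('x' * 300, '.torrent').encode()))  # 255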
@@ -85,6 +102,11 @@ def cleanFileName(name):

 refreshingTask = None

+def truncateBytes(text: str, maxBytes: int) -> str:
+    """Truncate a string to a maximum number of bytes in UTF-8 encoding."""
+    encoded = text.encode()
+    return encoded[:maxBytes].decode(errors='ignore')
+
 async def refreshArr(arr: Arr, count=60):
     # TODO: Change to refresh until found/imported
     async def refresh():
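
Because the cut is made on the encoded bytes, decode(errors='ignore') silently drops any trailing partial code point instead of raising UnicodeDecodeError, so the result can come out slightly shorter than maxBytes:

name = 'Amélie'                              # 6 characters but 7 UTF-8 bytes ('é' is 2 bytes)
print(truncateBytes(name, 4))                # 'Amé' -- the cut lands on a character boundary
print(truncateBytes(name, 3))                # 'Am'  -- the dangling half of 'é' is dropped
print(len(truncateBytes(name, 3).encode()))  # 2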
@@ -165,8 +187,7 @@ def print(*values: object):
         # Send progress to arr
         progress = info['progress']
         print(f"Progress: {progress:.2f}%")
-        if torrent.incompatibleHashSize and torrent.failIfNotCached:
-            print("Non-cached incompatible hash sized torrent")
+        if torrent.skipAvailabilityCheck and torrent.failIfNotCached:
             torrent.delete()
             return False
         await asyncio.sleep(1)
@@ -315,19 +336,13 @@ async def is_accessible(path, timeout=10):

         discordError(f"Error processing {file.fileInfo.filenameWithoutExt}", e)

-def isSeasonPack(filename):
-    # Match patterns like 'S01' or 'Season 1' but not 'S01E01'
-    return bool(re.search(r'(?:S|Season\s*)(\d{1,2})(?!\s*E\d{2})', filename, re.IGNORECASE))
-
 async def fail(torrent: TorrentBase, arr: Arr, isRadarr):
     _print = globals()['print']

     def print(*values: object):
         _print(f"[{torrent.__class__.__name__}] [{torrent.file.fileInfo.filenameWithoutExt}]", *values)

     print(f"Failing")

-    isSeasonPack = isSeasonPack(torrent.file.fileInfo.filename)
-
     torrentHash = torrent.getHash()
     history = await asyncio.to_thread(arr.getHistory, blackhole['historyPageSize'])
@@ -338,11 +353,17 @@ def print(*values: object):
         print(message)
         discordError(message, torrent.file.fileInfo.filenameWithoutExt)
     else:
-        items = [item[0]] if not isRadarr and isSeasonPack else items
+        firstItem = items[0]
+        isSeasonPack = firstItem.releaseType == 'SeasonPack'
+
+        # For season packs, we only need to fail one episode and trigger one search
+        items = [firstItem] if not isRadarr and isSeasonPack else items

+        # Mark items as failed
         failTasks = [asyncio.to_thread(arr.failHistoryItem, item.id) for item in items]
         await asyncio.gather(*failTasks)

+        # For season packs in Sonarr, trigger a new search
         if not isRadarr and isSeasonPack:
             for item in items:
                 series = await asyncio.to_thread(arr.get, item.grandparentId)
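
Season packs are now identified from the grab's history metadata (releaseType) rather than by regex on the filename, which is why the isSeasonPack() helper above was dropped. A minimal model of the collapse, using a hypothetical stand-in for the history wrapper:

from dataclasses import dataclass

@dataclass
class HistoryItem:  # hypothetical stand-in for the shared/arr.py history wrapper
    id: int
    releaseType: str

items = [HistoryItem(1, 'SeasonPack'), HistoryItem(2, 'SeasonPack'), HistoryItem(3, 'SeasonPack')]
isRadarr = False
isSeasonPack = items[0].releaseType == 'SeasonPack'

# One failed episode is enough; the follow-up search re-grabs the whole season
items = [items[0]] if not isRadarr and isSeasonPack else items
print([item.id for item in items])  # [1]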
14 changes: 12 additions & 2 deletions repair.py
@@ -29,6 +29,8 @@ def parseInterval(intervalStr):
 parser.add_argument('--repair-interval', type=str, default=repair['repairInterval'], help='Optional interval in smart format (e.g. 1h2m3s) to wait between repairing each media file.')
 parser.add_argument('--run-interval', type=str, default=repair['runInterval'], help='Optional interval in smart format (e.g. 1w2d3h4m5s) to run the repair process.')
 parser.add_argument('--mode', type=str, choices=['symlink', 'file'], default='symlink', help='Choose repair mode: `symlink` or `file`. `symlink` to repair broken symlinks and `file` to repair missing files.')
+parser.add_argument('--season-packs', action='store_true', help='Upgrade to season-packs when a non-season-pack is found. Only applicable in symlink mode.')
+parser.add_argument('--soft-repair', action='store_true', help='Only search for missing files, do not delete or re-grab. This is always enabled in file mode.')
 parser.add_argument('--include-unmonitored', action='store_true', help='Include unmonitored media in the repair process')
 args = parser.parse_args()

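Hypothetical invocations combining the two new flags with the existing options (the flag spellings come from this diff; the interval values are examples):

python repair.py --mode symlink --season-packs --repair-interval 30s --run-interval 1d
python repair.py --mode symlink --soft-repair --no-confirm

With --soft-repair, the symlink-mode delete step is skipped via the `and not args.soft_repair` guard in the next hunk, so broken items are only searched for, matching what file mode already does.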
@@ -102,7 +104,7 @@ def main():
             if args.dry_run or args.no_confirm or input("Do you want to delete and re-grab? (y/n): ").lower() == 'y':
                 if not args.dry_run:
                     discordUpdate(f"[{args.mode}] Repairing {media.title}: {childId}")
-                    if args.mode == 'symlink':
+                    if args.mode == 'symlink' and not args.soft_repair:
                         print("Deleting files:")
                         [print(item.path) for item in childItems]
                         results = arr.deleteFiles(childItems)
@@ -127,9 +129,17 @@ def main():
             if childId in media.fullyAvailableChildrenIds and len(parentFolders) > 1:
                 print("Title:", media.title)
                 print("Movie ID/Season Number:", childId)
-                print("Inconsistent folders:")
+                print("Non-season-pack folders:")
                 [print(parentFolder) for parentFolder in parentFolders]
                 print()
+                if args.season_packs:
+                    print("Searching for season-pack")
+                    results = arr.automaticSearch(media, childId)
+                    print(results)
+
+            if repairIntervalSeconds > 0:
+                time.sleep(repairIntervalSeconds)
+
     except Exception:
         e = traceback.format_exc()

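parseInterval itself is outside this diff; for context, a minimal sketch of a parser consistent with the "smart format" described in the help text. This is an illustration, not the repository's actual implementation:

import re

def parseInterval(intervalStr: str) -> int:
    """Convert a '1w2d3h4m5s' style string to seconds (illustrative sketch)."""
    if not intervalStr:
        return 0
    units = {'w': 604800, 'd': 86400, 'h': 3600, 'm': 60, 's': 1}
    return sum(int(n) * units[u] for n, u in re.findall(r'(\d+)([wdhms])', intervalStr))

print(parseInterval('1h2m3s'))  # 3723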
5 changes: 5 additions & 0 deletions shared/arr.py
@@ -212,6 +212,11 @@ def sourceTitle(self):
     def torrentInfoHash(self):
         return self.json['data'].get('torrentInfoHash')

+    @property
+    def releaseType(self):
+        """Get the release type from the history item data."""
+        return self.json['data'].get('releaseType')
+
     @property
     @abstractmethod
     def parentId(self):
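
The property reads straight out of the raw history JSON, and since the 'data' object may omit the key, .get() yields None instead of raising. A self-contained illustration with a made-up record:

class HistoryItem:  # reduced stand-in for the abstract class above
    def __init__(self, json):
        self.json = json

    @property
    def releaseType(self):
        # The key may be absent in some history records, so fall back to None
        return self.json['data'].get('releaseType')

print(HistoryItem({'data': {'releaseType': 'SeasonPack'}}).releaseType)  # 'SeasonPack'
print(HistoryItem({'data': {}}).releaseType)                             # None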
39 changes: 5 additions & 34 deletions shared/debrid.py
@@ -108,7 +108,7 @@ def __init__(self, f, fileData, file, failIfNotCached, onlyLargestFile) -> None:
         self.file = file
         self.failIfNotCached = failIfNotCached
         self.onlyLargestFile = onlyLargestFile
-        self.incompatibleHashSize = False
+        self.skipAvailabilityCheck = False
         self.id = None
         self._info = None
         self._hash = None
@@ -173,31 +173,11 @@ def submitTorrent(self):
         return not not self.addTorrent()

     def _getInstantAvailability(self, refresh=False):
-        if refresh or not self._instantAvailability:
-            torrentHash = self.getHash()
-            self.print('hash:', torrentHash)
-
-            if len(torrentHash) != 40 or True:
-                self.incompatibleHashSize = True
-                return True
-
-            instantAvailabilityRequest = retryRequest(
-                lambda: requests.get(urljoin(realdebrid['host'], f"torrents/instantAvailability/{torrentHash}"), headers=self.headers),
-                print=self.print
-            )
-            if instantAvailabilityRequest is None:
-                return None
+        torrentHash = self.getHash()
+        self.print('hash:', torrentHash)
+        self.skipAvailabilityCheck = True

-            instantAvailabilities = instantAvailabilityRequest.json()
-            self.print('instantAvailabilities:', instantAvailabilities)
-            if not instantAvailabilities: return
-
-            instantAvailabilityHosters = next(iter(instantAvailabilities.values()))
-            if not instantAvailabilityHosters: return
-
-            self._instantAvailability = next(iter(instantAvailabilityHosters.values()))
-
-        return self._instantAvailability
+        return True

     def _getAvailableHost(self):
         availableHostsRequest = retryRequest(
@@ -248,15 +228,6 @@ async def selectFiles(self):
         largestMediaFileId = str(largestMediaFile['id'])
         self.print('only largest file:', self.onlyLargestFile)
         self.print('largest file:', largestMediaFile)
-
-        if self.failIfNotCached and not self.incompatibleHashSize:
-            targetFileIds = {largestMediaFileId} if self.onlyLargestFile else mediaFileIds
-            if not any(set(fileGroup.keys()) == targetFileIds for fileGroup in self._instantAvailability):
-                extraFilesGroup = next((fileGroup for fileGroup in self._instantAvailability if largestMediaFileId in fileGroup.keys()), None)
-                if self.onlyLargestFile and extraFilesGroup:
-                    self.print('extra files required for cache:', extraFilesGroup)
-                    discordUpdate('Extra files required for cache:', extraFilesGroup)
-                    return False

         if self.onlyLargestFile and len(mediaFiles) > 1:
             discordUpdate('largest file:', largestMediaFile['path'])
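
With these two hunks, _getInstantAvailability no longer calls Real-Debrid at all: it records the hash, marks the torrent to skip the cache check, and unconditionally reports availability, and selectFiles drops its cache-group comparison, which could only work with instantAvailability data. This is presumably a response to Real-Debrid retiring the torrents/instantAvailability endpoint; cache status can now only be discovered after the torrent is added, which is why the blackhole loop earlier deletes and fails failIfNotCached torrents via the skipAvailabilityCheck branch. A minimal model of the new contract, using a hypothetical stand-in class:

class Torrent:  # hypothetical stand-in, not the repo's TorrentBase
    def __init__(self, failIfNotCached: bool):
        self.failIfNotCached = failIfNotCached
        self.skipAvailabilityCheck = False

    def getInstantAvailability(self) -> bool:
        # No availability endpoint to ask anymore: assume cached and flag the skip
        self.skipAvailabilityCheck = True
        return True

t = Torrent(failIfNotCached=True)
assert t.getInstantAvailability()                     # always "available" up front
print(t.skipAvailabilityCheck and t.failIfNotCached)  # True -> delete/fail if it stalls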
