Merge pull request #45 from westsurname/research-seasons
Re-search season packs on failure
westsurname authored Dec 23, 2024
2 parents 7cba287 + a872db0 commit 0f01275
Showing 5 changed files with 82 additions and 44 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -148,6 +148,7 @@ The script accepts the following arguments:
 - `--repair-interval`: Optional interval in smart format (e.g., '1h2m3s') to wait between repairing each media file.
 - `--run-interval`: Optional interval in smart format (e.g., '1w2d3h4m5s') to run the repair process.
 - `--mode`: Choose repair mode: `symlink` or `file`. `symlink` to repair broken symlinks and `file` to repair missing files. (default: 'symlink').
+- `--season-packs`: Upgrade to season-packs when a non-season-pack is found. Only applicable in symlink mode.
 - `--include-unmonitored`: Include unmonitored media in the repair process.
 ### Warning
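For context, a hypothetical invocation combining the new flag with the existing interval options (command sketch, not part of this diff):

python repair.py --season-packs --repair-interval 1m --run-interval 1d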
53 changes: 44 additions & 9 deletions blackhole.py
@@ -48,12 +48,29 @@ def __init__(self, isTorrentOrMagnet, isDotTorrentFile) -> None:
     def __init__(self, filename, isRadarr) -> None:
         print('filename:', filename)
         baseBath = getPath(isRadarr)
-        uniqueId = str(uuid.uuid4())[:8] # Generate a unique identifier
+        uniqueId = str(uuid.uuid4())[:8]
         isDotTorrentFile = filename.casefold().endswith('.torrent')
         isTorrentOrMagnet = isDotTorrentFile or filename.casefold().endswith('.magnet')
         filenameWithoutExt, ext = os.path.splitext(filename)
         filePath = os.path.join(baseBath, filename)
-        filePathProcessing = os.path.join(baseBath, 'processing', f"{filenameWithoutExt}_{uniqueId}{ext}")
+
+        # Get the maximum filename length for the target directory
+        try:
+            maxNameBytes = os.pathconf(baseBath, 'PC_NAME_MAX')
+        except (AttributeError, ValueError, OSError):
+            maxNameBytes = 255
+
+        # Calculate space needed for uniqueId, separator, and extension
+        extraBytes = len(f"_{uniqueId}{ext}".encode())
+
+        # Truncate the filename if needed
+        if len(filenameWithoutExt.encode()) > maxNameBytes - extraBytes:
+            processingName = truncateBytes(filenameWithoutExt, maxNameBytes - extraBytes)
+            print(f"Truncated filename from {len(filenameWithoutExt.encode())} to {len(processingName.encode())} bytes")
+        else:
+            processingName = filenameWithoutExt
+
+        filePathProcessing = os.path.join(baseBath, 'processing', f"{processingName}_{uniqueId}{ext}")
         folderPathCompleted = os.path.join(baseBath, 'completed', filenameWithoutExt)
 
         self.fileInfo = self.FileInfo(filename, filenameWithoutExt, filePath, filePathProcessing, folderPathCompleted)
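To make the byte budget above concrete, a small worked example (values hypothetical, not from this commit):

# An 8-character uniqueId plus a '.torrent' extension reserve
# 1 + 8 + 8 = 17 bytes, so under the 255-byte fallback limit the
# filename stem keeps at most 238 bytes.
uniqueId = 'a1b2c3d4'
ext = '.torrent'
extraBytes = len(f"_{uniqueId}{ext}".encode())  # 17
print(255 - extraBytes)  # 238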
@@ -85,6 +102,11 @@ def cleanFileName(name):
 
 refreshingTask = None
 
+def truncateBytes(text: str, maxBytes: int) -> str:
+    """Truncate a string to a maximum number of bytes in UTF-8 encoding."""
+    encoded = text.encode()
+    return encoded[:maxBytes].decode(errors='ignore')
+
 async def refreshArr(arr: Arr, count=60):
     # TODO: Change to refresh until found/imported
     async def refresh():
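A quick sanity check of truncateBytes on multibyte input (example values hypothetical): slicing the encoded bytes can cut a codepoint in half, and decode(errors='ignore') silently drops the dangling fragment rather than raising.

s = 'café'                                  # 5 bytes in UTF-8: 'é' encodes as 2 bytes
print(truncateBytes(s, 4))                  # 'caf' -- the partial 'é' is discarded
print(len(truncateBytes(s, 4).encode()))    # 3, safely under the 4-byte budget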
@@ -165,8 +187,7 @@ def print(*values: object):
         # Send progress to arr
         progress = info['progress']
         print(f"Progress: {progress:.2f}%")
-        if torrent.incompatibleHashSize and torrent.failIfNotCached:
-            print("Non-cached incompatible hash sized torrent")
+        if torrent.skipAvailabilityCheck and torrent.failIfNotCached:
             torrent.delete()
             return False
         await asyncio.sleep(1)
@@ -295,7 +316,7 @@ async def is_accessible(path, timeout=10):
         results = await asyncio.gather(*(processTorrent(torrent, file, arr) for torrent in torrents))
 
         if not any(results):
-            await asyncio.gather(*(fail(torrent, arr) for torrent in torrents))
+            await asyncio.gather(*(fail(torrent, arr, isRadarr) for torrent in torrents))
     else:
         for i, constructor in enumerate(torrentConstructors):
             isLast = (i == len(torrentConstructors) - 1)
@@ -304,7 +325,7 @@ async def is_accessible(path, timeout=10):
             if await processTorrent(torrent, file, arr):
                 break
             elif isLast:
-                await fail(torrent, arr)
+                await fail(torrent, arr, isRadarr)
 
     os.remove(file.fileInfo.filePathProcessing)
 except:
@@ -315,7 +336,7 @@ async def is_accessible(path, timeout=10):
 
         discordError(f"Error processing {file.fileInfo.filenameWithoutExt}", e)
 
-async def fail(torrent: TorrentBase, arr: Arr):
+async def fail(torrent: TorrentBase, arr: Arr, isRadarr):
     _print = globals()['print']
 
     def print(*values: object):
@@ -324,16 +345,30 @@ def print(*values: object):
     print(f"Failing")
 
     torrentHash = torrent.getHash()
-    history = await asyncio.to_thread(arr.getHistory, blackhole['historyPageSize'])
+    history = await asyncio.to_thread(arr.getHistory, blackhole['historyPageSize'], includeGrandchildDetails=True)
     items = [item for item in history if (item.torrentInfoHash and item.torrentInfoHash.casefold() == torrentHash.casefold()) or cleanFileName(item.sourceTitle.casefold()) == torrent.file.fileInfo.filenameWithoutExt.casefold()]
 
     if not items:
         message = "No history items found to mark as failed. Arr will not attempt to grab an alternative."
         print(message)
         discordError(message, torrent.file.fileInfo.filenameWithoutExt)
     else:
         # TODO: See if we can fail without blacklisting as cached items constantly changes
+        firstItem = items[0]
+        isSeasonPack = firstItem.releaseType == 'SeasonPack'
+
+        # For season packs, we only need to fail one episode and trigger one search
+        items = [firstItem] if isSeasonPack else items
+
+        # Mark items as failed
         failTasks = [asyncio.to_thread(arr.failHistoryItem, item.id) for item in items]
         await asyncio.gather(*failTasks)
+
+        # For season packs, trigger a new search
+        if isSeasonPack:
+            for item in items:
+                series = await asyncio.to_thread(arr.get, item.grandparentId)
+                await asyncio.to_thread(arr.automaticSearch, series, item.parentId)
 
     print(f"Failed")
 
 def getFiles(isRadarr):
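Taken together, the season-pack branch of fail() reduces to this sequence (a summary of the calls shown in the diff above, not additional code from the commit):

# 1. getHistory(..., includeGrandchildDetails=True) exposes episode and series IDs.
# 2. failHistoryItem(firstItem.id) blacklists a single episode grab instead of every item.
# 3. arr.get(item.grandparentId) fetches the series record.
# 4. arr.automaticSearch(series, item.parentId) triggers a fresh search for that season.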
11 changes: 10 additions & 1 deletion repair.py
@@ -29,6 +29,7 @@ def parseInterval(intervalStr):
 parser.add_argument('--repair-interval', type=str, default=repair['repairInterval'], help='Optional interval in smart format (e.g. 1h2m3s) to wait between repairing each media file.')
 parser.add_argument('--run-interval', type=str, default=repair['runInterval'], help='Optional interval in smart format (e.g. 1w2d3h4m5s) to run the repair process.')
 parser.add_argument('--mode', type=str, choices=['symlink', 'file'], default='symlink', help='Choose repair mode: `symlink` or `file`. `symlink` to repair broken symlinks and `file` to repair missing files.')
+parser.add_argument('--season-packs', action='store_true', help='Upgrade to season-packs when a non-season-pack is found. Only applicable in symlink mode.')
 parser.add_argument('--include-unmonitored', action='store_true', help='Include unmonitored media in the repair process')
 args = parser.parse_args()

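The help text says --season-packs only applies in symlink mode, but the diff adds no explicit guard; a minimal one could look like this (sketch, not part of this commit):

if args.season_packs and args.mode != 'symlink':
    parser.error('--season-packs is only applicable in symlink mode')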
@@ -127,9 +128,17 @@ def main():
                 if childId in media.fullyAvailableChildrenIds and len(parentFolders) > 1:
                     print("Title:", media.title)
                     print("Movie ID/Season Number:", childId)
-                    print("Inconsistent folders:")
+                    print("Non-season-pack folders:")
                     [print(parentFolder) for parentFolder in parentFolders]
                     print()
+                    if args.season_packs:
+                        print("Searching for season-pack")
+                        results = arr.automaticSearch(media, childId)
+                        print(results)
+
+                    if repairIntervalSeconds > 0:
+                        time.sleep(repairIntervalSeconds)
+
     except Exception:
         e = traceback.format_exc()
 
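The smart-format strings ('1h2m3s', '1w2d3h4m5s') are handled by parseInterval, whose body sits outside this diff; one plausible reading of the format (assumed, not the actual implementation) is:

import re

def parse_interval_sketch(intervalStr: str) -> int:
    """Convert a '1w2d3h4m5s'-style string to seconds."""
    units = {'w': 604800, 'd': 86400, 'h': 3600, 'm': 60, 's': 1}
    return sum(int(n) * units[u] for n, u in re.findall(r'(\d+)([wdhms])', intervalStr or ''))

# parse_interval_sketch('1h2m3s') == 3723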
22 changes: 22 additions & 0 deletions shared/arr.py
@@ -212,11 +212,22 @@ def sourceTitle(self):
     def torrentInfoHash(self):
         return self.json['data'].get('torrentInfoHash')
 
+    @property
+    def releaseType(self):
+        """Get the release type from the history item data."""
+        return self.json['data'].get('releaseType')
+
     @property
     @abstractmethod
     def parentId(self):
         pass
 
+    @property
+    @abstractmethod
+    def grandparentId(self):
+        """Get the top-level ID (series ID for episodes, same as parentId for movies)."""
+        pass
+
     @property
     @abstractmethod
     def isFileDeletedEvent(self):
@@ -227,6 +238,11 @@ class MovieHistory(MediaHistory):
     def parentId(self):
         return self.json['movieId']
 
+    @property
+    def grandparentId(self):
+        """For movies, grandparent ID is the same as parent ID."""
+        return self.parentId
+
     @property
     def isFileDeletedEvent(self):
         return self.eventType == 'movieFileDeleted'
@@ -237,6 +253,12 @@ class EpisodeHistory(MediaHistory):
     def parentId(self):
         return self.json['episode']['seasonNumber']
 
+    @property
+    # Requires includeGrandchildDetails to be true
+    def grandparentId(self):
+        """Get the series ID from the history item."""
+        return self.json['episode']['seriesId']
+
     @property
     def isFileDeletedEvent(self):
         return self.eventType == 'episodeFileDeleted'
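For reference, the accessors above imply a history record shaped roughly like this (layout inferred from the code, values hypothetical; 'episode' is only present when getHistory is called with includeGrandchildDetails=True):

record = {
    'data': {'torrentInfoHash': 'abc123...', 'releaseType': 'SeasonPack'},
    'episode': {'seasonNumber': 2, 'seriesId': 42},
}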
39 changes: 5 additions & 34 deletions shared/debrid.py
@@ -108,7 +108,7 @@ def __init__(self, f, fileData, file, failIfNotCached, onlyLargestFile) -> None:
         self.file = file
         self.failIfNotCached = failIfNotCached
         self.onlyLargestFile = onlyLargestFile
-        self.incompatibleHashSize = False
+        self.skipAvailabilityCheck = False
         self.id = None
         self._info = None
         self._hash = None
@@ -173,31 +173,11 @@ def submitTorrent(self):
         return not not self.addTorrent()
 
     def _getInstantAvailability(self, refresh=False):
-        if refresh or not self._instantAvailability:
-            torrentHash = self.getHash()
-            self.print('hash:', torrentHash)
-
-            if len(torrentHash) != 40 or True:
-                self.incompatibleHashSize = True
-                return True
-
-            instantAvailabilityRequest = retryRequest(
-                lambda: requests.get(urljoin(realdebrid['host'], f"torrents/instantAvailability/{torrentHash}"), headers=self.headers),
-                print=self.print
-            )
-            if instantAvailabilityRequest is None:
-                return None
+        torrentHash = self.getHash()
+        self.print('hash:', torrentHash)
+        self.skipAvailabilityCheck = True
 
-            instantAvailabilities = instantAvailabilityRequest.json()
-            self.print('instantAvailabilities:', instantAvailabilities)
-            if not instantAvailabilities: return
-
-            instantAvailabilityHosters = next(iter(instantAvailabilities.values()))
-            if not instantAvailabilityHosters: return
-
-            self._instantAvailability = next(iter(instantAvailabilityHosters.values()))
-
-            return self._instantAvailability
+        return True
 
     def _getAvailableHost(self):
         availableHostsRequest = retryRequest(
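Reassembled from the added lines above, the method after this change reduces to a stub that always reports availability and flags the check as skipped:

    def _getInstantAvailability(self, refresh=False):
        torrentHash = self.getHash()
        self.print('hash:', torrentHash)
        self.skipAvailabilityCheck = True

        return True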
@@ -248,15 +228,6 @@ async def selectFiles(self):
             largestMediaFileId = str(largestMediaFile['id'])
             self.print('only largest file:', self.onlyLargestFile)
             self.print('largest file:', largestMediaFile)
-
-            if self.failIfNotCached and not self.incompatibleHashSize:
-                targetFileIds = {largestMediaFileId} if self.onlyLargestFile else mediaFileIds
-                if not any(set(fileGroup.keys()) == targetFileIds for fileGroup in self._instantAvailability):
-                    extraFilesGroup = next((fileGroup for fileGroup in self._instantAvailability if largestMediaFileId in fileGroup.keys()), None)
-                    if self.onlyLargestFile and extraFilesGroup:
-                        self.print('extra files required for cache:', extraFilesGroup)
-                        discordUpdate('Extra files required for cache:', extraFilesGroup)
-                    return False
 
         if self.onlyLargestFile and len(mediaFiles) > 1:
             discordUpdate('largest file:', largestMediaFile['path'])