Skip to content

Commit

Permalink
Adding --list option
Browse files — browse the repository at this point in the history
  • Loading branch information
John Pfuntner committed Nov 20, 2024
1 parent 529bb72 commit 7f539ce
Showing 1 changed file with 52 additions and 42 deletions.
94 changes: 52 additions & 42 deletions bin/supercd.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,10 @@
# Maximum age, in days, before a cached pattern->path mapping is considered stale.
CACHE_THRESHOLD = 30

# Command-line interface.
# `pat` is optional (nargs='?') because --list needs no pattern; the
# pat/--list mutual exclusivity is enforced after parsing, later in the file.
parser = argparse.ArgumentParser(description='`Super cd` Python code')
parser.add_argument('pat', nargs='?', help='Glob pattern to search for')
parser.add_argument('-b', '--bash', action='store_true', help='Generate output to set bash array')
parser.add_argument('-c', '--cache', action='store_true', help='Use cache for results')
parser.add_argument('-l', '--list', action='store_true', help='List cache')
parser.add_argument('-v', '--verbose', action='count', help='Enable debugging')
args = parser.parse_args()

Expand All @@ -27,19 +28,33 @@

# Exit quietly instead of raising BrokenPipeError when stdout is piped to a
# consumer that closes early (e.g. `head`).
signal.signal(signal.SIGPIPE, lambda signum, stack_frame: exit(0))

# A pattern and --list are mutually exclusive, and exactly one is required.
if args.pat and args.list:
    parser.error('pattern and --list are mutually exclusive')

if not (args.pat or args.list):
    parser.error('pattern is required')

files = list()
now = datetime.datetime.now()
epoch = datetime.datetime.fromtimestamp(0)                  # reference point for POSIX-style timestamps
cache_threshold = datetime.timedelta(days=CACHE_THRESHOLD)  # max age of a cache entry
cache_file_name = os.path.expanduser('~/.supercd.json')

cache = dict()
cleanup = False  # set True when stale entries are evicted, forcing a cache rewrite

# Load the cache unconditionally (not only under --cache) so that --list can
# display it even when caching is not requested for this run.
if os.path.exists(cache_file_name):
    with open(cache_file_name) as stream:
        cache = json.load(stream)

if args.list:
table = bruno_tools.Table('Pattern', 'Path', 'Date')
for curr in sorted([{'pattern': pattern, 'path': curr['file'], 'date': curr['timestamp_human']} for (pattern, curr) in cache.items()], key=lambda curr: curr['date'], reverse=True):
table.add(curr['pattern'], curr['path'], curr['date'])
table.close()

else:
if args.cache:
if args.pat in cache:
log.info(f'Found {args.pat!r} in {cache_file_name}')
if datetime.datetime.fromtimestamp(cache[args.pat]['timestamp']) >= now - cache_threshold:
Expand All @@ -49,42 +64,37 @@
log.info(f'Pattern was last used {datetime.datetime.fromtimestamp(cache[args.pat]["timestamp"])!s} and is inelligible for reuse')
del cache[args.pat]
cleanup = True
else:
log.info(f'{cache_file_name} not found')

# do more thorough cleanup of old items
patterns = list(cache.keys())
for pattern in patterns:
if datetime.datetime.fromtimestamp(cache[pattern]['timestamp']) < now - cache_threshold:
log.info(f'Removing {pattern!r} since it was used {datetime.datetime.fromtimestamp(cache[pattern]["timestamp"])!s}')
del cache[pattern]
cleanup = True

if not files:
files = bruno_tools.run([
'find',
os.path.expanduser('~'),
'!', '-path', '*/.*',
'-type', 'd',
'-name', args.pat,
], log=log)[1].splitlines()

if args.cache and (len(files) == 1 or cleanup):
if len(files) == 1:
log.info(f'Equating {args.pat} with {files[0]} in {cache_file_name}')
cache[args.pat] = {
'file': files[0],
'timestamp': (now - epoch).total_seconds(),
'timestamp_human': str(now),
}
with open(cache_file_name, 'w') as stream:
json.dump(cache, stream)
log.info(f'Rewrote {cache_file_name}')

# Do a more thorough cleanup: evict every cache entry older than the
# threshold, regardless of which pattern was requested this run.
# Iterate over a snapshot of the keys since we delete during the loop.
for pattern in list(cache.keys()):
    last_used = datetime.datetime.fromtimestamp(cache[pattern]['timestamp'])
    if last_used < now - cache_threshold:
        log.info(f'Removing {pattern!r} since it was used {last_used!s}')
        del cache[pattern]
        cleanup = True

# Fall back to a filesystem search when the cache produced nothing.
# Paths with a hidden component (any segment starting with '.') are excluded.
if not files:
    files = bruno_tools.run([
        'find',
        os.path.expanduser('~'),
        '!', '-path', '*/.*',
        '-type', 'd',
        '-name', args.pat,
    ], log=log)[1].splitlines()

# Rewrite the cache when there is a unique match worth remembering, or when
# stale entries were evicted above.
if args.cache and (len(files) == 1 or cleanup):
    if len(files) == 1:
        log.info(f'Equating {args.pat} with {files[0]} in {cache_file_name}')
        cache[args.pat] = {
            'file': files[0],
            'timestamp': (now - epoch).total_seconds(),
            'timestamp_human': str(now),
        }
    with open(cache_file_name, 'w') as stream:
        json.dump(cache, stream)
    log.info(f'Rewrote {cache_file_name}')

# Emit the results: a bash array literal for the shell wrapper under --bash,
# otherwise one path per line.
if args.bash:
    print(f'({" ".join([repr(path) for path in files])})')
else:
    print('\n'.join(files))

0 comments on commit 7f539ce

Please sign in to comment.