Remove trailing commas to avoid black expanding code
From black 20.8b0's changelog:

```
re-implemented support for explicit trailing commas: now it works
consistently within any bracket pair, including nested structures (#1288
and duplicates)
```

We have lots of trailing commas throughout our source code, and they cause changes like this:

```
-    print('Brambox: %r, %r' % (brambox.__version__, brambox.__file__,))
+    print(
+        'Brambox: %r, %r'
+        % (
+            brambox.__version__,
+            brambox.__file__,
+        )
+    )
```

which adds lots of lines for no reason. There are more extreme cases where the
code becomes even less readable after black reformats it. The only way to avoid
this is to remove the trailing commas.
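
To see the behaviour in isolation, here is a minimal sketch (not part of this
commit) that runs black's Python API on the two variants; it assumes
black >= 20.8b0 is installed and uses its public format_str / FileMode helpers:

```
import black

# Keep single quotes, matching our codebase (equivalent to -S / skip string normalization).
mode = black.FileMode(string_normalization=False)

with_comma = "print('Brambox: %r, %r' % (brambox.__version__, brambox.__file__,))\n"
without_comma = "print('Brambox: %r, %r' % (brambox.__version__, brambox.__file__))\n"

# The trailing comma is treated as a "magic trailing comma", so black explodes
# the call across several lines even though it fits within the line length.
print(black.format_str(with_comma, mode=mode))

# Without the trailing comma the call stays on a single line.
print(black.format_str(without_comma, mode=mode))
```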
karenc committed Sep 22, 2020
1 parent 4315868 commit 1db82a3
Showing 122 changed files with 687 additions and 656 deletions.
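
For reference, a rough sketch of how the remaining call-site trailing commas
can be located before editing (a hypothetical helper, not part of this commit;
the regex is only a heuristic and also flags intentional one-element tuples
such as (sample,), so every hit still needs review):

```
import re
from pathlib import Path

# Matches a comma immediately before a closing bracket on the same line.
PATTERN = re.compile(r',\s*[)\]}]')

for path in Path('.').rglob('*.py'):
    for lineno, line in enumerate(path.read_text().splitlines(), start=1):
        if PATTERN.search(line):
            print('%s:%d: %s' % (path, lineno, line.strip()))
```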
2 changes: 1 addition & 1 deletion _dev/super_setup_old.py
@@ -1036,7 +1036,7 @@ def move_wildme(wbia_rman, fmt):
if origin_user not in remotes:
# first add a remote that is the original origin
origin_url = origin['url']
-print(' * Create remote %r: %r' % (origin_user, origin_url,))
+print(' * Create remote %r: %r' % (origin_user, origin_url))
gitrepo.create_remote(origin_user, origin_url)
# change origin to use wildme url
gitorigin = gitrepo.remote('origin')
2 changes: 1 addition & 1 deletion super_setup.py
@@ -556,7 +556,7 @@ def ensure(repo, dry=False):
# Print some status
repo.debug(
' * branch = {} -> {}'.format(
-repo.pygit.active_branch.name, repo.pygit.active_branch.tracking_branch(),
+repo.pygit.active_branch.name, repo.pygit.active_branch.tracking_branch()
)
)

4 changes: 2 additions & 2 deletions wbia/__main__.py
@@ -51,11 +51,11 @@ def smoke_test(): # nocover

import brambox

-print('Brambox: %r, %r' % (brambox.__version__, brambox.__file__,))
+print('Brambox: %r, %r' % (brambox.__version__, brambox.__file__))

import lightnet

-print('Lightnet: %r, %r' % (lightnet.__version__, lightnet.__file__,))
+print('Lightnet: %r, %r' % (lightnet.__version__, lightnet.__file__))


def run_wbia():
23 changes: 13 additions & 10 deletions wbia/_wbia_object.py
@@ -273,16 +273,19 @@ def expand_closure_source(funcname, func):
source = ' %s = %s' % (alias, attrname)
explicit_lines.append(source)

-explicit_source = '\n'.join(
-    [
-        'from wbia import _wbia_object',
-        '',
-        '',
-        'class _%s_base_class(_wbia_object.ObjectList1D):',
-        ' __needs_inject__ = False',
-        '',
-    ]
-) % (objname,)
+explicit_source = (
+    '\n'.join(
+        [
+            'from wbia import _wbia_object',
+            '',
+            '',
+            'class _%s_base_class(_wbia_object.ObjectList1D):',
+            ' __needs_inject__ = False',
+            '',
+        ]
+    )
+    % (objname,)
+)
explicit_source += '\n'.join(explicit_lines)
explicit_fname = '_autogen_%s_base.py' % (objname,)
from os.path import dirname, join
2 changes: 1 addition & 1 deletion wbia/algo/detect/azure.py
@@ -87,7 +87,7 @@ def _detect(gpath, prediction_project, prediction_iteration, prediction_model):
with open(gpath, 'rb') as image_file:
data = image_file.read()

-prediction_url = DETECTION_URL % (prediction_project, prediction_iteration,)
+prediction_url = DETECTION_URL % (prediction_project, prediction_iteration)
prediction_header = DETECTION_HEADER.copy()
prediction_header['Prediction-Key'] = prediction_model
response = requests.post(url=prediction_url, data=data, headers=prediction_header)
10 changes: 5 additions & 5 deletions wbia/algo/detect/canonical.py
@@ -161,7 +161,7 @@ def __getitem__(self, index):
if self.target_transform is not None:
target = self.target_transform(target)

-result = (sample, target,) if self.targets else (sample,)
+result = (sample, target) if self.targets else (sample,)
return result

def __len__(self):
@@ -182,7 +182,7 @@ def __repr__(self):


def finetune(
-model, dataloaders, optimizer, scheduler, device, num_epochs=128, under=1.0, over=1.0,
+model, dataloaders, optimizer, scheduler, device, num_epochs=128, under=1.0, over=1.0
):
phases = ['train', 'val']

@@ -367,7 +367,7 @@ def visualize_augmentations(dataset, augmentation, tag, num=20):
import matplotlib.pyplot as plt

samples = dataset.samples
-logger.info('Dataset %r has %d samples' % (tag, len(samples),))
+logger.info('Dataset %r has %d samples' % (tag, len(samples)))

index_list = list(range(len(samples)))
random.shuffle(index_list)
@@ -452,7 +452,7 @@ def train(data_path, output_path, batch_size=32):
# Initialize the model for this run
model = torchvision.models.densenet201(pretrained=True)
num_ftrs = model.classifier.in_features
-model.classifier = nn.Sequential(nn.Dropout(0.5), nn.Linear(num_ftrs, 4),)
+model.classifier = nn.Sequential(nn.Dropout(0.5), nn.Linear(num_ftrs, 4))

# Send the model to GPU
model = model.to(device)
@@ -521,7 +521,7 @@ def test_single(filepath_list, weights_path, batch_size=512):
# Initialize the model for this run
model = torchvision.models.densenet201()
num_ftrs = model.classifier.in_features
-model.classifier = nn.Sequential(nn.Dropout(0.5), nn.Linear(num_ftrs, 4),)
+model.classifier = nn.Sequential(nn.Dropout(0.5), nn.Linear(num_ftrs, 4))

model.load_state_dict(state)

10 changes: 5 additions & 5 deletions wbia/algo/detect/densenet.py
@@ -109,7 +109,7 @@ def __init__(self, blur=True, flip=False, rotate=10, shear=10, **kwargs):
sequence += [
iaa.PiecewiseAffine(scale=(0.0005, 0.005)),
iaa.Affine(
-rotate=(-rotate, rotate), shear=(-shear, shear), mode='symmetric',
+rotate=(-rotate, rotate), shear=(-shear, shear), mode='symmetric'
),
iaa.Grayscale(alpha=(0.0, 0.5)),
]
@@ -168,7 +168,7 @@ def __init__(self, filepaths, targets=None, transform=None, target_transform=Non

self.targets = targets is not None

-args = (filepaths, targets,) if self.targets else (filepaths,)
+args = (filepaths, targets) if self.targets else (filepaths,)
self.samples = list(zip(*args))

if self.targets:
@@ -204,7 +204,7 @@ def __getitem__(self, index):
if self.target_transform is not None:
target = self.target_transform(target)

-result = (sample, target,) if self.targets else (sample,)
+result = (sample, target) if self.targets else (sample,)

return result

@@ -391,7 +391,7 @@ def visualize_augmentations(dataset, augmentation, tag, num_per_class=10, **kwar

samples = dataset.samples
flags = np.array(ut.take_column(samples, 1))
-logger.info('Dataset %r has %d samples' % (tag, len(flags),))
+logger.info('Dataset %r has %d samples' % (tag, len(flags)))

indices = []
for flag in set(flags):
@@ -771,7 +771,7 @@ def test(

logger.info(
'Using weights in the ensemble, index %r: %s '
-% (ensemble_index, ut.repr3(weights_path_list),)
+% (ensemble_index, ut.repr3(weights_path_list))
)
result_list = test_ensemble(
gpath_list,
10 changes: 5 additions & 5 deletions wbia/algo/detect/orientation.py
@@ -62,7 +62,7 @@ def __init__(self, blur=True, flip=False, rotate=10, shear=10, **kwargs):
sequence += [
iaa.PiecewiseAffine(scale=(0.0005, 0.005)),
iaa.Affine(
-rotate=(-rotate, rotate), shear=(-shear, shear), mode='symmetric',
+rotate=(-rotate, rotate), shear=(-shear, shear), mode='symmetric'
),
iaa.Grayscale(alpha=(0.0, 0.5)),
]
@@ -121,7 +121,7 @@ def __init__(self, filepaths, targets=None, transform=None, target_transform=Non

self.targets = targets is not None

-args = (filepaths, targets,) if self.targets else (filepaths,)
+args = (filepaths, targets) if self.targets else (filepaths,)
self.samples = list(zip(*args))

if self.targets:
@@ -157,7 +157,7 @@ def __getitem__(self, index):
if self.target_transform is not None:
target = self.target_transform(target)

-result = (sample, target,) if self.targets else (sample,)
+result = (sample, target) if self.targets else (sample,)

return result

@@ -344,7 +344,7 @@ def visualize_augmentations(dataset, augmentation, tag, num_per_class=10, **kwar

samples = dataset.samples
flags = np.array(ut.take_column(samples, 1))
-logger.info('Dataset %r has %d samples' % (tag, len(flags),))
+logger.info('Dataset %r has %d samples' % (tag, len(flags)))

indices = []
for flag in set(flags):
@@ -722,7 +722,7 @@ def test(

logger.info(
'Using weights in the ensemble, index %r: %s '
-% (ensemble_index, ut.repr3(weights_path_list),)
+% (ensemble_index, ut.repr3(weights_path_list))
)
result_list = test_ensemble(
gpath_list,
4 changes: 2 additions & 2 deletions wbia/algo/detect/randomforest.py
@@ -46,7 +46,7 @@ def train_gid_list(
"""
logger.info(
'[randomforest.train()] training with %d gids and species=%r'
-% (len(gid_list), species,)
+% (len(gid_list), species)
)
if trees_path is None and species is not None:
trees_path = join(ibs.get_cachedir(), 'trees', species)
@@ -298,7 +298,7 @@ def detect(ibs, gpath_list, tree_path_list, **kwargs):
if verbose:
logger.info(
'[randomforest.detect()] Detecting with %d trees with scale_list=%r'
-% (len(tree_path_list), kwargs['scale_list'],)
+% (len(tree_path_list), kwargs['scale_list'])
)

# Run detection
2 changes: 1 addition & 1 deletion wbia/algo/graph/core.py
@@ -686,7 +686,7 @@ def connected_component_status(infr):
num_inconsistent = len(infr.recovery_ccs)
num_names_max = infr.pos_graph.number_of_components()

-status = dict(num_names_max=num_names_max, num_inconsistent=num_inconsistent,)
+status = dict(num_names_max=num_names_max, num_inconsistent=num_inconsistent)
infr.print('done checking status', 3)
return status

4 changes: 2 additions & 2 deletions wbia/algo/graph/mixin_dynamic.py
@@ -1146,7 +1146,7 @@ def _set_pos_redun_flag(infr, nid, flag):
infr.remove_internal_priority(cc)
if infr.params['inference.update_attrs']:
infr.set_edge_attrs(
-'inferred_state', ut.dzip(nxu.edges_inside(infr.graph, cc), ['same']),
+'inferred_state', ut.dzip(nxu.edges_inside(infr.graph, cc), ['same'])
)
else:
if was_pos_redun:
@@ -1491,7 +1491,7 @@ def categorize_edges(infr, graph=None, ne_to_edges=None):
# Find edges internal to inconsistent PCCs
incon_internal = {
nid: union(
-ne_to_edges[key][(nid, nid)] for key in (POSTV, NEGTV,) + UNINFERABLE
+ne_to_edges[key][(nid, nid)] for key in (POSTV, NEGTV) + UNINFERABLE
)
for nid in incon_internal_nids
}
2 changes: 1 addition & 1 deletion wbia/algo/graph/mixin_viz.py
@@ -448,7 +448,7 @@ def get_any(dict_, keys, default=None):
reviewed_width = 2.0
if highlight_reviews:
nx.set_edge_attributes(
-graph, name='linewidth', values=ut.dzip(reviewed_edges, [reviewed_width]),
+graph, name='linewidth', values=ut.dzip(reviewed_edges, [reviewed_width])
)
nx.set_edge_attributes(
graph,
3 changes: 2 additions & 1 deletion wbia/algo/graph/mixin_wbia.py
@@ -1194,7 +1194,8 @@ def fix_annotmatch_to_undirected_upper(ibs):

df4 = df3[~pd.isnull(df3[ed_key])]
ibs.set_annotmatch_evidence_decision(
-df4.annotmatch_rowid, [None if pd.isnull(x) else int(x) for x in df4[ed_key]],
+df4.annotmatch_rowid,
+[None if pd.isnull(x) else int(x) for x in df4[ed_key]],
)
ibs.set_annotmatch_tag_text(
df4.annotmatch_rowid, df4.annotmatch_tag_text.tolist()
4 changes: 2 additions & 2 deletions wbia/algo/graph/nx_dynamic_graph.py
@@ -12,7 +12,7 @@
class GraphHelperMixin(ut.NiceRepr):
def __nice__(self):
return 'nNodes={}, nEdges={}'.format(
-self.number_of_nodes(), self.number_of_edges(),
+self.number_of_nodes(), self.number_of_edges()
)

def has_nodes(self, nodes):
@@ -181,7 +181,7 @@ def clear(self):

def __nice__(self):
return 'nNodes={}, nEdges={}, nCCs={}'.format(
-self.number_of_nodes(), self.number_of_edges(), self.number_of_components(),
+self.number_of_nodes(), self.number_of_edges(), self.number_of_components()
)

def number_of_components(self):
2 changes: 1 addition & 1 deletion wbia/algo/hots/_pipeline_helpers.py
@@ -218,7 +218,7 @@ def testdata_pre_sver(defaultdb='PZ_MTEST', qaid_list=None, daid_list=None):


def testdata_post_sver(
-defaultdb='PZ_MTEST', qaid_list=None, daid_list=None, codename='vsmany', cfgdict=None,
+defaultdb='PZ_MTEST', qaid_list=None, daid_list=None, codename='vsmany', cfgdict=None
):
"""
>>> from wbia.algo.hots._pipeline_helpers import * # NOQA
17 changes: 9 additions & 8 deletions wbia/algo/hots/chip_match.py
@@ -138,9 +138,12 @@ def safe_check_nested_lens_eq(arr1, arr2):
else:
safe_check_lens_eq(arr1, arr2, 'outer lengths do not correspond')
for count, (x, y) in enumerate(zip(arr1, arr2)):
-assert len(x) == len(y), (
-    'inner lengths at position=%r do not correspond (%r != %r)'
-    % (count, len(x), len(y),)
+assert len(x) == len(
+    y
+), 'inner lengths at position=%r do not correspond (%r != %r)' % (
+    count,
+    len(x),
+    len(y),
)


@@ -522,7 +525,7 @@ def ishow_single_annotmatch(cm, qreq_, aid2=None, **kwargs):
}
if aid2 is None:
aid2 = cm.get_top_aids(ntop=1)[0]
-logger.info('[cm] ishow_single_annotmatch aids(%s, %s)' % (cm.qaid, aid2,))
+logger.info('[cm] ishow_single_annotmatch aids(%s, %s)' % (cm.qaid, aid2))
kwshow.update(**kwargs)
try:
inter = interact_matches.MatchInteraction(
@@ -1187,9 +1190,7 @@ def summarize(cm, qreq_):
cminfo_dict[key] = annot_df[prop].iloc[idx]

cminfo_dict.update(
-dict(
-    gt_aid=cminfo_dict['gt_annot_daid'], gf_aid=cminfo_dict['gf_annot_daid'],
-)
+dict(gt_aid=cminfo_dict['gt_annot_daid'], gf_aid=cminfo_dict['gf_annot_daid'])
)
del cminfo_dict['gt_annot_daid']
del cminfo_dict['gf_annot_daid']
@@ -1851,7 +1852,7 @@ def varinfo(varname, onlyrepr=False, canshowrepr=True, cm=cm, varcolor='yellow')
varinfo_list += [
# ' %s varinfo(%s):' % (symbol, varname,),
' %s %s = <not shown!>'
-% (symbol, varname,),
+% (symbol, varname)
]
varinfo_list += [' len = %r' % (len(varval),)]
if depth != len(varval):
2 changes: 1 addition & 1 deletion wbia/algo/hots/neighbor_index_cache.py
@@ -311,7 +311,7 @@ def request_wbia_nnindexer(qreq_, verbose=True, **kwargs):


def request_augmented_wbia_nnindexer(
-qreq_, daid_list, verbose=True, use_memcache=True, force_rebuild=False, memtrack=None,
+qreq_, daid_list, verbose=True, use_memcache=True, force_rebuild=False, memtrack=None
):
r"""
DO NOT USE. THIS FUNCTION CAN CURRENTLY CAUSE A SEGFAULT
2 changes: 1 addition & 1 deletion wbia/algo/hots/scoring.py
@@ -48,7 +48,7 @@ def score_chipmatch_list(qreq_, cm_list, score_method, progkw=None):
progkw = dict(freq=1, time_thresh=30.0, adjust=True)
lbl = 'scoring %s' % (score_method)
# Choose the appropriate scoring mechanism
-logger.info('[scoring] score %d chipmatches with %s' % (len(cm_list), score_method,))
+logger.info('[scoring] score %d chipmatches with %s' % (len(cm_list), score_method))
if score_method == 'sumamech':
for cm in ut.ProgressIter(cm_list, lbl=lbl, **progkw):
cm.score_name_sumamech(qreq_)
6 changes: 3 additions & 3 deletions wbia/algo/preproc/preproc_image.py
@@ -90,7 +90,7 @@ def islocal(gpath):
_, ext = splitext(filename)
# base = filename
base = ut.random_nonce(16)
-suffix = '.%s%s' % (base, ext,)
+suffix = '.%s%s' % (base, ext)
temp_file, temp_filepath = tempfile.mkstemp(suffix=suffix)
args = (
gpath,
@@ -120,7 +120,7 @@ def islocal(gpath):
scheme = urlsplit(uri_, allow_fragments=False).scheme
uri_ = uri_.strip('%s://' % (scheme,))
uri_path = urlquote(uri_.encode('utf8'))
-uri_ = '%s://%s' % (scheme, uri_path,)
+uri_ = '%s://%s' % (scheme, uri_path)
# six.moves.urllib.request.urlretrieve(uri_, filename=temp_filepath)
response = requests.get(uri_, stream=True, allow_redirects=True)
assert (
@@ -158,7 +158,7 @@ def islocal(gpath):
# warn.line)
# warnstr = warnings.formatwarning
# logger.info(warnstr)
-logger.info('%d warnings issued by %r' % (len(w), gpath,))
+logger.info('%d warnings issued by %r' % (len(w), gpath))
# Parse out the data
width, height = pil_img.size # Read width, height
time, lat, lon, orient = parse_exif(pil_img) # Read exif tags
2 changes: 1 addition & 1 deletion wbia/algo/smk/inverted_index.py
@@ -200,7 +200,7 @@ def render_inverted_vocab_word(inva, wx, ibs, fnum=None):

# Stack them together
solidbar = np.zeros(
-(patch_img.shape[0], int(patch_img.shape[1] * 0.1), 3), dtype=patch_img.dtype,
+(patch_img.shape[0], int(patch_img.shape[1] * 0.1), 3), dtype=patch_img.dtype
)
border_color = (100, 10, 10) # bgr, darkblue
if ut.is_float(solidbar):