From 6064f005d720456f7e5246a9dd56325ac032dcd4 Mon Sep 17 00:00:00 2001 From: Orion Reblitz-Richardson Date: Tue, 13 Mar 2018 12:22:55 -0700 Subject: [PATCH 1/7] Support export of CollectAndDistributeFpnRpnProposalsOp --- tools/convert_pkl_to_pb.py | 109 ++++++++++++++++++++++++++++--------- 1 file changed, 83 insertions(+), 26 deletions(-) diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py index 27d4378ac..ac19686f9 100644 --- a/tools/convert_pkl_to_pb.py +++ b/tools/convert_pkl_to_pb.py @@ -124,10 +124,41 @@ def unscope_name(name): def reset_names(names): - for i in range(0, len(names)): + for i in range(len(names)): names[i] = unscope_name(names[i]) +def convert_collect_and_distribute( + op, blobs, + roi_canonical_scale, + roi_canonical_level, + roi_max_level, + roi_min_level, + rpn_max_level, + rpn_min_level, + rpn_post_nms_topN, +): + print('Converting CollectAndDistributeFpnRpnProposals' + ' Python -> C++:\n{}'.format(op)) + assert op.name.startswith('CollectAndDistributeFpnRpnProposalsOp'), \ + 'Not valid CollectAndDistributeFpnRpnProposalsOp' + + inputs = [x for x in op.input] + ret = core.CreateOperator( + 'CollectAndDistributeFpnRpnProposals', + inputs, + list(op.output), + roi_canonical_scale=roi_canonical_scale, + roi_canonical_level=roi_canonical_level, + roi_max_level=roi_max_level, + roi_min_level=roi_min_level, + rpn_max_level=rpn_max_level, + rpn_min_level=rpn_min_level, + rpn_post_nms_topN=rpn_post_nms_topN, + ) + return ret + + def convert_gen_proposals( op, blobs, rpn_pre_nms_topN, @@ -136,19 +167,19 @@ def convert_gen_proposals( rpn_min_size, ): print('Converting GenerateProposals Python -> C++:\n{}'.format(op)) - assert op.name.startswith("GenerateProposalsOp"), "Not valid GenerateProposalsOp" + assert op.name.startswith('GenerateProposalsOp'), 'Not valid GenerateProposalsOp' - spatial_scale = mutils.get_op_arg_valf(op, "spatial_scale", None) + spatial_scale = mutils.get_op_arg_valf(op, 'spatial_scale', None) assert 
spatial_scale is not None inputs = [x for x in op.input] - anchor_name = "anchor" + anchor_name = 'anchor' inputs.append(anchor_name) blobs[anchor_name] = get_anchors(spatial_scale) print('anchors {}'.format(blobs[anchor_name])) ret = core.CreateOperator( - "GenerateProposals", + 'GenerateProposals', inputs, list(op.output), spatial_scale=spatial_scale, @@ -158,7 +189,6 @@ def convert_gen_proposals( min_size=rpn_min_size, correct_transform_coords=True, ) - return ret, anchor_name @@ -188,25 +218,48 @@ def convert_op_name(op): reset_names(op.output) return [op] - @op_filter(type="Python", inputs=['rpn_cls_probs', 'rpn_bbox_pred', 'im_info']) - def convert_gen_proposal(op_in): - gen_proposals_op, ext_input = convert_gen_proposals( - op_in, blobs, - rpn_min_size=float(cfg.TEST.RPN_MIN_SIZE), - rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, - rpn_pre_nms_topN=cfg.TEST.RPN_PRE_NMS_TOP_N, - rpn_nms_thresh=cfg.TEST.RPN_NMS_THRESH, - ) - net.external_input.extend([ext_input]) - return [gen_proposals_op] + @op_filter() + def convert_python(op): + if op.type == 'Python': + if op.name.startswith('GenerateProposalsOp'): + gen_proposals_op, ext_input = convert_gen_proposals( + op, blobs, + rpn_min_size=float(cfg.TEST.RPN_MIN_SIZE), + rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, + rpn_pre_nms_topN=cfg.TEST.RPN_PRE_NMS_TOP_N, + rpn_nms_thresh=cfg.TEST.RPN_NMS_THRESH, + ) + net.external_input.extend([ext_input]) + return [gen_proposals_op] + elif op.name.startswith('CollectAndDistributeFpnRpnProposalsOp'): + collect_dist_op = convert_collect_and_distribute( + op, blobs, + roi_canonical_scale=cfg.FPN.ROI_CANONICAL_SCALE, + roi_canonical_level=cfg.FPN.ROI_CANONICAL_LEVEL, + roi_max_level=cfg.FPN.ROI_MAX_LEVEL, + roi_min_level=cfg.FPN.ROI_MIN_LEVEL, + rpn_max_level=cfg.FPN.RPN_MAX_LEVEL, + rpn_min_level=cfg.FPN.RPN_MIN_LEVEL, + rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, + ) + return [collect_dist_op] + else: + raise ValueError('Failed to convert Python op {}'.format( + 
op.name)) + return [op] - @op_filter(input_has='rois') + @op_filter() def convert_rpn_rois(op): - for j in range(0, len(op.input)): + for j in range(len(op.input)): if op.input[j] == 'rois': print('Converting op {} input name: rois -> rpn_rois:\n{}'.format( op.type, op)) op.input[j] = 'rpn_rois' + for j in range(len(op.output)): + if op.output[j] == 'rois': + print('Converting op {} output name: rois -> rpn_rois:\n{}'.format( + op.type, op)) + op.output[j] = 'rpn_rois' return [op] @op_filter(type_in=['StopGradient', 'Alias']) @@ -216,7 +269,7 @@ def convert_remove_op(op): convert_op_in_proto(net, convert_op_name) convert_op_in_proto(net, [ - convert_gen_proposal, convert_rpn_rois, convert_remove_op + convert_python, convert_rpn_rois, convert_remove_op ]) reset_names(net.external_input) @@ -272,6 +325,7 @@ def convert_model_gpu(args, net, init_net): cdo_cpu = mutils.get_device_option_cpu() CPU_OPS = [ + ["CollectAndDistributeFpnRpnProposals", None], ["GenerateProposals", None], ["BBoxTransform", None], ["BoxWithNMSLimit", None], @@ -462,7 +516,7 @@ def run_model_pb(args, net, init_net, im, check_blobs): ) try: - workspace.RunNet(net.Proto().name) + workspace.RunNet(net) scores = workspace.FetchBlob('score_nms') classids = workspace.FetchBlob('class_nms') boxes = workspace.FetchBlob('bbox_nms') @@ -520,13 +574,16 @@ def main(): merge_cfg_from_list(args.opts) cfg.NUM_GPUS = 1 assert_and_infer_cfg() - logger.info('Conerting model with config:') + logger.info('Converting model with config:') logger.info(pprint.pformat(cfg)) - assert not cfg.MODEL.KEYPOINTS_ON, "Keypoint model not supported." - assert not cfg.MODEL.MASK_ON, "Mask model not supported." - assert not cfg.FPN.FPN_ON, "FPN not supported." - assert not cfg.RETINANET.RETINANET_ON, "RetinaNet model not supported." + # script will stop when it can't find an operator rather + # than stopping based on these flags + # + # assert not cfg.MODEL.KEYPOINTS_ON, "Keypoint model not supported." 
+ # assert not cfg.MODEL.MASK_ON, "Mask model not supported." + # assert not cfg.FPN.FPN_ON, "FPN not supported." + # assert not cfg.RETINANET.RETINANET_ON, "RetinaNet model not supported." # load model from cfg model, blobs = load_model(args) From 94c725173b3a05125f5ab12474110ef47f45e319 Mon Sep 17 00:00:00 2001 From: Orion Reblitz-Richardson Date: Wed, 4 Apr 2018 14:58:15 -0700 Subject: [PATCH 2/7] Use type=Python in op_filter --- tools/convert_pkl_to_pb.py | 54 ++++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 28 deletions(-) diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py index ac19686f9..c1978b921 100644 --- a/tools/convert_pkl_to_pb.py +++ b/tools/convert_pkl_to_pb.py @@ -218,35 +218,33 @@ def convert_op_name(op): reset_names(op.output) return [op] - @op_filter() + @op_filter(type='Python') def convert_python(op): - if op.type == 'Python': - if op.name.startswith('GenerateProposalsOp'): - gen_proposals_op, ext_input = convert_gen_proposals( - op, blobs, - rpn_min_size=float(cfg.TEST.RPN_MIN_SIZE), - rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, - rpn_pre_nms_topN=cfg.TEST.RPN_PRE_NMS_TOP_N, - rpn_nms_thresh=cfg.TEST.RPN_NMS_THRESH, - ) - net.external_input.extend([ext_input]) - return [gen_proposals_op] - elif op.name.startswith('CollectAndDistributeFpnRpnProposalsOp'): - collect_dist_op = convert_collect_and_distribute( - op, blobs, - roi_canonical_scale=cfg.FPN.ROI_CANONICAL_SCALE, - roi_canonical_level=cfg.FPN.ROI_CANONICAL_LEVEL, - roi_max_level=cfg.FPN.ROI_MAX_LEVEL, - roi_min_level=cfg.FPN.ROI_MIN_LEVEL, - rpn_max_level=cfg.FPN.RPN_MAX_LEVEL, - rpn_min_level=cfg.FPN.RPN_MIN_LEVEL, - rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, - ) - return [collect_dist_op] - else: - raise ValueError('Failed to convert Python op {}'.format( - op.name)) - return [op] + if op.name.startswith('GenerateProposalsOp'): + gen_proposals_op, ext_input = convert_gen_proposals( + op, blobs, + rpn_min_size=float(cfg.TEST.RPN_MIN_SIZE), 
+ rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, + rpn_pre_nms_topN=cfg.TEST.RPN_PRE_NMS_TOP_N, + rpn_nms_thresh=cfg.TEST.RPN_NMS_THRESH, + ) + net.external_input.extend([ext_input]) + return [gen_proposals_op] + elif op.name.startswith('CollectAndDistributeFpnRpnProposalsOp'): + collect_dist_op = convert_collect_and_distribute( + op, blobs, + roi_canonical_scale=cfg.FPN.ROI_CANONICAL_SCALE, + roi_canonical_level=cfg.FPN.ROI_CANONICAL_LEVEL, + roi_max_level=cfg.FPN.ROI_MAX_LEVEL, + roi_min_level=cfg.FPN.ROI_MIN_LEVEL, + rpn_max_level=cfg.FPN.RPN_MAX_LEVEL, + rpn_min_level=cfg.FPN.RPN_MIN_LEVEL, + rpn_post_nms_topN=cfg.TEST.RPN_POST_NMS_TOP_N, + ) + return [collect_dist_op] + else: + raise ValueError('Failed to convert Python op {}'.format( + op.name)) @op_filter() def convert_rpn_rois(op): From 7d01723404be8f1a96b968d8b277bd97e6ec399f Mon Sep 17 00:00:00 2001 From: Orion Reblitz-Richardson Date: Wed, 4 Apr 2018 15:20:34 -0700 Subject: [PATCH 3/7] Apply conversions separately --- tools/convert_pkl_to_pb.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py index c1978b921..9c556b778 100644 --- a/tools/convert_pkl_to_pb.py +++ b/tools/convert_pkl_to_pb.py @@ -265,10 +265,12 @@ def convert_remove_op(op): print('Removing op {}:\n{}'.format(op.type, op)) return [] + # We want to apply to all operators, including converted + # so run separately + convert_op_in_proto(net, convert_remove_op) + convert_op_in_proto(net, convert_python) convert_op_in_proto(net, convert_op_name) - convert_op_in_proto(net, [ - convert_python, convert_rpn_rois, convert_remove_op - ]) + convert_op_in_proto(net, convert_rpn_rois) reset_names(net.external_input) reset_names(net.external_output) From 6b3de8b8318edac353116bb179bc4648d4a2dd58 Mon Sep 17 00:00:00 2001 From: daquexian Date: Tue, 17 Apr 2018 04:13:39 +0000 Subject: [PATCH 4/7] Support fpn --- detectron/modeling/FPN.py | 2 +- tools/convert_pkl_to_pb.py | 15 
+++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/detectron/modeling/FPN.py b/detectron/modeling/FPN.py index 46881e497..21f68133f 100644 --- a/detectron/modeling/FPN.py +++ b/detectron/modeling/FPN.py @@ -293,7 +293,7 @@ def add_topdown_lateral_module( bias_init=const_fill(0.0) ) # Top-down 2x upsampling - td = model.net.UpsampleNearest(fpn_top, fpn_bottom + '_topdown', scale=2) + td = model.net.ResizeNearest(fpn_top, fpn_bottom + '_topdown', width_scale=2., height_scale=2.) # Sum lateral and top-down model.net.Sum([lat, td], fpn_bottom) diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py index 9c556b778..0d7b90993 100644 --- a/tools/convert_pkl_to_pb.py +++ b/tools/convert_pkl_to_pb.py @@ -172,10 +172,17 @@ def convert_gen_proposals( spatial_scale = mutils.get_op_arg_valf(op, 'spatial_scale', None) assert spatial_scale is not None + lvl = int(op.input[0][-1]) if op.input[0][-1].isdigit() else None + inputs = [x for x in op.input] - anchor_name = 'anchor' + anchor_name = 'anchor{}'.format(lvl) if lvl else 'anchor' inputs.append(anchor_name) - blobs[anchor_name] = get_anchors(spatial_scale) + blobs[anchor_name] = \ + get_anchors( + spatial_scale, + (cfg.FPN.RPN_ANCHOR_START_SIZE * 2.**(lvl - cfg.FPN.RPN_MIN_LEVEL),) + ) \ + if lvl else get_anchors(spatial_scale, cfg.RPN.SIZES) print('anchors {}'.format(blobs[anchor_name])) ret = core.CreateOperator( @@ -192,10 +199,10 @@ def convert_gen_proposals( return ret, anchor_name -def get_anchors(spatial_scale): +def get_anchors(spatial_scale, anchor_sizes): anchors = generate_anchors.generate_anchors( stride=1. 
/ spatial_scale, - sizes=cfg.RPN.SIZES, + sizes=anchor_sizes, aspect_ratios=cfg.RPN.ASPECT_RATIOS).astype(np.float32) return anchors From 23234e4da00d01a45d3b126b6aaffa358c8a41cf Mon Sep 17 00:00:00 2001 From: daquexian Date: Mon, 30 Apr 2018 14:17:44 +0000 Subject: [PATCH 5/7] Reuse code in blob.py to fix inconsistent sizes in FPN's sum --- tools/convert_pkl_to_pb.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py index 0d7b90993..7256883a3 100644 --- a/tools/convert_pkl_to_pb.py +++ b/tools/convert_pkl_to_pb.py @@ -50,6 +50,7 @@ from detectron.utils.logging import setup_logging from detectron.utils.model_convert_utils import convert_op_in_proto from detectron.utils.model_convert_utils import op_filter +import detectron.utils.blob as blob_utils import detectron.core.test_engine as test_engine import detectron.utils.c2 as c2_utils import detectron.utils.model_convert_utils as mutils @@ -485,10 +486,8 @@ def _prepare_blobs( im = cv2.resize(im, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_LINEAR) - blob = np.zeros([1, im.shape[0], im.shape[1], 3], dtype=np.float32) - blob[0, :, :, :] = im - channel_swap = (0, 3, 1, 2) # swap channel to (k, c, h, w) - blob = blob.transpose(channel_swap) + # Reuse code in blob_utils and fit FPN + blob = blob_utils.im_list_to_blob([im]) blobs = {} blobs['data'] = blob From abaa4b9474ac6626f318e7ac4d29ec293969e9c1 Mon Sep 17 00:00:00 2001 From: daquexian Date: Mon, 23 Jul 2018 03:57:08 +0000 Subject: [PATCH 6/7] Extract anchor_sizes to imporve readability --- tools/convert_pkl_to_pb.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py index 7256883a3..c84885041 100644 --- a/tools/convert_pkl_to_pb.py +++ b/tools/convert_pkl_to_pb.py @@ -178,12 +178,8 @@ def convert_gen_proposals( inputs = [x for x in op.input] anchor_name = 'anchor{}'.format(lvl) if lvl else 
'anchor' inputs.append(anchor_name) - blobs[anchor_name] = \ - get_anchors( - spatial_scale, - (cfg.FPN.RPN_ANCHOR_START_SIZE * 2.**(lvl - cfg.FPN.RPN_MIN_LEVEL),) - ) \ - if lvl else get_anchors(spatial_scale, cfg.RPN.SIZES) + anchor_sizes = (cfg.FPN.RPN_ANCHOR_START_SIZE * 2.**(lvl - cfg.FPN.RPN_MIN_LEVEL),) if lvl else cfg.RPN.SIZES + blobs[anchor_name] = get_anchors(spatial_scale, anchor_sizes) print('anchors {}'.format(blobs[anchor_name])) ret = core.CreateOperator( From 3558c35b6986387ccdabb2aafc15945067019400 Mon Sep 17 00:00:00 2001 From: daquexian Date: Wed, 8 Aug 2018 02:11:30 +0000 Subject: [PATCH 7/7] Restore UpsampleNearest in training code and convert it to ResizeNearest only when converting pkl to pb --- detectron/modeling/FPN.py | 2 +- tools/convert_pkl_to_pb.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/detectron/modeling/FPN.py b/detectron/modeling/FPN.py index 21f68133f..46881e497 100644 --- a/detectron/modeling/FPN.py +++ b/detectron/modeling/FPN.py @@ -293,7 +293,7 @@ def add_topdown_lateral_module( bias_init=const_fill(0.0) ) # Top-down 2x upsampling - td = model.net.ResizeNearest(fpn_top, fpn_bottom + '_topdown', width_scale=2., height_scale=2.) 
+    td = model.net.UpsampleNearest(fpn_top, fpn_bottom + '_topdown', scale=2)
 
     # Sum lateral and top-down
     model.net.Sum([lat, td], fpn_bottom)
 
diff --git a/tools/convert_pkl_to_pb.py b/tools/convert_pkl_to_pb.py
index c84885041..a553444bc 100644
--- a/tools/convert_pkl_to_pb.py
+++ b/tools/convert_pkl_to_pb.py
@@ -250,6 +250,24 @@ def convert_python(op):
             raise ValueError('Failed to convert Python op {}'.format(
                 op.name))
 
+    # Only convert UpsampleNearest to ResizeNearest when converting to pb so that the existing models are unchanged
+    # https://github.com/facebookresearch/Detectron/pull/372#issuecomment-410248561
+    @op_filter(type='UpsampleNearest')
+    def convert_upsample_nearest(op):
+        for arg in op.arg:
+            if arg.name == 'scale':
+                scale = arg.i
+                break
+        else:
+            raise KeyError('No attribute "scale" in UpsampleNearest op')
+        resize_nearest_op = core.CreateOperator('ResizeNearest',
+                                                list(op.input),
+                                                list(op.output),
+                                                name=op.name,
+                                                width_scale=float(scale),
+                                                height_scale=float(scale))
+        return resize_nearest_op
+
     @op_filter()
     def convert_rpn_rois(op):
         for j in range(len(op.input)):
@@ -272,6 +290,7 @@ def convert_remove_op(op):
     # We want to apply to all operators, including converted
     # so run separately
     convert_op_in_proto(net, convert_remove_op)
+    convert_op_in_proto(net, convert_upsample_nearest)
     convert_op_in_proto(net, convert_python)
     convert_op_in_proto(net, convert_op_name)
     convert_op_in_proto(net, convert_rpn_rois)