xvjiarui / GCNet

BOX AP AP50 AP75 APS APM APL
SPEED
MODEL CODE PAPER
ε-REPR
CODE PAPER
ε-REPR
CODE PAPER
ε-REPR
CODE PAPER
ε-REPR
CODE PAPER
ε-REPR
CODE PAPER
ε-REPR
PAPER
GLOBAL RANK
GCNet
(ResNeXt-101 + DCN + cascade + GC r16)
0.479 0.479 0.668 0.669 0.522 0.522 0.292 -- 0.522 -- 0.631 -- 5.5 #3
GCNet
(ResNeXt-101 + DCN + cascade + GC r4)
0.478 0.479 0.668 0.669 0.518 0.522 0.281 -- 0.523 -- 0.634 -- 5.4 #8
Mask R-CNN
(ResNet-101-FPN, fixBN, 1x LR)
0.397 -- 0.612 -- 0.428 -- 0.228 -- 0.443 -- 0.510 -- #60
Mask R-CNN
(ResNet-101-FPN, syncBN, 1x LR)
0.397 0.398 0.612 0.613 0.428 0.429 0.228 -- 0.443 -- 0.510 -- #53
Mask R-CNN
(ResNet-101-FPN, syncBN, GC r16, 1x LR)
0.414 0.411 0.636 0.636 0.450 0.450 0.250 -- 0.459 -- 0.540 -- 8.2 #46
Mask R-CNN
(ResNet-101-FPN, syncBN, GC r4, 1x LR)
0.417 0.417 0.637 0.637 0.455 0.455 0.247 -- 0.462 -- 0.539 -- 8.1 #42
Mask R-CNN
(ResNet-50-FPN, 1x LR)
0.373 0.372 0.590 0.590 0.402 0.401 0.219 -- 0.409 -- 0.481 -- #85
Mask R-CNN
(ResNet-50-FPN, 2x LR)
0.382 -- 0.595 -- 0.413 -- 0.220 -- 0.418 -- 0.503 -- #80
Mask R-CNN
(ResNet-50-FPN, GC r16, 2x LR)
0.397 0.394 0.617 0.616 0.431 0.424
0.238 -- 0.436 -- 0.512 -- #65
Mask R-CNN
(ResNet-50-FPN, GC r4, 2x LR)
0.400 0.399 0.621 0.622 0.431 0.429 0.244 -- 0.440 -- 0.517 -- #64
Mask R-CNN
(ResNeXt-101 + DCN + cascade)
0.470 0.471 0.660 0.661 0.513 0.513 0.277 -- 0.514 -- 0.625 -- 6.0 #7
Mask R-CNN
(ResNeXt-101-FPN, syncBN, 1x LR)
0.415 0.412 0.632 0.630 0.452 0.451 0.243 -- 0.461 -- 0.539 -- 7.9 #45
Mask R-CNN
(ResNeXt-101-FPN, syncBN, cascade, 1x LR)
0.446 0.447 0.629 0.630 0.485 0.485 0.264 -- 0.489 -- 0.585 -- 6.7 #15
Mask R-CNN
(ResNeXt-101-FPN, syncBN, cascade, GC r16, 1x LR)
0.458 0.459 0.647 0.648 0.500 0.500 0.270 -- 0.500 -- 0.601 -- 6.3 #13
Mask R-CNN
(ResNeXt-101-FPN, syncBN, cascade, GC r4, 1x LR)
0.464 0.465 0.653 0.654 0.506 0.507 0.281 -- 0.507 -- 0.605 -- 6.3 #9
Mask R-CNN
(ResNeXt-101-FPN, syncBN, GC r16, 1x LR)
0.424 0.424 0.645 0.646 0.464 0.465 0.264 -- 0.468 -- 0.549 -- 7.4 #15
Mask R-CNN
(ResNeXt-101-FPN, syncBN, GC r4, 1x LR)
0.429 0.429 0.651 0.652 0.469 0.470 0.265 -- 0.472 -- 0.554 -- 7.3 #29
See Full Build Details + get badge code
[![SotaBench](https://img.shields.io/endpoint.svg?url=https://sotabench.com/api/v0/badge/gh/deepparrot/GCNet)](https://sotabench.com/user/PartyParrot/repos/deepparrot/GCNet)

How the Repository is Evaluated

The full sotabench.py file - source
import argparse
import json
import os.path as osp
import shutil
import tempfile
import urllib.request

from sotabencheval.object_detection import COCOEvaluator

import copy
import mmcv
import torch
import torch.distributed as dist
from mmcv.runner import load_checkpoint, get_dist_info
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel

from mmdet.apis import init_dist
from mmdet.core import coco_eval, wrap_fp16_model
from mmdet.datasets import build_dataloader, get_dataset
from mmdet.models import build_detector

# Extract val2017 zip
# NOTE: module-level side effect — the COCO val2017 archive is unpacked as
# soon as this file is imported. Assumes the sotabench runner has already
# downloaded val2017.zip into ./.data/vision/coco.
from torchbench.utils import extract_archive
image_dir_zip = osp.join('./.data/vision/coco', 'val2017.zip')
extract_archive(from_path=image_dir_zip, to_path='./.data/vision/coco')


def xyxy2xywh(bbox):
    """Convert an (x1, y1, x2, y2[, score]) box to COCO-style [x, y, w, h].

    Width and height use the inclusive-pixel convention (+1), matching the
    mmdetection COCO export format. Extra trailing entries (e.g. the score)
    are ignored.
    """
    x1, y1, x2, y2 = bbox.tolist()[:4]
    return [x1, y1, x2 - x1 + 1, y2 - y1 + 1]


def proposal2json(dataset, results):
    """Convert per-image proposal arrays into COCO-style json records.

    Proposals are class-agnostic, so every record is emitted with
    category_id 1.
    """
    json_results = []
    for idx in range(len(dataset)):
        img_id = dataset.img_ids[idx]
        for box in results[idx]:
            json_results.append({
                'image_id': img_id,
                'bbox': xyxy2xywh(box),
                'score': float(box[4]),
                'category_id': 1,
            })
    return json_results


def det2json(dataset, results):
    """Convert per-image, per-class detection arrays to COCO json records.

    `results[idx]` is a list indexed by class label; each entry is an
    (n, 5) array of [x1, y1, x2, y2, score] rows. Stops early when there
    are fewer results than dataset images (partial-result cache probe).
    """
    json_results = []
    for idx in range(len(dataset)):
        img_id = dataset.img_ids[idx]
        try:
            per_class = results[idx]
        except IndexError:
            # results list may be shorter than the dataset
            break
        for label, bboxes in enumerate(per_class):
            cat_id = dataset.cat_ids[label]
            for box in bboxes:
                json_results.append({
                    'image_id': img_id,
                    'bbox': xyxy2xywh(box),
                    'score': float(box[4]),
                    'category_id': cat_id,
                })
    return json_results


def segm2json(dataset, results):
    """Convert (det, seg) result tuples into COCO bbox and segm json records.

    Returns a (bbox_results, segm_results) pair. RLE 'counts' bytes are
    decoded to str in place so the segm records are json-serializable.
    Stops early when there are fewer results than dataset images.
    """
    bbox_json_results = []
    segm_json_results = []
    for idx in range(len(dataset)):
        img_id = dataset.img_ids[idx]
        try:
            det, seg = results[idx]
        except IndexError:
            break  # partial results (cache probe)
        for label, bboxes in enumerate(det):
            cat_id = dataset.cat_ids[label]

            # bbox records
            for i in range(bboxes.shape[0]):
                bbox_json_results.append({
                    'image_id': img_id,
                    'bbox': xyxy2xywh(bboxes[i]),
                    'score': float(bboxes[i][4]),
                    'category_id': cat_id,
                })

            # segm records: some detectors report a separate mask score,
            # in which case seg is a (segms, mask_scores) pair rather
            # than a plain per-label list
            if len(seg) == 2:
                segms = seg[0][label]
                mask_score = seg[1][label]
            else:
                segms = seg[label]
                mask_score = [bbox[4] for bbox in bboxes]
            for i in range(bboxes.shape[0]):
                segms[i]['counts'] = segms[i]['counts'].decode()
                segm_json_results.append({
                    'image_id': img_id,
                    'score': float(mask_score[i]),
                    'category_id': cat_id,
                    'segmentation': segms[i],
                })
    return bbox_json_results, segm_json_results


def cached_results2json(dataset, results, out_file):
    """Dump detection results to COCO-style json files.

    Args:
        dataset: dataset exposing img_ids / cat_ids for the conversion.
        results: per-image results; the type of results[0] selects the
            format — list (det), tuple (det + segm), np.ndarray (proposals).
        out_file: stem for the generated '<out_file>.<type>.json' files.

    Returns:
        dict mapping result type ('bbox', 'proposal', 'segm') to file path.

    Raises:
        TypeError: if results[0] is not a recognized result container.
    """
    # numpy is not imported at module level in this file; import locally so
    # the ndarray isinstance check below cannot raise NameError.
    import numpy as np

    result_files = dict()
    if isinstance(results[0], list):
        json_results = det2json(dataset, results)
        result_files['bbox'] = '{}.{}.json'.format(out_file, 'bbox')
        # proposals are evaluated from the same file as plain bboxes
        result_files['proposal'] = '{}.{}.json'.format(out_file, 'bbox')
        mmcv.dump(json_results, result_files['bbox'])
    elif isinstance(results[0], tuple):
        json_results = segm2json(dataset, results)
        result_files['bbox'] = '{}.{}.json'.format(out_file, 'bbox')
        result_files['proposal'] = '{}.{}.json'.format(out_file, 'bbox')
        result_files['segm'] = '{}.{}.json'.format(out_file, 'segm')
        mmcv.dump(json_results[0], result_files['bbox'])
        mmcv.dump(json_results[1], result_files['segm'])
    elif isinstance(results[0], np.ndarray):
        json_results = proposal2json(dataset, results)
        result_files['proposal'] = '{}.{}.json'.format(out_file, 'proposal')
        mmcv.dump(json_results, result_files['proposal'])
    else:
        raise TypeError('invalid type of results')
    return result_files

def single_gpu_test(model, data_loader, show=False, evaluator=None):
    """Run inference on a single GPU, probing the sotabench cache early.

    After the first batch, the partial results are converted to COCO json
    and fed to the evaluator so its batch hash can be checked against the
    server-side cache; a cache hit short-circuits the whole evaluation.

    Returns:
        (results, cache_exists): collected per-image results, and whether
        the evaluator reported a cache hit (True means results are partial).
    """
    model.eval()
    results = []
    dataset = data_loader.dataset
    prog_bar = mmcv.ProgressBar(len(dataset))

    for i, data in enumerate(data_loader):
        with torch.no_grad():
            result = model(return_loss=False, rescale=not show, **data)
        results.append(result)

        if i == 0:
            # Deep-copy so the json conversion cannot mutate the live
            # results (segm2json decodes RLE 'counts' in place).
            temp_result_files = cached_results2json(
                copy.deepcopy(dataset), copy.deepcopy(results),
                'temp_results.pkl')
            with open(temp_result_files['bbox']) as f:
                anns = json.load(f)
            evaluator.add(anns)
            print(evaluator.batch_hash)
            print(evaluator.cache_exists)
            if evaluator.cache_exists:
                return results, True

        if show:
            model.module.show_result(data, result, dataset.img_norm_cfg)

        batch_size = data['img'][0].size(0)
        for _ in range(batch_size):
            prog_bar.update()

    return results, False


def multi_gpu_test(model, data_loader, tmpdir=None):
    """Distributed inference: each rank runs its shard of the dataloader.

    Rank 0 drives a progress bar (advancing world_size images per batch);
    results from all ranks are merged via collect_results, so only rank 0
    receives the full ordered list (other ranks get None).
    """
    model.eval()
    dataset = data_loader.dataset
    rank, world_size = get_dist_info()
    show_progress = rank == 0
    if show_progress:
        prog_bar = mmcv.ProgressBar(len(dataset))

    part_results = []
    for data in data_loader:
        with torch.no_grad():
            part_results.append(
                model(return_loss=False, rescale=True, **data))
        if show_progress:
            n_done = data['img'][0].size(0) * world_size
            for _ in range(n_done):
                prog_bar.update()

    # collect results from all ranks (None on non-zero ranks)
    return collect_results(part_results, len(dataset), tmpdir)


def collect_results(result_part, size, tmpdir=None):
    """Gather per-rank result lists onto rank 0 via a shared filesystem.

    Each rank pickles its partial results into tmpdir; after a barrier,
    rank 0 loads all parts, interleaves them back into dataset order,
    trims dataloader padding and removes the tmp dir.

    Args:
        result_part: this rank's list of per-image results.
        size: total dataset length (used to drop padded samples).
        tmpdir: shared scratch directory; auto-created on rank 0 and
            broadcast to the other ranks (via a CUDA byte tensor) when None.

    Returns:
        Ordered list of `size` results on rank 0; None on all other ranks.
    """
    rank, world_size = get_dist_info()
    # create a tmp dir if it is not specified
    if tmpdir is None:
        MAX_LEN = 512
        # 32 is whitespace
        dir_tensor = torch.full((MAX_LEN, ),
                                32,
                                dtype=torch.uint8,
                                device='cuda')
        if rank == 0:
            tmpdir = tempfile.mkdtemp()
            tmpdir = torch.tensor(
                bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda')
            dir_tensor[:len(tmpdir)] = tmpdir
        # rank 0's tmpdir name, padded with spaces to MAX_LEN, is broadcast
        # to every rank and the padding stripped back off after decoding
        dist.broadcast(dir_tensor, 0)
        tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
    else:
        mmcv.mkdir_or_exist(tmpdir)
    # dump the part result to the dir
    mmcv.dump(result_part, osp.join(tmpdir, 'part_{}.pkl'.format(rank)))
    # make sure every rank has finished writing before rank 0 reads
    dist.barrier()
    # collect all parts
    if rank != 0:
        return None
    else:
        # load results of all parts from tmp dir
        part_list = []
        for i in range(world_size):
            part_file = osp.join(tmpdir, 'part_{}.pkl'.format(i))
            part_list.append(mmcv.load(part_file))
        # sort the results: samples were dealt round-robin to ranks, so
        # zipping the parts restores the original dataset order
        ordered_results = []
        for res in zip(*part_list):
            ordered_results.extend(list(res))
        # the dataloader may pad some samples
        ordered_results = ordered_results[:size]
        # remove tmp dir
        shutil.rmtree(tmpdir)
        return ordered_results

def evaluate_model(model_name, paper_arxiv_id, weights_url, weights_name, paper_results, config):
    """Download a checkpoint, run COCO val2017 inference, report to sotabench.

    Args:
        model_name: display name for the sotabench leaderboard.
        paper_arxiv_id: arXiv id of the paper the scores are compared against.
        weights_url: download URL for the model checkpoint.
        weights_name: local filename the checkpoint is saved under.
        paper_results: dict of paper-reported metrics, or None.
        config: path to the mmdetection config file for this model.
    """
    evaluator = COCOEvaluator(
        root='./.data/vision/coco',
        model_name=model_name,
        paper_arxiv_id=paper_arxiv_id,
        paper_results=paper_results)

    out = 'results.pkl'
    launcher = 'none'  # single-process evaluation on sotabench

    if out is not None and not out.endswith(('.pkl', '.pickle')):
        raise ValueError('The output file must be a pkl file.')

    cfg = mmcv.Config.fromfile(config)
    # point the test split at the sotabench-provided COCO data
    cfg.data.test['ann_file'] = './.data/vision/coco/annotations/instances_val2017.json'
    cfg.data.test['img_prefix'] = './.data/vision/coco/val2017/'

    # set cudnn_benchmark
    if cfg.get('cudnn_benchmark', False):
        torch.backends.cudnn.benchmark = True
    cfg.model.pretrained = None
    cfg.data.test.test_mode = True

    # init distributed env first, since logger depends on the dist info.
    # (this setup section was previously duplicated verbatim; run once now)
    if launcher == 'none':
        distributed = False
    else:
        distributed = True
        init_dist(launcher, **cfg.dist_params)

    # build the dataloader
    # TODO: support multiple images per gpu (only minor changes are needed)
    dataset = get_dataset(cfg.data.test)
    data_loader = build_dataloader(
        dataset,
        imgs_per_gpu=1,
        workers_per_gpu=cfg.data.workers_per_gpu,
        dist=distributed,
        shuffle=False)

    # build the model and load checkpoint
    model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
    fp16_cfg = cfg.get('fp16', None)
    if fp16_cfg is not None:
        wrap_fp16_model(model)

    local_checkpoint, _ = urllib.request.urlretrieve(
        weights_url,
        weights_name)

    checkpoint = load_checkpoint(model, local_checkpoint, map_location='cpu')
    # old versions did not save class info in checkpoints; this workaround is
    # for backward compatibility
    if 'CLASSES' in checkpoint['meta']:
        model.CLASSES = checkpoint['meta']['CLASSES']
    else:
        model.CLASSES = dataset.CLASSES

    evaluator.reset_time()

    # The distributed path has no cache probe, so default to False to avoid
    # an unbound-name error when checked below.
    cache_exists = False
    if not distributed:
        model = MMDataParallel(model, device_ids=[0])
        outputs, cache_exists = single_gpu_test(model, data_loader, False, evaluator)
    else:
        model = MMDistributedDataParallel(model.cuda())
        outputs = multi_gpu_test(model, data_loader, '')

    if cache_exists:
        print('Cache exists: %s' % (evaluator.batch_hash))
        evaluator.save()
    else:
        from mmdet.core import results2json

        rank, _ = get_dist_info()
        if out and rank == 0:
            print('\nwriting results to {}'.format(out))
            mmcv.dump(outputs, out)
            eval_types = ['bbox']
            if eval_types:
                print('Starting evaluate {}'.format(' and '.join(eval_types)))
                if eval_types == ['proposal_fast']:
                    result_file = out
                else:
                    if not isinstance(outputs[0], dict):
                        result_files = results2json(dataset, outputs, out)
                    else:
                        for name in outputs[0]:
                            print('\nEvaluating {}'.format(name))
                            outputs_ = [out[name] for out in outputs]
                            result_file = out + '.{}'.format(name)
                            result_files = results2json(dataset, outputs_,
                                                        result_file)

            # Submit the freshly computed detections. Kept inside the rank-0
            # branch so result_files is always defined when it is read.
            with open(result_files['bbox']) as f:
                anns = json.load(f)
            evaluator.detections = []
            evaluator.add(anns)
            evaluator.save()

# Each entry below describes one model to evaluate: its leaderboard name,
# the arXiv id to compare against, where to download its checkpoint, and
# the mmdetection config to build it from. 'paper_results' carries the
# paper-reported metrics (None when the paper does not report that row).
model_configs = []

# Results on R50-FPN with backbone (fixBN)
model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-50-FPN, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.1/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth',
     'weights_name': 'mask_rcnn_r50_fpn_1x_20181010-069fa190.pth',
     'config': './configs/gcnet/r50/mask_rcnn_r50_fpn_1x.py',
     'paper_results': {'box AP': 0.372, 'AP50': 0.590, 'AP75': 0.401}}
)
model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-50-FPN, 2x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.2/mask_rcnn_r50_fpn_2x-4615e866.pth',
     'weights_name': 'mask_rcnn_r50_fpn_2x-4615e866.pth',
     'config': './configs/gcnet/r50/mask_rcnn_r50_fpn_2x.py',
     'paper_results': None}
)
model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-50-FPN, GC r16, 2x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.3/mask_rcnn_r16_gcb_c3-c5_r50_fpn_2x-bf3a5059.pth',
     'weights_name': 'mask_rcnn_r16_gcb_c3-c5_r50_fpn_2x-bf3a5059.pth',
     'config': './configs/gcnet/r50/mask_rcnn_r16_gcb_c3-c5_r50_fpn_2x.py',
     'paper_results': {'box AP': 0.394, 'AP50': 0.616, 'AP75': 0.424}}
)
model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-50-FPN, GC r4, 2x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.3/mask_rcnn_r4_gcb_c3-c5_r50_fpn_2x-360c29f3.pth',
     'weights_name': 'mask_rcnn_r4_gcb_c3-c5_r50_fpn_2x-360c29f3.pth',
     'config': './configs/gcnet/r50/mask_rcnn_r4_gcb_c3-c5_r50_fpn_2x.py',
     'paper_results': {'box AP': 0.399, 'AP50': 0.622, 'AP75': 0.429}}
)

# Results on R101-FPN with backbone (fixBN, syncBN)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-101-FPN, fixBN, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.2/mask_rcnn_r101_fpn_1x.pth',
     'weights_name': 'mask_rcnn_r101_fpn_1x.pth',
     'config': './configs/gcnet/r101/mask_rcnn_r101_fpn_1x.py',
     'paper_results': None}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-101-FPN, syncBN, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.4/mask_rcnn_r101_fpn_syncbn_1x_20190602-b2a0e2b7.pth',
     'weights_name': 'mask_rcnn_r101_fpn_syncbn_1x_20190602-b2a0e2b7.pth',
     'config': './configs/gcnet/r101/backbone_syncbn/mask_rcnn_r101_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.398, 'AP50': 0.613, 'AP75': 0.429}}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-101-FPN, syncBN, GC r16, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.4/mask_rcnn_r16_gcb_c3-c5_r101_fpn_syncbn_1x_20190602-717e6dbd.pth',
     'weights_name': 'mask_rcnn_r16_gcb_c3-c5_r101_fpn_syncbn_1x_20190602-717e6dbd.pth',
     'config': './configs/gcnet/r101/backbone_syncbn/mask_rcnn_r16_gcb_c3-c5_r101_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.411, 'AP50': 0.636, 'AP75': 0.45}}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNet-101-FPN, syncBN, GC r4, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.4/mask_rcnn_r4_gcb_c3-c5_r101_fpn_syncbn_1x_20190602-a893c718.pth',
     'weights_name': 'mask_rcnn_r4_gcb_c3-c5_r101_fpn_syncbn_1x_20190602-a893c718.pth',
     'config': './configs/gcnet/r101/backbone_syncbn/mask_rcnn_r4_gcb_c3-c5_r101_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.417, 'AP50': 0.637, 'AP75': 0.455}}
)

# Results on X101-FPN with backbone (syncBN) - START HERE

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101-FPN, syncBN, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.5/mask_rcnn_x101_32x4d_fpn_syncbn_1x_20190602-bb8ae7e5.pth',
     'weights_name': 'mask_rcnn_x101_32x4d_fpn_syncbn_1x_20190602-bb8ae7e5.pth',
     'config': './configs/gcnet/x101/mask_rcnn_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.412, 'AP50': 0.63, 'AP75': 0.451}}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101-FPN, syncBN, GC r16, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.5/mask_rcnn_r16_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-c28edb53.pth',
     'weights_name': 'mask_rcnn_r16_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-c28edb53.pth',
     'config': './configs/gcnet/x101/mask_rcnn_r16_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.424, 'AP50': 0.646, 'AP75': 0.465}}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101-FPN, syncBN, GC r4, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.5/mask_rcnn_r4_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-930b3d51.pth',
     'weights_name': 'mask_rcnn_r4_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-930b3d51.pth',
     'config': './configs/gcnet/x101/mask_rcnn_r4_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.429, 'AP50': 0.652, 'AP75': 0.47}}
) 

# Results on X101-FPN with backbone + cascade (syncBN)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101-FPN, syncBN, cascade, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.5/cascade_mask_rcnn_x101_32x4d_fpn_syncbn_1x_20190602-63a800fb.pth',
     'weights_name': 'cascade_mask_rcnn_x101_32x4d_fpn_syncbn_1x_20190602-63a800fb.pth',
     'config': './configs/gcnet/x101/cascade/cascade_mask_rcnn_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.447, 'AP50': 0.63, 'AP75': 0.485}}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101-FPN, syncBN, cascade, GC r16, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.5/cascade_mask_rcnn_r16_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-3e168d88.pth',
     'weights_name': 'cascade_mask_rcnn_r16_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-3e168d88.pth',
     'config': './configs/gcnet/x101/cascade/cascade_mask_rcnn_r16_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.459, 'AP50': 0.648, 'AP75': 0.50}}
)

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101-FPN, syncBN, cascade, GC r4, 1x LR)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.5/cascade_mask_rcnn_r4_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-b579157f.pth',
     'weights_name': 'cascade_mask_rcnn_r4_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-b579157f.pth',
     'config': './configs/gcnet/x101/cascade/cascade_mask_rcnn_r4_gcb_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.465, 'AP50': 0.654, 'AP75': 0.507}}
)

# Cascade + DCN

model_configs.append(
    {'model_name': 'Mask R-CNN (ResNeXt-101 + DCN + cascade)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.6/cascade_mask_rcnn_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-9aa8c394.pth',
     'weights_name': 'cascade_mask_rcnn_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-9aa8c394.pth',
     'config': './configs/gcnet/x101/cascade/dcn/cascade_mask_rcnn_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.471, 'AP50': 0.661, 'AP75': 0.513}}
)

model_configs.append(
    {'model_name': 'GCNet (ResNeXt-101 + DCN + cascade + GC r4)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.6/cascade_mask_rcnn_r4_gcb_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-b4164f6b.1.pth',
     'weights_name': 'cascade_mask_rcnn_r4_gcb_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-b4164f6b.1.pth',
     'config': './configs/gcnet/x101/cascade/dcn/cascade_mask_rcnn_r4_gcb_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': {'box AP': 0.479, 'AP50': 0.669, 'AP75': 0.522}}
)

model_configs.append(
    {'model_name': 'GCNet (ResNeXt-101 + DCN + cascade + GC r16)', 
     'paper_arxiv_id': '1904.11492',
     'weights_url': 'https://github.com/deepparrot/GCNet/releases/download/0.7/cascade_mask_rcnn_r16_gcb_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-b86027a6.pth',
     'weights_name': 'cascade_mask_rcnn_r16_gcb_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x_20190602-b86027a6.pth',
     'config': './configs/gcnet/x101/cascade/dcn/cascade_mask_rcnn_r16_gcb_dconv_c3-c5_x101_32x4d_fpn_syncbn_1x.py',
     'paper_results': None}
)
            
# A (single-process) gloo process group is required here even for the
# non-distributed path, presumably because some mmcv/mmdet components call
# torch.distributed collectives — TODO confirm. Note: 'dist' was already
# imported at the top of the file; the redundant re-import was removed.
# NOTE(review): the file:// init method reuses /tmp/somefile; a stale file
# from an aborted run could interfere — verify against the sotabench runner.
dist.init_process_group('gloo', init_method='file:///tmp/somefile', rank=0, world_size=1)

# Evaluate every configured model sequentially on COCO val2017.
for model_config in model_configs:
    evaluate_model(model_name=model_config['model_name'],
                   paper_arxiv_id=model_config['paper_arxiv_id'],
                   weights_url=model_config['weights_url'],
                   weights_name=model_config['weights_name'],
                   paper_results=model_config['paper_results'],
                   config=model_config['config'])
STATUS
BUILD
COMMIT MESSAGE
RUN TIME
2h:35m:27s
Update sotabench.py
deepparrot   023e39f  ·  Oct 07 2019
1h:06m:13s
Update sotabench.py
deepparrot   45f2529  ·  Oct 03 2019
0h:50m:55s
Update sotabench.py
deepparrot   485eec3  ·  Oct 02 2019
1h:31m:58s
Update sotabench.py
deepparrot   4b6b13f  ·  Oct 02 2019
0h:09m:14s
0h:15m:28s
0h:52m:35s
Update sotabench.py
deepparrot   e1c57f4  ·  Oct 02 2019
0h:04m:34s
Update sotabench.py
deepparrot   493eff0  ·  Oct 01 2019
0h:12m:50s
Update sotabench.py
deepparrot   f29610b  ·  Sep 30 2019
0h:10m:57s
Update sotabench.py
deepparrot   bdf1ee3  ·  Sep 30 2019
0h:08m:19s
Update sotabench.py
deepparrot   c4dfa06  ·  Sep 30 2019
0h:25m:39s
Update sotabench_setup.sh
deepparrot   8135f97  ·  Sep 30 2019
0h:11m:49s
Update sotabench_setup.sh
deepparrot   8c177bc  ·  Sep 30 2019
0h:04m:31s
Update sotabench.py
deepparrot   83b53f7  ·  Sep 30 2019
0h:04m:47s
Add Apex
deepparrot   9827a75  ·  Sep 30 2019
0h:04m:54s
Update sotabench.py
deepparrot   df5da87  ·  Sep 30 2019
0h:29m:18s
Update sotabench.py
deepparrot   c05a896  ·  Sep 30 2019
unknown
Update sotabench.py
deepparrot   9f520e3  ·  Sep 30 2019
0h:08m:41s
0h:10m:30s
Update sotabench.py
deepparrot   95c721f  ·  Sep 30 2019
0h:17m:52s
test caching
deepparrot   b3e3a6a  ·  Sep 30 2019
0h:17m:33s
Update sotabench.py
deepparrot   734349b  ·  Sep 30 2019
0h:17m:33s
Update sotabench.py
deepparrot   ba40099  ·  Sep 30 2019
0h:19m:42s
Print hash...
deepparrot   bced6ee  ·  Sep 30 2019
0h:19m:41s
Update sotabench.py
deepparrot   7b9c4ef  ·  Sep 30 2019
0h:19m:41s
Update sotabench.py
deepparrot   aed4097  ·  Sep 30 2019
0h:07m:51s
Update sotabench.py
deepparrot   e47d9aa  ·  Sep 30 2019
0h:16m:50s
Update sotabench.py
deepparrot   43c1fcb  ·  Sep 30 2019
0h:16m:32s
Update sotabench.py
deepparrot   11b6b98  ·  Sep 30 2019
0h:08m:44s
Update sotabench.py
deepparrot   2ccf493  ·  Sep 30 2019
0h:08m:25s
0h:17m:40s
Update sotabench.py
deepparrot   8246622  ·  Sep 29 2019
0h:18m:13s
0h:08m:33s
0h:17m:43s
Update sotabench.py
deepparrot   98f8e07  ·  Sep 21 2019
0h:15m:46s
Update sotabench.py
deepparrot   a38617d  ·  Sep 21 2019
0h:07m:59s
Update sotabench.py
deepparrot   7bc913f  ·  Sep 21 2019
0h:18m:17s
Update sotabench.py
deepparrot   580df0d  ·  Sep 21 2019
0h:08m:25s
Update sotabench.py
deepparrot   ad5a13d  ·  Sep 21 2019
0h:07m:30s
0h:07m:48s
0h:07m:35s
0h:08m:26s
Update sotabench_setup.sh
deepparrot   94bb971  ·  Sep 20 2019
0h:04m:46s
0h:03m:37s
0h:03m:19s
0h:03m:13s
Create sotabench_setup.sh
deepparrot   2b80545  ·  Sep 20 2019
0h:03m:10s
Update sotabench.py
deepparrot   d9cccbc  ·  Sep 20 2019
0h:02m:53s
Update compile.sh
deepparrot   577b628  ·  Sep 20 2019
0h:05m:13s
Update __init__.py
deepparrot   6635d6d  ·  Sep 20 2019
0h:02m:59s
Update sotabench.py
deepparrot   50bdfa3  ·  Sep 20 2019
0h:03m:04s
Update requirements.txt
deepparrot   9969465  ·  Sep 20 2019
0h:02m:58s
0h:02m:56s
Update requirements.txt
deepparrot   9132197  ·  Sep 20 2019
0h:02m:49s
Update requirements.txt
deepparrot   73488ed  ·  Sep 20 2019
0h:03m:04s
0h:03m:01s