# avg_iou.py
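# Evaluates every ResNet-50 checkpoint in `checkpoint_folder` on the validation set:
# for each image, a bounding box is derived from the input gradient and compared with the
# ground-truth box, and each checkpoint's mean IoU and localization accuracy
# (correct class with IoU >= 0.5) are pickled to `result_file`.
# Checkpoints are evaluated in parallel, four processes at a time (one per GPU).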
import numpy as np
import torch
import torch.nn as nn
from torchvision import transforms
from bounding_box import BoundingBox
import os
import time
import pickle
from pprint import pprint
from utils import check_overlap, get_iou, bounding_box_grad, load_model
from predict import predict
import multiprocessing
start = time.time()
# constants
checkpoint_folder = '/home/user/Models/Experiment-4/All/resnet50'
val_folder_path = '/home/user/Models/Experiment-4/data/val'
result_file = 'iou/avg_iou_loc.pickle'
images_text_file = 'data/images.txt'
bounding_box_file = 'data/bounding_boxes.txt'
height = 224
width = 224
num_channels = 3
num_labels = 200
all_models = []
for file_name in os.listdir(checkpoint_folder):
    if file_name.endswith('.pth'):
        all_models.append(file_name)
print('Running...')
print(f'Total models: {len(all_models)}')
transform = transforms.Compose([
    transforms.Resize((height, width)),
    transforms.ToTensor(),
    # standard ImageNet mean/std normalization
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
subfolders = sorted(os.listdir(val_folder_path))
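# BoundingBox looks up the ground-truth box for each validation image from the
# images.txt / bounding_boxes.txt annotation files (CUB-200-style layout assumed)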
bbox = BoundingBox(val_folder_path, images_text_file, bounding_box_file, height, width)
def calculate_iou(m_name, gp_id, r_dict):
    """Evaluate one checkpoint: store its mean IoU and localization accuracy in r_dict."""
    checkpoint_path = checkpoint_folder + '/' + m_name
    criterion = nn.CrossEntropyLoss()
    model = load_model(checkpoint_path, num_labels, gp_id)
    average_iou = []
    correct = 0
    num_img = 0
    for folder_name in subfolders:
        # class folders are named '<label>.<class_name>'; labels on disk are 1-indexed
        label = int(folder_name.split('.')[0]) - 1
        target_tensor = torch.tensor([label])
        image_names = os.listdir(val_folder_path + '/' + folder_name)
        for img_name in image_names:
            img_path = val_folder_path + '/' + folder_name + '/' + img_name
            x1_gt, y1_gt, x2_gt, y2_gt = bbox.get_bbox_from_path(img_path)
            gt = [x1_gt, y1_gt, x2_gt, y2_gt]
            prediction, grad = predict(model, img_path, transform, criterion, target_tensor,
                                       height, width, num_channels, gp_id)
            # derive a predicted bounding box from the input gradient
            pred = bounding_box_grad(grad)
            overlap = check_overlap(pred, gt)
            if overlap:
                iou = get_iou(pred, gt)
            else:
                iou = 0.0
            average_iou.append(iou)
            num_img += 1
            # localization counts as correct only if the class is right and IoU >= 0.5
            if prediction == label and iou >= 0.5:
                correct += 1
    # print(f'{m_name}: {np.mean(average_iou)}')
    loc_acc = float(correct) / float(num_img)
    avg_iou = np.mean(average_iou)
    r_dict[m_name] = [avg_iou, loc_acc]
def generator_from_list(lst, n):
    # yield successive n-sized chunks from the given list
    for i in range(0, len(lst), n):
        yield lst[i:i + n]
manager = multiprocessing.Manager()
return_dict = manager.dict()
jobs = []
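# one worker process per checkpoint; gpu_id cycles through 0-3 so the four processes
# running at any time land on different GPUs (assumes 4 GPUs are available)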
for idx, model_name in enumerate(all_models):
    gpu_id = str(idx % 4)
    p = multiprocessing.Process(target=calculate_iou, args=(model_name, gpu_id, return_dict))
    jobs.append(p)
n = 4
counter = 0
for chunk in generator_from_list(jobs, n):
    for proc in chunk:
        proc.start()
    for proc in chunk:
        proc.join()
    counter += len(chunk)
    print(f'{counter} models done.')
with open(result_file, 'wb') as write_file:
    pickle.dump(dict(return_dict), write_file)
pprint(dict(return_dict))
end = time.time()
elapsed_minutes = (end - start) / 60
print(f'elapsed-minutes: {round(elapsed_minutes, 2)}')