commit e99bda0c54 (parent 8ed89d8c88)
Author: Glenn Jocher
Date:   2018-09-02 11:26:56 +02:00

3 changed files with 16 additions and 14 deletions

test.py

@@ -87,11 +87,12 @@ for batch_i, (imgs, targets) in enumerate(dataloader):
             correct.extend([0 for _ in range(len(detections))])
         else:
             # Extract target boxes as (x1, y1, x2, y2)
-            target_boxes = torch.FloatTensor(annotations[:, 1:].shape)
-            target_boxes[:, 0] = (annotations[:, 1] - annotations[:, 3] / 2)
-            target_boxes[:, 1] = (annotations[:, 2] - annotations[:, 4] / 2)
-            target_boxes[:, 2] = (annotations[:, 1] + annotations[:, 3] / 2)
-            target_boxes[:, 3] = (annotations[:, 2] + annotations[:, 4] / 2)
+            # target_boxes = torch.FloatTensor(annotations[:, 1:].shape)
+            # target_boxes[:, 0] = (annotations[:, 1] - annotations[:, 3] / 2)
+            # target_boxes[:, 1] = (annotations[:, 2] - annotations[:, 4] / 2)
+            # target_boxes[:, 2] = (annotations[:, 1] + annotations[:, 3] / 2)
+            # target_boxes[:, 3] = (annotations[:, 2] + annotations[:, 4] / 2)
+            target_boxes = xywh2xyxy(annotations[:, 1:5])
             target_boxes *= opt.img_size
             detected = []
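A minimal sketch of what this refactor does: the five manual corner computations collapse into a single call to the xywh2xyxy helper patched in the utils diff below. The dummy annotations row and the 416 image size are assumptions for illustration, not data from the repo.

    import numpy as np

    def xywh2xyxy(x):  # same conversion test.py now calls
        y = np.zeros(x.shape)
        y[:, 0] = x[:, 0] - x[:, 2] / 2  # x1 = xc - w/2
        y[:, 1] = x[:, 1] - x[:, 3] / 2  # y1 = yc - h/2
        y[:, 2] = x[:, 0] + x[:, 2] / 2  # x2 = xc + w/2
        y[:, 3] = x[:, 1] + x[:, 3] / 2  # y2 = yc + h/2
        return y

    # one label row: class, x_center, y_center, w, h (normalized 0-1)
    annotations = np.array([[0.0, 0.5, 0.5, 0.2, 0.4]])
    target_boxes = xywh2xyxy(annotations[:, 1:5]) * 416  # scale to pixels
    print(target_boxes)  # [[166.4 124.8 249.6 291.2]]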

utils/datasets.py

@@ -23,6 +23,7 @@ class ImageFolder():  # for eval-only
         self.nB = math.ceil(self.nF / batch_size)  # number of batches
         self.batch_size = batch_size
         self.height = img_size
+        assert self.nF > 0, 'No images found in path %s' % path

         # RGB normalization values
@@ -65,7 +66,7 @@ class ListDataset():  # for training
         with open(path, 'r') as file:
             self.img_files = file.readlines()
-        if platform == 'darwin':  # macos
+        if platform == 'darwin':  # MacOS (local)
             self.img_files = [path.replace('\n', '').replace('/images', '/Users/glennjocher/Downloads/DATA/coco/images')
                               for path in self.img_files]
         else:  # linux (gcp cloud)
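A small self-contained sketch of the platform switch above: on the local macOS machine, dataset paths read from the list file are rewritten to the local COCO download, while other platforms keep the listed paths. The sample list entry and the else branch body (which this hunk does not show) are assumptions.

    from sys import platform

    # hypothetical entry read from the training list file
    img_files = ['/images/train2014/000000000009.jpg\n']
    if platform == 'darwin':  # MacOS (local)
        img_files = [p.replace('\n', '').replace(
            '/images', '/Users/glennjocher/Downloads/DATA/coco/images')
            for p in img_files]
    else:  # linux (gcp cloud); branch body not shown in this hunk
        img_files = [p.replace('\n', '') for p in img_files]  # assumption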
@@ -77,10 +78,10 @@ class ListDataset():  # for training
         self.nF = len(self.img_files)  # number of image files
         self.nB = math.ceil(self.nF / batch_size)  # number of batches
         self.batch_size = batch_size
-        # assert self.nB > 0, 'No images found in path %s' % path
         self.height = img_size
+        assert self.nB > 0, 'No images found in path %s' % path

         # RGB normalization values
         # self.rgb_mean = np.array([60.134, 49.697, 40.746], dtype=np.float32).reshape((1, 3, 1, 1))
         # self.rgb_std = np.array([29.99, 24.498, 22.046], dtype=np.float32).reshape((1, 3, 1, 1))
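Both new asserts follow the same fail-fast idea: validate the file list at construction time instead of iterating over an empty dataset later. Since nB = math.ceil(nF / batch_size), nB > 0 exactly when nF > 0, so the two checks are equivalent. A sketch assuming a glob-based listing with a hypothetical path:

    import glob
    import math

    path = 'data/samples'  # hypothetical image directory
    batch_size = 16
    img_files = sorted(glob.glob('%s/*.*' % path))
    nF = len(img_files)              # number of image files
    nB = math.ceil(nF / batch_size)  # number of batches

    # Raises immediately with a readable message instead of yielding
    # zero batches later in the training loop:
    assert nF > 0, 'No images found in path %s' % path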

utils/utils.py

@@ -62,7 +62,7 @@ def weights_init_normal(m):
 def xyxy2xywh(x):  # Convert bounding box format from [x1, y1, x2, y2] to [x, y, w, h]
-    y = np.zeros(x.shape)
+    y = torch.zeros(x.shape) if x.dtype is torch.float32 else np.zeros(x.shape)
     y[:, 0] = (x[:, 0] + x[:, 2]) / 2
     y[:, 1] = (x[:, 1] + x[:, 3]) / 2
     y[:, 2] = x[:, 2] - x[:, 0]
@@ -71,11 +71,11 @@ def xyxy2xywh(x):  # Convert bounding box format from [x1, y1, x2, y2] to [x, y,
 def xywh2xyxy(x):  # Convert bounding box format from [x, y, w, h] to [x1, y1, x2, y2]
-    y = np.zeros(x.shape)
-    y[:, 0] = (x[:, 1] - x[:, 3] / 2)
-    y[:, 1] = (x[:, 2] - x[:, 4] / 2)
-    y[:, 2] = (x[:, 1] + x[:, 3] / 2)
-    y[:, 3] = (x[:, 2] + x[:, 4] / 2)
+    y = torch.zeros(x.shape) if x.dtype is torch.float32 else np.zeros(x.shape)
+    y[:, 0] = (x[:, 0] - x[:, 2] / 2)
+    y[:, 1] = (x[:, 1] - x[:, 3] / 2)
+    y[:, 2] = (x[:, 0] + x[:, 2] / 2)
+    y[:, 3] = (x[:, 1] + x[:, 3] / 2)
     return y
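The five index changes in xywh2xyxy fix a column-offset bug: the old code read columns 1-4, which assumes a leading class column, but callers such as test.py above now slice the class column off first and pass a bare 4-column box array. A round-trip sketch that restates both patched functions so it runs standalone (the sample boxes are made up); note the dtype test dispatches to torch.zeros only for float32 tensors, since a numpy dtype never matches torch.float32:

    import numpy as np
    import torch

    def xyxy2xywh(x):  # corners -> center + size
        y = torch.zeros(x.shape) if x.dtype is torch.float32 else np.zeros(x.shape)
        y[:, 0] = (x[:, 0] + x[:, 2]) / 2  # x center
        y[:, 1] = (x[:, 1] + x[:, 3]) / 2  # y center
        y[:, 2] = x[:, 2] - x[:, 0]        # width
        y[:, 3] = x[:, 3] - x[:, 1]        # height
        return y

    def xywh2xyxy(x):  # center + size -> corners (indices fixed above)
        y = torch.zeros(x.shape) if x.dtype is torch.float32 else np.zeros(x.shape)
        y[:, 0] = x[:, 0] - x[:, 2] / 2
        y[:, 1] = x[:, 1] - x[:, 3] / 2
        y[:, 2] = x[:, 0] + x[:, 2] / 2
        y[:, 3] = x[:, 1] + x[:, 3] / 2
        return y

    b = torch.tensor([[10., 20., 50., 80.]])           # float32 tensor
    assert torch.equal(xywh2xyxy(xyxy2xywh(b)), b)     # round trip (torch)

    b = np.array([[10., 20., 50., 80.]])               # float64 array
    assert np.array_equal(xywh2xyxy(xyxy2xywh(b)), b)  # round trip (numpy)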