Commit 43a5d4568a ("update")
Parent: eff752bba2
@@ -633,14 +633,14 @@ activation=leaky
 size=1
 stride=1
 pad=1
-filters=72
+filters=78
 activation=linear


 [yolo]
 mask = 6,7,8
 anchors=26,16, 19,45, 36,27, 54,32, 34,80, 60,49, 74,71, 96,105, 135,145
-classes=19
+classes=21
 num=9
 jitter=.3
 ignore_thresh = .7
@@ -719,14 +719,14 @@ activation=leaky
 size=1
 stride=1
 pad=1
-filters=72
+filters=78
 activation=linear


 [yolo]
 mask = 3,4,5
 anchors=26,16, 19,45, 36,27, 54,32, 34,80, 60,49, 74,71, 96,105, 135,145
-classes=19
+classes=21
 num=9
 jitter=.3
 ignore_thresh = .7
@@ -806,14 +806,14 @@ activation=leaky
 size=1
 stride=1
 pad=1
-filters=72
+filters=78
 activation=linear


 [yolo]
 mask = 0,1,2
 anchors=26,16, 19,45, 36,27, 54,32, 34,80, 60,49, 74,71, 96,105, 135,145
-classes=19
+classes=21
 num=9
 jitter=.3
 ignore_thresh = .7
@@ -640,7 +640,7 @@ activation=linear
 [yolo]
 mask = 6,7,8
 anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
-classes=80
+classes=19
 num=9
 jitter=.3
 ignore_thresh = .7
@@ -726,7 +726,7 @@ activation=linear
 [yolo]
 mask = 3,4,5
 anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
-classes=80
+classes=19
 num=9
 jitter=.3
 ignore_thresh = .7
@@ -813,7 +813,7 @@ activation=linear
 [yolo]
 mask = 0,1,2
 anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
-classes=80
+classes=19
 num=9
 jitter=.3
 ignore_thresh = .7
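In YOLOv3 cfg files the filters value of the convolutional layer feeding each [yolo] block must equal (classes + 5) * masks per scale. With the usual 3 masks per scale, 21 classes requires (21+5)*3 = 78 (the filters=72 -> 78 edits in the first file above), while 19 classes requires (19+5)*3 = 72, which the second file, retargeted here from the stock 80-class config, needs in its corresponding conv layers. A quick sanity check:

def yolo_filters(num_classes, masks_per_scale=3):
    # each anchor predicts x, y, w, h, objectness plus one score per class
    return (num_classes + 5) * masks_per_scale

assert yolo_filters(19) == 72
assert yolo_filters(21) == 78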
@@ -1,6 +0,0 @@
-classes=18
-train=/home/michall/yolov3/data/widok01-11_train_labels.txt
-valid=/home/michall/yolov3/data/widok01-11_test_labels.txt
-names=/home/michall/yolov3/data/widok01-11.names
-backup=backup/
-eval=coco
Binary file not shown.
@@ -1,6 +0,0 @@
-classes=19
-train=/home/tomekb/yolov3/data/widok_01_19/widok_01_19_train_labels.txt
-valid=/home/tomekb/yolov3/data/widok_01_19/widok_01_19_test_labels.txt
-names=/home/tomekb/yolov3/data/widok_01_19/widok_01_19.names
-backup=backup/
-eval=coco
@@ -0,0 +1,6 @@
+classes=21
+train=./data/widok_01_21/widok_01_21_train_labels.txt
+valid=./data/widok_01_21/widok_01_21_test_labels.txt
+names=./data/widok_01_21/widok_01_21.names
+backup=backup/
+eval=coco
@@ -0,0 +1,21 @@
+1. rower
+2. motocykl
+3. osobowy
+4. osobowy pickup
+5. osobowy dostawczy
+6. osobowy van 7-9
+7. dostawczy blaszak / BUS sredni dostawczy
+8. dostawczy zabudowany
+9. dostawczy pickup (w tym pomoc drog.)
+10. dostawczy VAN (osobowy) / autobus maly 10-24 / BUS sredni osobowy
+11. autobus miejski / autobus turystyczny i inny
+12. ciezarowy pow. 3,5t zabudowany / ciezarowy z widoczna przyczepa
+13. ciezarowy pow. 3,5t otwarty (w tym duzy holownik)
+14. ciezarowy pow. 3,5t inny (wanna, gruszka, dzwig itp.)
+15. ciagnik siodlowy z widoczna naczepa / ciagnik siodlowy bez naczepy
+16. inne pojazdy silnikowe / camper / woz strazacki / ciagnik roliczy, koparka, spychacz
+17. przyczepa
+18. BUS-karetka/policja
+19. BUS brygadowka
+20. BUS sredni dostawczy
+21. BUS sredni osobowy
@@ -0,0 +1,16 @@
+1. rower
+2. motocykl
+3. osobowy
+4. osobowy pickup
+5. osobowy dostawczy
+6. osobowy van 7-9
+7. dostawczy blaszak / BUS sredni dostawczy
+8. dostawczy zabudowany
+9. dostawczy pickup (w tym pomoc drog.)
+10. dostawczy VAN (osobowy) / autobus maly 10-24 / BUS sredni osobowy
+11. autobus miejski / autobus turystyczny i inny
+12. ciezarowy pow. 3,5t zabudowany / ciezarowy z widoczna przyczepa
+13. ciezarowy pow. 3,5t otwarty / (w tym duzy holownik)
+14. ciezarowy pow. 3,5t inny (wanna, gruszka, dzwig itp.)
+15. ciagnik siodlowy z widoczna naczepa /
+ciagnik siodlowy bez naczepy
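These names files are read line by line, so line order defines the 0-based class ids used in the label files and in map_class_name_to_id further down; the 21-entry list matches classes=21 and filters=78 above, while the 16-line file appears to keep an earlier, coarser grouping (its entry 15 wraps onto a second line in the file itself).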
Binary file not shown.
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -133,7 +133,7 @@ def detect(save_img=False):
 plot_one_box(xyxy, im0, label=label, color=colors[int(cls)])

 # Print time (inference + NMS)
-print('%sDone. (%.3fs)' % (s, t2 - t1))
+#print('%sDone. (%.3fs)' % (s, t2 - t1))

 # Stream results
 if view_img:
@@ -162,7 +162,7 @@ def detect(save_img=False):
 if platform == 'darwin':  # MacOS
     os.system('open ' + save_path)

-print('Done. (%.3fs)' % (time.time() - t0))
+#print('Done. (%.3fs)' % (time.time() - t0))


 if __name__ == '__main__':
@@ -186,7 +186,7 @@ if __name__ == '__main__':
 opt = parser.parse_args()
 opt.cfg = check_file(opt.cfg)  # check file
 opt.names = check_file(opt.names)  # check file
-print(opt)
+#print(opt)

 with torch.no_grad():
     detect(True)
@@ -1,11 +1,11 @@
 train:
-  epochs: 1200
-  batch-size: 3
+  epochs: 200
+  batch-size: 14 #
   cfg: ./cfg/yolov3-spp-19cls.cfg
   data: ./data/widok_01_19.data
   multi-scale: false
-  img-size: '512 1920'
-  rect: true
+  img-size: '768 1280'
+  rect: false
   resume: false
   nosave: false
   notest: false
@@ -16,7 +16,8 @@ train:
   device: 1
   adam: true
   single-cls: false
-  save-every-nth-epoch: 50
+  snapshot-every: 50
+  freeze-layers: true

   # other hyperparameters
   other-hyps:
@@ -36,14 +37,14 @@ train:
     hsv_v: 0.36  # image HSV-Value augmentation (fraction)
     degrees: 0  # 1.98 * 0  # image rotation (+/- deg)
     translate: 0  # 0.05 * 0  # image translation (+/- fraction)
     scale: 0  # 0.05 * 0  # image scale (+/- gain)
     shear: 0  # 0.641 * 0  # image shear (+/- deg)

 experiments:
   dir: ./experiments
 detect:
   source: /home/tomekb/yolov3/data/widok_01_19/widok_01_19_test_labels.txt
-  test-img-size: 1920
+  test-img-size: 1024
   conf-thres: 0.3
   iou-thres: 0.6
   classes:
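The renamed snapshot-every key and the new freeze-layers key mirror the --snapshot-every and --freeze-layers flags added to train.py later in this commit, and img-size '768 1280' is the min/max pair that get_args_string (removed from config.py just below) splits into the two positional values of --img-size.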
@@ -1,29 +1,7 @@
 import yaml


-class Args:
-
-    def get_args_string(self) -> str:
-        string = ''
-        for key, value in self.__dict__.items():
-            if not isinstance(value, Configuration.Train.OtherHyps) and value is not None:
-                if key == 'img-size':
-                    string += f' --{key} {value.split(" ")[0]} {value.split(" ")[1]}'
-                elif type(value) == bool:
-                    if value:
-                        string += f" --{key}"
-                    else:
-                        continue
-                elif type(value) in [int, str] and value != '':
-                    string += f' --{key} {value}'
-                else:
-                    raise Exception(f"Cannot parse argument {key} {value}")
-
-        return string
-
-
 class Configuration:
-    class Train(Args):
+    class Train:

         class OtherHyps:
             def __init__(self, config_file) -> None:
@@ -36,29 +14,56 @@ class Configuration:
                 if key != 'other-hyps':
                     self.__dict__[key] = value

-    class Experiments(Args):
+    class Experiments:
         def __init__(self, config_file) -> None:
             for key, value in config_file['experiments'].items():
                 self.__dict__[key] = value

-    class Detect(Args):
+    class Detect:
         def __init__(self, config_file) -> None:
             for key, value in config_file['detect'].items():
                 self.__dict__[key] = value

-    class ConfussionMatrix(Args):
+    class ConfussionMatrix:
         def __init__(self, config_file) -> None:
             for key, value in config_file['confussion-matrix'].items():
                 self.__dict__[key] = value

-    def __init__(self, config_path='/home/tomekb/yolov3/our_scripts/config_bayes.yml') -> None:
+    class Bayes:
+        def __init__(self, config_file) -> None:
+            for key, value in config_file['bayes'].items():
+                self.__dict__[key] = value
+
+    def __init__(self, config_path='./config_bayes.yml') -> None:
         self.config_path = config_path
         file = yaml.load(open(config_path, 'r'), Loader=yaml.Loader)
         self.train = self.Train(file)
         self.experiments = self.Experiments(file)
         self.detect = self.Detect(file)
         self.confussion_matrix = self.ConfussionMatrix(file)
+        self.bayes = self.Bayes(file)
+
+    def get_bayes_bounds(self) -> list:
+        result = []
+        dicts = {**self.train.__dict__, **self.train.other_hyps.__dict__, **self.detect.__dict__}
+        for key, value in dicts.items():
+            if type(value) not in [None, Configuration.Train.OtherHyps] and type(value) == dict:
+                if value['type'] == 'continuous':  # continuous value
+                    val = (value['min'], value['max'])
+                    item = {'name': key, 'type': value['type'], 'domain': val}
+                elif value['type'] == 'discrete' and 'step' in value:
+                    val = tuple(n for n in range(value['min'], value['max'], value['step']))
+                    item = {'name': key, 'type': value['type'], 'domain': val}
+                elif value['type'] == 'discrete':  # discrete values without step
+                    val = tuple(n for n in value['values'])
+                    item = {'name': key, 'type': value['type'], 'domain': val}
+                else:  # unknown type
+                    raise Exception("Invalid type", value['type'])
+
+                result.append(item)
+        return result


 if __name__ == '__main__':
     config = Configuration()
     print(config)
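get_bayes_bounds translates every hyperparameter spec in the YAML into the {'name', 'type', 'domain'} dictionaries that GPyOpt expects. As an illustration, with values taken from the config_bayes.yml diff below (output shape inferred from the code above):

bounds = [
    {'name': 'lr0', 'type': 'continuous', 'domain': (0.000001, 0.1)},
    {'name': 'img-size-start', 'type': 'discrete',
     'domain': (512, 576, 640, 704, 768, 832, 896, 960, 1024)},
    {'name': 'rect', 'type': 'discrete', 'domain': (False,)},
]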
@@ -1,32 +1,47 @@
+bayes:
+  iterations: 2
 train:
-  epochs: 100
+  epochs:
+    type: discrete
+    values: [10]
   batch-size:
     type: discrete
-    values: 1,2,3,4
-  cfg: ./cfg/yolov3-spp-19cls.cfg
-  data: ./data/widok_01_19.data
+    #values: [128]
+    min: 1
+    max: 5
+    step: 1
+  cfg: ./cfg/yolov3-spp-21cls.cfg
+  data: ./data/widok_01_21.data
   multi-scale:
     type: discrete
-    values: true, false
+    values: [true, false]
-  img-size:
+  img-size-start:
     type: discrete
-    values: (512, 1280),(576, 1280),(640, 1280),(704, 1280),(768, 1280),(832,1280),(960, 1280),(1024, 1280)  # all combinations satisfying img-size-min < img-size-max would have to be considered, and there are a great many of them
-  rect: false
+    min: 512
+    max: 1088
+    step: 64
+  img-size-end:
     type: discrete
-    values: true,false
+    min: 512
+    max: 1088
+    step: 64
+  rect:
+    type: discrete
+    values: [false]
   resume: false
   nosave: false
   notest: false
   evolve: false
   bucket:
   cache-images: false
-  weights: /home/tomekb/yolov3/weights/yolov3-spp-ultralytics.pt
+  weights: ./weights/yolov3-spp-ultralytics.pt
   device: 1
-  adam: true
+  adam:
+    type: discrete
+    values: [true]
   single-cls: false
-  snapshot-every: 50
+  snapshot-every:
   freeze-layers: true

   other-hyps:
     giou:
       type: continuous
@@ -52,8 +67,14 @@ train:
       type: continuous
       min: 0.0
      max: 1.0
-    lr0: 0.01  # initial learning rate (SGD=5E-3 Adam=5E-4)  # the relation lr0 < lrf has to be taken into account, which is why I am not changing it
-    lrf: 0.0005  # final learning rate (with cos scheduler)
+    lr0:
+      type: continuous
+      min: 0.000001
+      max: 0.1
+    lrf:
+      type: continuous
+      min: 0.000001
+      max: 0.1
     momentum:
       type: continuous
       min: 0.0
@@ -70,7 +91,7 @@ train:
       type: continuous
       min: 0.0
       max: 1.0
-    hsv_s: 0.678
+    hsv_s:
       type: continuous
       min: 0.0
       max: 1.0
@@ -97,10 +118,12 @@ train:
 experiments:
   dir: ./experiments
 detect:
-  source: /home/tomekb/yolov3/data/widok_01_19/widok_01_19_test_labels.txt
+  source: ./data/widok_01_21/widok_01_21_test_labels.txt
   test-img-size:
     type: discrete
-    values: 512,576,640,704,768,832,896,960,1024,1088,1152,1216,1280,
+    min: 512
+    max: 1088
+    step: 64
   conf-thres:
     type: continuous
     min: 0.0
@@ -113,4 +136,4 @@ detect:
   agnostic-nms:
   augment:
 confussion-matrix:
-  labels-dir: /home/tomekb/yolov3/data/widok_01_19/widok_01_19_labels
+  labels-dir: ./data/widok_01_21/widok_01_21_labels
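One subtlety of the new min/max/step encoding: get_bayes_bounds expands it with Python's range, whose upper bound is exclusive, so max: 1088 itself is never sampled:

tuple(n for n in range(512, 1088, 64))
# -> (512, 576, 640, 704, 768, 832, 896, 960, 1024)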
@@ -158,10 +158,10 @@ async function main() {
     tsvRows.push([ names[row], ...(summaryRows.map(r => summaryPart[r] && (summaryPart[r] / sum).toFixed(2)))].join('\t'))
   }

-  const allLabeled = rows.slice(0, -1).map(r => summary.sum[r]).reduce((a, b) => a + b, 0)
-  const allDetected = rows.slice(0, -1).map(r => summary[r].sum).reduce((a, b) => a + b, 0)
-  const falseNegatives = rows.slice(0, -1).map(r => summary.n[r] || 0).reduce((a, b) => a + b, 0)
-  const falsePositives = rows.slice(0, -1).map(r => summary[r].n || 0).reduce((a, b) => a + b, 0)
+  const allDetected = rows.slice(0, -1).map(r => summary.sum[r]).reduce((a, b) => a + b, 0)
+  const allLabeled = rows.slice(0, -1).map(r => summary[r].sum).reduce((a, b) => a + b, 0)
+  const falsePositives = rows.slice(0, -1).map(r => summary.n[r] || 0).reduce((a, b) => a + b, 0)
+  const falseNegatives = rows.slice(0, -1).map(r => summary[r].n || 0).reduce((a, b) => a + b, 0)
  const right = rows.slice(0, -1).map(r => summary[r][r] || 0).reduce((a, b) => a + b, 0)
  const mistakes = rows.slice(0, -1).map(a => rows.slice(0, -1).map(b => (a!=b && summary[a][b]) || 0).reduce((a, b) => a + b, 0)).reduce((a, b) => a + b, 0)
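Judging from the indexing, summary.sum[r] aggregates one axis of the confusion matrix and summary[r].sum the other; the hunk keeps both expressions and only swaps the names bound to them, which suggests the labeled/detected totals and the false-positive/false-negative counts had previously been attached to the wrong axes.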
@@ -46,7 +46,7 @@ def map_class_name_to_id(class_name, xml_document, class_distribution):
     elif class_name in ['6. osobowy van 7-9']:
         class_distribution[5] += 1
         return 5
-    elif class_name in ['7. dostawczy blaszak', '27. BUS sredni dostawczy']:
+    elif class_name in ['7. dostawczy blaszak']:
         class_distribution[6] += 1
         return 6
     elif class_name in ['8. dostawczy zabudowany']:
@@ -55,7 +55,7 @@ def map_class_name_to_id(class_name, xml_document, class_distribution):
     elif class_name in ['9. dostawczy pickup (w tym pomoc drog.)']:
         class_distribution[8] += 1
         return 8
-    elif class_name in ['10. dostawczy VAN (osobowy)', '11. autobus maly 10-24', '28. BUS sredni osobowy']:
+    elif class_name in ['10. dostawczy VAN (osobowy)', '11. autobus maly 10-24']:
         class_distribution[9] += 1
         return 9
     elif class_name in ['12. autobus miejski', '13. autobus turystyczny i inny']:
@@ -85,13 +85,19 @@ def map_class_name_to_id(class_name, xml_document, class_distribution):
     elif class_name in ['26. BUS brygadowka']:
         class_distribution[18] += 1
         return 18
+    elif class_name in ['27. BUS sredni dostawczy']:
+        class_distribution[19] += 1
+        return 19
+    elif class_name in ['28. BUS sredni osobowy']:
+        class_distribution[20] += 1
+        return 20
     else:
         raise Exception('Unknown Class ', xml_document, class_name)
         #print(f'{xml_document.split("/")[-1]} {class_name}')


 def generate_txt_from_xml():
-    class_distribution = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
+    class_distribution = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

     filepaths = glob(join(annotations , '*.xml'))
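Classes '27. BUS sredni dostawczy' and '28. BUS sredni osobowy' were previously folded into ids 6 and 9; giving them their own ids 19 and 20 is what grows the dataset from 19 to 21 classes, consistent with classes=21, filters=78, and the 21-entry names file above (class_distribution is widened to 21 slots accordingly).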
@@ -1,4 +1,3 @@
-import argparse
 import datetime
 import glob
 import io
@@ -12,41 +11,44 @@ from config import Configuration

 def call_training_script(config):
     cmd = '/home/tomekb/miniconda3/envs/conda3.7/bin/python -u /home/tomekb/yolov3/train.py '
-    cmd += config.train.get_args_string()
+    cmd += f"--experiment-dir {config.experiments.dir}"
+    cmd += config.train.get_args_string()  # getting rest of train arguments

     print("_______ CALLING TRAINING SCRIPT _______")
     print(cmd)

-    os.chdir('..')
-    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,shell=True)
+    os.chdir('..')  # change to project root directory
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
     for line in io.TextIOWrapper(process.stdout, encoding="utf-8"):  # print output of training process to console
         print(line)
     return cmd


 def move_training_results_to_experiments_dir(config):
-    training_results_dir_path = os.path.join(config.experiments.dir, datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
+    training_results_dir_path = os.path.join(config.experiments.dir, datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))  # creating directory according to pattern, e.g.: 2020-06-30_17-52-19

     print("_______ CALLING MOVING RESULTS _______")
     print(f"MOVING RESUTLS TO {training_results_dir_path}")

     os.mkdir(training_results_dir_path)

     weights_path = os.path.join(training_results_dir_path, 'best.pt')
-    shutil.move('/home/tomekb/yolov3/weights/best.pt', weights_path) #move best weights
+    shutil.move('/home/tomekb/yolov3/weights/best.pt', weights_path)  # move best weights

     names_path = open(config.train.data).readlines()[3].split('=')[-1].rstrip()  # read names path from file
     names_file_name = ntpath.basename(names_path)
     experiment_names_path = os.path.join(training_results_dir_path, names_file_name)
-    shutil.copy(names_path, experiment_names_path)  # copy names to created experiment dir with training results
+    shutil.copy(names_path, experiment_names_path)  # copy names file from *.data file to created experiment dir with training results

     tensorboard_dir = './runs'
     last_modified_tensorboard_dir = max(glob.glob(os.path.join(tensorboard_dir, '*/')), key=os.path.getmtime)
-    shutil.move(last_modified_tensorboard_dir, os.path.join(training_results_dir_path)) #saving related tensorboard dir
+    shutil.move(last_modified_tensorboard_dir, os.path.join(training_results_dir_path))  # saving related tensorboard dir

-    shutil.copy2(config.config_path, training_results_dir_path) #copying configuration yaml
-
-    #for test purposes only
-    #shutil.copy2('/home/tomekb/yolov3/experiments/1/best.pt', training_results_dir_path)
+    shutil.copy2(config.config_path, training_results_dir_path)  # copying configuration yaml
+
+    # for test purposes only
+    # shutil.copy2('/home/tomekb/yolov3/experiments/1/best.pt', training_results_dir_path)

     return weights_path, experiment_names_path, training_results_dir_path
@@ -69,11 +71,10 @@ def call_detection_script(config, weights_path, names_path, dir):

     cmd = " ".join(cmd.split())

-
     print("_______ CALLING DETECTION SCRIPT _______")
     print(cmd)

-    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,shell=True)
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
     for line in io.TextIOWrapper(process.stdout, encoding="utf-8"):  # print output of process to console
         print(line)
@@ -84,7 +85,7 @@ def call_generate_confussion_matrix(detect_output_dir, config, names_path, train_results_dir):
     labels_dir = getattr(config.confussion_matrix, 'labels-dir')

     cmd = f"node ./our_scripts/generate-confusion-matrix.js {detect_output_dir} {labels_dir} {names_path} > {train_results_dir}/confussion-matrix.tsv"
-    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,shell=True)
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
     print("_______ CALLING CONFUSSION MATRIX SCRIPT _______")
     print(cmd)
     for line in io.TextIOWrapper(process.stdout, encoding="utf-8"):  # print output of process to console
@@ -95,6 +96,6 @@ if __name__ == '__main__':
     config = Configuration()

     train_cmd = call_training_script(config)
-    weights_path, names_path,train_results_dir = move_training_results_to_experiments_dir(config)
-    detect_output_dir = call_detection_script(config, weights_path, names_path,train_results_dir)
-    call_generate_confussion_matrix(detect_output_dir, config, names_path,train_results_dir)
+    weights_path, names_path, train_results_dir = move_training_results_to_experiments_dir(config)
+    detect_output_dir = call_detection_script(config, weights_path, names_path, train_results_dir)
+    call_generate_confussion_matrix(detect_output_dir, config, names_path, train_results_dir)
@@ -1,31 +1,56 @@
 import datetime
 import glob
-import io
 import ntpath
 import os
 import shutil
-import subprocess
+import traceback

-from .config import Configuration
+import GPyOpt
+import numpy as np
+
+from config_bayes import Configuration
+from utils import call_subprocess, get_values_from_conff_matrix
+
+config = Configuration()
+
+date_string = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+bayes_params_file = open(os.path.join(config.experiments.dir, f"{date_string}_bayes_params.txt"), 'a+')


-def call_training_script(config):
-    cmd = '/home/tomekb/miniconda3/envs/conda3.7/bin/python -u /home/tomekb/yolov3/train.py '
-    cmd += f"--experiment-dir {config.experiments.dir}"
-    cmd += config.train.get_args_string()  # getting rest of train arguments
+def call_training_script(gaussian_hyps):
+    cmd = 'python -u /home/tomekb/yolov3/train.py'
+    cmd += ' --epochs ' + gaussian_hyps['epochs'].__str__()
+    cmd += ' --batch-size ' + gaussian_hyps['batch-size'].__str__()
+    cmd += ' --cfg ' + config.train.cfg.__str__()
+    cmd += ' --data ' + config.train.data.__str__()
+    cmd += ' --multi-scale ' if gaussian_hyps['multi-scale'] else ""
+    cmd += ' --img-size ' + gaussian_hyps['img-size']
+    cmd += ' --rect ' if gaussian_hyps['rect'] else ""
+    cmd += ' --weights ' + config.train.weights.__str__()
+    cmd += ' --device ' + config.train.device.__str__()
+    cmd += ' --adam ' if gaussian_hyps['adam'] else ""
+    cmd += ' --freeze-layers ' if getattr(config.train, "freeze-layers") else ""
+    # cmd += ' --snapshot-every ' if getattr(config.train, "snapshot-every") else ""
+    cmd += ' --experiment-dir ' + config.experiments.dir.__str__()
+
+    train_hyps = dict(
+        (key, gaussian_hyps[key]) for idx, (key, _) in enumerate(gaussian_hyps.items()) if idx in range(6, 24))
+    cmd += f' --hyp \"{train_hyps}\"'

     print("_______ CALLING TRAINING SCRIPT _______")
     print(cmd)

-    os.chdir('..')  # change to project root directory
-    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
-    for line in io.TextIOWrapper(process.stdout, encoding="utf-8"):  # print output of training process to console
-        print(line)
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+    os.chdir(os.path.join(dir_path, '..'))  # change to project root directory
+
+    call_subprocess(cmd)

     return cmd


-def move_training_results_to_experiments_dir(config):
-    training_results_dir_path = os.path.join(config.experiments.dir, datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))  # creating directory according to pattern, e.g.: 2020-06-30_17-52-19
+def move_training_results_to_experiments_dir():
+    training_results_dir_path = os.path.join(config.experiments.dir, datetime.datetime.now().strftime(
+        '%Y-%m-%d_%H-%M-%S'))  # creating directory according to pattern, e.g.: 2020-06-30_17-52-19

     print("_______ CALLING MOVING RESULTS _______")
     print(f"MOVING RESUTLS TO {training_results_dir_path}")
@@ -38,22 +63,26 @@ def move_training_results_to_experiments_dir(config):
     names_path = open(config.train.data).readlines()[3].split('=')[-1].rstrip()  # read names path from file
     names_file_name = ntpath.basename(names_path)
     experiment_names_path = os.path.join(training_results_dir_path, names_file_name)
-    shutil.copy(names_path, experiment_names_path)  # copy names file from *.data file to created experiment dir with training results
-
-    tensorboard_dir = './runs'
-    last_modified_tensorboard_dir = max(glob.glob(os.path.join(tensorboard_dir, '*/')), key=os.path.getmtime)
-    shutil.move(last_modified_tensorboard_dir, os.path.join(training_results_dir_path))  # saving related tensorboard dir
+    shutil.copy(names_path,
+                experiment_names_path)  # copy names file from *.data file to created experiment dir with training results
+
+    tensorboard_dir = '/home/tomekb/yolov3/runs'
+    tensorboard_events_files = glob.glob(os.path.join(tensorboard_dir, '*'))
+    last_modified_events_file = max(tensorboard_events_files, key=os.path.getmtime)
+    shutil.move(last_modified_events_file,
+                os.path.join(training_results_dir_path))  # saving related tensorboard dir

     shutil.copy2(config.config_path, training_results_dir_path)  # copying configuration yaml

     # for test purposes only
-    # shutil.copy2('/home/tomekb/yolov3/experiments/1/best.pt', training_results_dir_path)
+    # TODO CHANGE ME AFTER TESTS
+    shutil.copy2('/home/tomekb/yolov3/experiments/yolov3-spp-100-epochs-freeze-layers/best.pt',
+                 training_results_dir_path)

     return weights_path, experiment_names_path, training_results_dir_path


-def call_detection_script(config, weights_path, names_path, dir):
+def call_detection_script(gaussian_hyps, weights_path, names_path, dir):
     detect_output_dir = os.path.join(dir, 'output')
     cmd = f"""/home/tomekb/miniconda3/envs/conda3.7/bin/python -u /home/tomekb/yolov3/detect.py
     --cfg {config.train.cfg}
|
||||||
--output {detect_output_dir}
|
--output {detect_output_dir}
|
||||||
--names {names_path}
|
--names {names_path}
|
||||||
--weights {weights_path}
|
--weights {weights_path}
|
||||||
--test-img-size {getattr(config.detect, 'test-img-size')}
|
--test-img-size {gaussian_hyps['test-img-size']}
|
||||||
--conf-thres {getattr(config.detect, 'conf-thres')}
|
--conf-thres {gaussian_hyps['conf-thres']}
|
||||||
--iou-thres {getattr(config.detect, 'iou-thres')}
|
--iou-thres {gaussian_hyps['iou-thres']}
|
||||||
--save-txt"""
|
--save-txt"""
|
||||||
cmd += " --agnostic-nms" if getattr(config.detect, 'agnostic-nms') else ""
|
|
||||||
cmd += " --agument" if getattr(config.detect, 'augment') else ""
|
|
||||||
cmd += f" --device {config.train.device}" if config.train.device else ""
|
cmd += f" --device {config.train.device}" if config.train.device else ""
|
||||||
|
|
||||||
cmd = " ".join(cmd.split())
|
cmd = " ".join(cmd.split())
|
||||||
|
|
||||||
print("_______ CALLING DETECTION SCRIPT _______")
|
print("_______ CALLING DETECTION SCRIPT _______")
|
||||||
print(cmd)
|
print(cmd)
|
||||||
|
|
||||||
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
|
call_subprocess(cmd)
|
||||||
for line in io.TextIOWrapper(process.stdout, encoding="utf-8"): # print output of process to console
|
|
||||||
print(line)
|
|
||||||
|
|
||||||
return detect_output_dir
|
return detect_output_dir
|
||||||
|
|
||||||
|
|
||||||
def call_generate_confussion_matrix(detect_output_dir, config, names_path, train_results_dir):
|
def call_generate_confussion_matrix(detect_output_dir, names_path, train_results_dir):
|
||||||
labels_dir = getattr(config.confussion_matrix, 'labels-dir')
|
labels_dir = getattr(config.confussion_matrix, 'labels-dir')
|
||||||
|
conff_matrix_path = os.path.join(train_results_dir, 'confussion-matrix.tsv')
|
||||||
cmd = f"node ./our_scripts/generate-confusion-matrix.js {detect_output_dir} {labels_dir} {names_path} > {train_results_dir}/confussion-matrix.tsv"
|
cmd = f"node /home/tomekb/yolov3/our_scripts/generate-confusion-matrix.js {detect_output_dir} {labels_dir} {names_path} > {conff_matrix_path}"
|
||||||
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
|
|
||||||
print("_______ CALLING CONFUSSION MATRIX SCRIPT _______")
|
print("_______ CALLING CONFUSSION MATRIX SCRIPT _______")
|
||||||
print(cmd)
|
print(cmd)
|
||||||
for line in io.TextIOWrapper(process.stdout, encoding="utf-8"): # print output of process to console
|
call_subprocess(cmd)
|
||||||
print(line)
|
return conff_matrix_path
|
||||||
|
|
||||||
|
|
||||||
|
def yolov3(x):
|
||||||
|
bayes_hyps = {
|
||||||
|
'epochs': int(x[:, 0]),
|
||||||
|
'batch-size': int(x[:, 1]),
|
||||||
|
'multi-scale': bool(x[:, 2]),
|
||||||
|
'img-size': f"{int(x[:, 3])} {int(x[:, 4])}",
|
||||||
|
'rect': bool(x[:, 5]),
|
||||||
|
'adam': bool(x[:, 6]),
|
||||||
|
'giou': float(x[:, 7]), # train hyps start index
|
||||||
|
'cls': float(x[:, 8]),
|
||||||
|
'cls_pw': float(x[:, 9]),
|
||||||
|
'obj': float(x[:, 10]),
|
||||||
|
'obj_pw': float(x[:, 11]),
|
||||||
|
'iou_t': float(x[:, 12]),
|
||||||
|
'lr0': float(x[:, 13]),
|
||||||
|
'lrf': float(x[:, 14]),
|
||||||
|
'momentum': float(x[:, 15]),
|
||||||
|
'weight_decay': float(x[:, 16]),
|
||||||
|
'fl_gamma': float(x[:, 17]),
|
||||||
|
'hsv_h': float(x[:, 18]),
|
||||||
|
'hsv_s': float(x[:, 19]),
|
||||||
|
'hsv_v': float(x[:, 20]),
|
||||||
|
'degrees': float(x[:, 21]),
|
||||||
|
'translate': float(x[:, 22]),
|
||||||
|
'scale': float(x[:, 23]),
|
||||||
|
'shear': float(x[:, 24]), # train hyps end index
|
||||||
|
'test-img-size': int(x[:, 25]),
|
||||||
|
'conf-thres': float(x[:, 26]),
|
||||||
|
'iou-thres': float(x[:, 27])
|
||||||
|
}
|
||||||
|
|
||||||
|
line = ""
|
||||||
|
try:
|
||||||
|
call_training_script(bayes_hyps)
|
||||||
|
weights_path, names_path, train_results_dir = move_training_results_to_experiments_dir()
|
||||||
|
detect_output_dir = call_detection_script(bayes_hyps, weights_path, names_path, train_results_dir)
|
||||||
|
conf_matrix_path = call_generate_confussion_matrix(detect_output_dir, names_path, train_results_dir)
|
||||||
|
|
||||||
|
y_dict = get_values_from_conff_matrix(conf_matrix_path)
|
||||||
|
|
||||||
|
# tutaj wzór na wyliczanie funkcji
|
||||||
|
y_val = 1 - ((y_dict['match'] * 10 - y_dict['false positives'] * 3) / y_dict['mistakes'])
|
||||||
|
|
||||||
|
# zapisywanie do pliku zadeklarowanego globalnie
|
||||||
|
line = "\t".join([bayes_hyps.__str__(), str(y_val)])
|
||||||
|
bayes_params_file.writelines([line, '\n'])
|
||||||
|
return y_val
|
||||||
|
except:
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
print("An error occured during running training-detect-confussion process \n", tb)
|
||||||
|
print("Returning 1 from current bayessian iteration")
|
||||||
|
line = "\t".join([bayes_hyps.__str__(), str(1)])
|
||||||
|
return 1
|
||||||
|
finally:
|
||||||
|
bayes_params_file.writelines([line, '\n'])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# uruchamiać z
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
config = Configuration()
|
|
||||||
|
|
||||||
train_cmd = call_training_script(config)
|
bounds = config.get_bayes_bounds()
|
||||||
weights_path, names_path, train_results_dir = move_training_results_to_experiments_dir(config)
|
|
||||||
detect_output_dir = call_detection_script(config, weights_path, names_path, train_results_dir)
|
# for b in bounds:
|
||||||
call_generate_confussion_matrix(detect_output_dir, config, names_path, train_results_dir)
|
# print(b)
|
||||||
|
|
||||||
|
# tutaj będzie wczytywanie z poprzednich eksperymentów plik bayes_params
|
||||||
|
X = None
|
||||||
|
Y = None
|
||||||
|
|
||||||
|
bayes_optimizer = GPyOpt.methods.BayesianOptimization(f=yolov3, domain=bounds, X=X, Y=Y, verbosity=True,
|
||||||
|
initial_design_numdata=2)
|
||||||
|
bayes_optimizer.run_optimization(config.bayes.iterations, verbosity=True)
|
||||||
|
bayes_params_file.close()
|
||||||
|
|
|
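Two points about the objective, stated as my reading of GPyOpt rather than anything documented in this repo: the optimizer calls f with a 2-D array holding one candidate point per row, hence the x[:, i] unpacking above, and it minimizes f, which is why the confusion-matrix score is folded into y_val = 1 - (...) so that better detections yield smaller values. A toy stand-in:

import numpy as np

def objective(x):
    # x arrives with shape (1, n_dims); x[:, i] selects the i-th hyperparameter
    epochs = int(x[:, 0])
    lr0 = float(x[:, 1])
    return (lr0 - 0.01) ** 2 + 0.001 * epochs  # smaller is better

print(objective(np.array([[10, 0.02]])))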
@@ -0,0 +1,55 @@
+import io
+import subprocess
+
+
+def call_subprocess(cmd):
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
+    error = False
+    for process_line_output in io.TextIOWrapper(process.stdout,
+                                                encoding="utf-8"):  # print output of training process to console
+        if 'Traceback' in process_line_output:
+            error = True
+        print(process_line_output)
+    if error:
+        raise RuntimeError("An error occurred during calling subprocess")
+
+
+def get_values_from_conff_matrix(path):
+    lines = open(path, 'r').readlines()[:7]
+    d = {}
+    for l in lines:
+        key, value, *_ = l.split("\t")
+        d.update({key.replace(":", ""): int(value)})
+    return d
+
+
+def get_bayes_params_as_dict(x):
+    return {
+        'epochs': int(x[:, 0]),
+        'batch-size': int(x[:, 1]),
+        'multi-scale': bool(x[:, 2]),
+        'img-size': f"{int(x[:, 3])} {int(x[:, 4])}",
+        'rect': bool(x[:, 5]),
+        'adam': bool(x[:, 6]),
+        'giou': float(x[:, 7]),  # train hyps start index
+        'cls': float(x[:, 8]),
+        'cls_pw': float(x[:, 9]),
+        'obj': float(x[:, 10]),
+        'obj_pw': float(x[:, 11]),
+        'iou_t': float(x[:, 12]),
+        'lr0': float(x[:, 13]),
+        'lrf': float(x[:, 14]),
+        'momentum': float(x[:, 15]),
+        'weight_decay': float(x[:, 16]),
+        'fl_gamma': float(x[:, 17]),
+        'hsv_h': float(x[:, 18]),
+        'hsv_s': float(x[:, 19]),
+        'hsv_v': float(x[:, 20]),
+        'degrees': float(x[:, 21]),
+        'translate': float(x[:, 22]),
+        'scale': float(x[:, 23]),
+        'shear': float(x[:, 24]),  # train hyps end index
+        'test-img-size': int(x[:, 25]),
+        'conf-thres': float(x[:, 26]),
+        'iou-thres': float(x[:, 27])
+    }
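get_values_from_conff_matrix assumes the first seven lines of the TSV are key:<TAB>value pairs; judging from yolov3(x) above, the keys include at least 'match', 'false positives' and 'mistakes'. A hypothetical round trip (file contents invented for illustration):

# confussion-matrix.tsv might begin (tab-separated, values made up):
#   match:\t412
#   mistakes:\t57
#   false positives:\t31
vals = get_values_from_conff_matrix('confussion-matrix.tsv')
y_val = 1 - ((vals['match'] * 10 - vals['false positives'] * 3) / vals['mistakes'])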
train.py (22 lines changed)
@@ -10,6 +10,7 @@ from models import *
 from utils.datasets import *
 from utils.utils import *
 from our_scripts.config import Configuration
+import ast


 mixed_precision = True
@@ -117,7 +118,6 @@ def train(hyp):
     best_fitness = 0.0
     attempt_download(weights)
     if weights.endswith('.pt'):  # pytorch format
-        print("LOADIN MODEL")
         # possible weights are '*.pt', 'yolov3-spp.pt', 'yolov3-tiny.pt' etc.
         ckpt = torch.load(weights, map_location=device)
@@ -364,8 +364,9 @@ def train(hyp):
                 'model': ema.ema.module.state_dict() if hasattr(model, 'module') else ema.ema.state_dict(),
                 'optimizer': None if final_epoch else optimizer.state_dict()}

-            if epoch % opt.save_every_nth_epoch == 0:
-                torch.save(chkpt, f'yolo_{epoch}.pt')
+            if opt.snapshot_every and epoch % opt.snapshot_every == 0:
+                saving_path = os.path.join(opt.experiment_dir, f'weights_{epoch}.pt')
+                torch.save(ckpt, saving_path)
             # Save last, best and delete
             torch.save(ckpt, last)
             if (best_fitness == fi) and not final_epoch:
@@ -416,8 +417,14 @@ if __name__ == '__main__':
     parser.add_argument('--adam', action='store_true', help='use adam optimizer')
     parser.add_argument('--single-cls', action='store_true', help='train as single-class dataset')
     parser.add_argument('--freeze-layers', action='store_true', help='Freeze non-output layers')
-    parser.add_argument('--save-every-nth-epoch', type=int, help='Saving every n-th epoth')
+    # parameters added for the purposes of the car detection project
+    parser.add_argument('--snapshot-every', type=int, help='Saving every n-th state of model weights')
+    parser.add_argument('--experiment-dir', type=str, help='Directory for experiments')
+    parser.add_argument('--hyp', type=str, help='String that represents dictionary with hyperparameters ')

     opt = parser.parse_args()
+    #print(opt)
     #opt.weights = last if opt.resume and not opt.weights else opt.weights
     #check_git_status()
     opt.cfg = check_file(opt.cfg)  # check file
@@ -431,9 +438,10 @@ if __name__ == '__main__':
     # scale hyp['obj'] by img_size (evolved at 320)
     # hyp['obj'] *= opt.img_size[0] / 320.

-    hyp = Configuration().train.other_hyps.__dict__
+    # overriding global hyp variable with our bayesian hyps
+    hyp = ast.literal_eval(opt.hyp)
+    #print('### TRAIN HYPERPARAMETERS ###')
+    #print(hyp)

     tb_writer = None
     if not opt.evolve:  # Train normally
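The new --hyp flag receives the hyperparameter dictionary as its Python-literal string (the bayes script builds it with f' --hyp \"{train_hyps}\"'), and ast.literal_eval parses it back safely without evaluating arbitrary code; a minimal round trip:

import ast

train_hyps = {'giou': 3.54, 'lr0': 0.0005, 'momentum': 0.937}
arg = str(train_hyps)        # what ends up after --hyp on the command line
hyp = ast.literal_eval(arg)  # what train.py recovers
assert hyp == train_hyps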
@@ -552,6 +552,7 @@ def augment_hsv(img, hgain=0.5, sgain=0.5, vgain=0.5):

     img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))).astype(dtype)
     cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img)  # no return needed
+

     # Histogram equalization
     # if random.random() < 0.2: