-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtry.py
More file actions
executable file
·99 lines (83 loc) · 2.96 KB
/
try.py
File metadata and controls
executable file
·99 lines (83 loc) · 2.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
#!/usr/bin/env python
import argparse as ap
#import subprocess
import shlex, subprocess
import json
import xml.etree.ElementTree as ET
# Command-line interface.
# NOTE: single-dash long options ('-dataset') are unconventional but kept
# for backward compatibility with existing invocations.
parser = ap.ArgumentParser(description='Ocr trainer')
parser.add_argument(
    '-dataset',
    type=str,
    help='Dataset to use'
)
parser.add_argument(
    '-jsons',
    dest='jsons',
    type=str,
    nargs='+',
    help='List of json to use'
)
parser.add_argument(
    '-outfile',
    dest='outfile',
    type=str,
    help='logfile path (!) Overwrites existing file'
)
args = parser.parse_args()
# BUG FIX: 'wa+' is not a valid open() mode ('w' and 'a' conflict; Python 3
# raises ValueError for it).  The -outfile help text says the log file is
# overwritten, so plain write mode is the intended behavior.
with open(args.outfile, 'w') as logfile:
    def get_layers(training_path):
        """Return the layer sizes stored in a training XML file.

        Reads the text content of <SimpleOcr><layer_sizes><data> and
        serializes it with ET.tostring (bytes under Python 3, str under
        Python 2).  Raises AttributeError if any of those elements is
        missing from the document.
        """
        tree = ET.parse(training_path)
        root = tree.getroot()
        ocr = root.find('SimpleOcr')
        layers = ocr.find('layer_sizes')
        data = layers.find("data")
        return ET.tostring(data, method='text')
def check_value(workflow, dataset_path, training, datas):
    """Run ./ocr on every sample of a dataset and log the success rate.

    The dataset JSON maps an expected label (key) to a list of sample image
    paths.  Each sample is classified by ./ocr; a mismatch between the
    expected key and the tool's reported value counts as an error.  The
    result line is appended to the enclosing `logfile` (closure variable).

    datas must contain an 'iterations' entry (string) for the log line.
    """
    with open(dataset_path, 'r') as r:
        dataset = sorted(json.load(r).items())
    total = 0.0
    errors = 0.0
    layers = get_layers(training).split()
    # fichier -> tant de layers, tel taux de success
    for key, values in dataset:
        for value in values:
            print(value)
            # Run ./ocr directly (argument list, no shell) and filter the
            # 'value:' lines in Python instead of piping through
            # `grep | sed`: this removes the shell-injection risk of
            # interpolating file paths into a shell string, and avoids
            # check_output's documented deadlock with stderr=PIPE.
            # universal_newlines=True yields str on both Python 2 and 3.
            raw = subprocess.check_output(
                ['./ocr', value, workflow, dataset_path, training],
                universal_newlines=True)
            matched = [line.replace('value: ', '')
                       for line in raw.splitlines() if 'value:' in line]
            result = '\n'.join(matched)
            print(key + ", " + result)
            total += 1
            if key != result:
                errors += 1
    # Guard against an empty dataset (original code divided by zero here).
    if total == 0:
        success = 0.0
    else:
        success = (total - errors) / (total) * 100
    logfile.write('Workflow: {:s} dataset: {:s} training: {:s} training_iteration: {:s} success {:f} layers: {:s}\n'.\
        format(workflow, dataset_path, training, datas['iterations'], success, str(layers)))
    print(success)
def parse_output(output):
    """Extract run metadata from the ocr tool's stdout.

    Scans each line of `output` for known 'key value' pairs and returns a
    dict with 'iterations' and/or 'training_path' when the corresponding
    lines are present.  Lines that match nothing are ignored.
    """
    # Maps a recognized line prefix to the key used in the result dict.
    prefix_to_key = (
        ('iterations', 'iterations'),
        ('training_file', 'training_path'),
    )
    parsed = {}
    for line in output.split('\n'):
        for prefix, key in prefix_to_key:
            if line.startswith(prefix):
                parsed[key] = line.split()[1]
                break
    return parsed
# Driver: probe each workflow JSON once on a known sample image to discover
# its training file from the tool's stdout, then evaluate it over the whole
# dataset with check_value.
for workflow_json in args.jsons:
    try:
        cmd = './ocr {:s} {:s} {:s}'.format('./data/step1/base/a.bmp',
                                            workflow_json, args.dataset)
        print(cmd)
        # universal_newlines=True decodes stdout/stderr to str so that
        # parse_output's str operations work under Python 3 as well.
        p = subprocess.Popen(shlex.split(cmd),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             universal_newlines=True)
        out, err = p.communicate()
        print("{0} {1}".format(out, err))
        output = parse_output(out)
        check_value(workflow_json, args.dataset, output['training_path'], output)
    except Exception as e:
        # Best-effort: a failing workflow is reported but must not abort
        # the evaluation of the remaining ones.
        print(e)