forked from atlarge-research/continuum
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
244 lines (195 loc) · 7.61 KB
/
main.py
File metadata and controls
244 lines (195 loc) · 7.61 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
"""\
Entry file for the benchmark.
Parse the config file, and continue from there on.
Check the documentation and help for more information.
"""
import argparse
import os
import os.path
import sys
import logging
import time
import socket
import getpass
import configuration_parser.start as configuration_parser
import infrastructure.start as infrastructure
import resource_manager.start as resource_manager
import benchmark.start as benchmark
import execution_model.start as execution_model
# pylint: disable=unused-import
from infrastructure.qemu import start as qemu_vm
from infrastructure.terraform import start as terraform_vm
# pylint: enable=unused-import
def ansible_check_output(out):
    """Check if an Ansible Playbook invocation succeeded or failed.

    Shared by all files launching Ansible playbooks.
    Logs the playbook's recap, and terminates the framework with a
    non-zero exit code when the playbook reported an error.

    Args:
        out (tuple(list(str), list(str))): List of process stdout and stderr lines
    """
    output, error = out

    # Print a summary of execution times: Ansible prints a recap after a
    # line of "=" separators, so collect everything following that marker
    summary = False
    lines = [""]
    for line in output:
        if summary:
            lines.append(line.rstrip())

        if "==========" in line:
            summary = True

    if lines != [""]:
        logging.debug("\n".join(lines))

    # Check if execution was successful
    if error != []:
        logging.error("".join(error))
        # exit with a non-zero status so callers/CI see the failure
        # (the original sys.exit() exited with status 0)
        sys.exit(1)
    elif any("FAILED!" in line for line in output):
        logging.error("".join(output))
        sys.exit(1)
def make_wide(formatter, w=120, h=36):
    """Return a wider HelpFormatter for Python Argparse.

    Args:
        formatter (HelpFormatter): Format class for Python Argparse
        w (int, optional): Width of Argparse output. Defaults to 120.
        h (int, optional): Max help positions for Argparse output. Defaults to 36.

    Returns:
        formatter: Format class for Python Argparse, possibly with updated output sizes
    """
    sizing = {"width": w, "max_help_position": h}
    try:
        # Probe once: not every formatter accepts these keyword arguments
        formatter(None, **sizing)
    except TypeError:
        print("Argparse help formatter failed, falling back.")
        return formatter
    # The probe succeeded, so hand argparse a factory with the sizes baked in
    return lambda prog: formatter(prog, **sizing)
def add_constants(config):
    """Add some constants to the config dict.

    Sets home/base paths, the current user, the benchmark SSH key,
    per-application Docker image names, the usable VM IP postfix range,
    and the local Docker registry address. Exits with a non-zero status
    if the host IP cannot be determined.

    Args:
        config (dict): Parsed configuration
    """
    config["home"] = str(os.getenv("HOME"))
    config["base"] = str(os.path.dirname(os.path.realpath(__file__)))
    config["username"] = getpass.getuser()
    config["ssh_key"] = os.path.join(config["home"], ".ssh/id_rsa_benchmark")

    if not config["infrastructure"]["infra_only"]:
        if config["benchmark"]["application"] == "image_classification":
            config["images"] = {
                "worker": "redplanet00/kubeedge-applications:image_classification_subscriber",
                "endpoint": "redplanet00/kubeedge-applications:image_classification_publisher",
                "combined": "redplanet00/kubeedge-applications:image_classification_combined",
            }
        elif config["benchmark"]["application"] == "empty":
            config["images"] = {"worker": "redplanet00/kubeedge-applications:empty"}
        elif config["benchmark"]["application"] == "opencraft":
            config["images"] = {
                "worker": "lwagner1/opencraft_benchmark:opencraft_server",
                "endpoint": "lwagner1/opencraft_benchmark:opencraft_bot",
                # this is a hack to avoid infrastructure/start.py/docker_pull to throw a KeyError
                # for opencraft, no combined image exists
                "combined": "lwagner1/opencraft_benchmark:opencraft_bot",
            }

    # VM addresses look like 100.100.100.100 (Prefix.Mid.Post);
    # the final (postfix) byte is restricted to this range
    config["postfixIP_lower"] = 2
    config["postfixIP_upper"] = 252

    # Get Docker registry IP: "connecting" a UDP socket to a public address
    # makes the kernel pick the outbound interface; no packet is sent
    try:
        # Context manager guarantees the socket is closed (it previously leaked)
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect(("8.8.8.8", 80))
            host_ip = s.getsockname()[0]
    except OSError as e:
        # OSError covers socket.gaierror plus errors such as
        # "Network is unreachable", which the old gaierror-only handler missed
        logging.error("Could not get host ip with error: %s", e)
        sys.exit(1)

    config["registry"] = host_ip + ":5000"
def set_logging(args):
    """Enable logging to both stdout and file (BENCHMARK_FOLDER/logs)

    If -v/--verbose is used, stdout will report logging.DEBUG, otherwise only logging.INFO
    The file will always use logging.DEBUG (which is the bigger scope)

    Args:
        args (Namespace): Argparse object
    """
    # Log to file parameters: one timestamped file per run under ./logs
    log_dir = "logs"
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # NOTE(review): ":" in the filename is fine on Linux but invalid on
    # Windows filesystems — confirm target platforms
    t = time.strftime("%Y-%m-%d_%H:%M:%S", time.gmtime())
    # Encode run mode and application in the file name when a benchmark runs
    if args.config["infrastructure"]["infra_only"]:
        log_name = "%s_infra_only.log" % (t)
    else:
        log_name = "%s_%s_%s.log" % (
            t,
            args.config["mode"],
            args.config["benchmark"]["application"],
        )

    file_handler = logging.FileHandler(log_dir + "/" + log_name)
    file_handler.setLevel(logging.DEBUG)

    # Log to stdout parameters: verbosity controlled by the -v flag
    stdout_handler = logging.StreamHandler(sys.stdout)
    if args.verbose:
        stdout_handler.setLevel(logging.DEBUG)
    else:
        stdout_handler.setLevel(logging.INFO)

    # Set parameters: root logger at DEBUG so each handler filters down
    logging.basicConfig(
        format="[%(asctime)s %(pathname)s:%(lineno)s - %(funcName)s() ] %(message)s",
        level=logging.DEBUG,
        datefmt="%Y-%m-%d %H:%M:%S",
        handlers=[file_handler, stdout_handler],
    )

    logging.info("Logging has been enabled. Writing to stdout and file %s/%s", log_dir, log_name)

    # Pretty-print the parsed configuration in INI-like form to the debug log
    s = []
    header = True
    for key, value in args.config.items():
        if isinstance(value, dict):
            # Nested dicts become named [section]s
            s.append("[" + key + "]")
            category = dict(args.config[key])
            for k, v in category.items():
                s.append("%-30s = %s" % (k, v))
            s.append("")
        else:
            # All top-level scalars/lists are grouped under one [constants] header
            if header:
                s.append("[constants]")
                header = False

            if isinstance(value, list):
                # Lists print one element per line, aligned under the key
                # NOTE(review): value[0] raises IndexError on an empty list —
                # assumes config lists are non-empty; confirm upstream
                s.append("%-30s = %s" % (key, value[0]))
                if len(value) > 1:
                    for v in value[1:]:
                        s.append("%-30s %s" % ("", v))
            else:
                s.append("%-30s = %s" % (key, value))

    logging.debug("\n%s", "\n".join(s))
def main(args):
    """Main control function of the framework

    Args:
        args (Namespace): Argparse object
    """
    config = args.config

    # Provision the infrastructure first; everything else runs on top of it
    machines = infrastructure.start(config)

    if not config["infrastructure"]["infra_only"]:
        resource_manager.start(config, machines)

        if "execution_model" in config:
            execution_model.start(config, machines)

        if not config["benchmark"]["resource_manager_only"]:
            benchmark.start(config, machines)

    if not config["infrastructure"]["delete"]:
        # VMs stay alive: tell the user how to reach each of them over SSH
        ssh_commands = [
            "ssh %s -i %s" % (ssh, config["ssh_key"])
            for ssh in config["cloud_ssh"] + config["edge_ssh"] + config["endpoint_ssh"]
        ]
        logging.info("To access the VMs:\n\t%s\n", "\n\t".join(ssh_commands))
    else:
        # Tear down via the provider-specific module (qemu_vm / terraform_vm),
        # looked up dynamically from this module's globals
        provider_module = globals()["%s_vm" % (config["infrastructure"]["provider"])]
        provider_module.delete_vms(config, machines)
        logging.info("Finished\n")
if __name__ == "__main__":
    # Get input arguments, and validate those arguments
    parser_obj = argparse.ArgumentParser(
        formatter_class=make_wide(argparse.HelpFormatter, w=120, h=500)
    )
    # The positional "config" argument is parsed and validated immediately by
    # the type callable, so arguments.config holds the parsed dict, not a path
    parser_obj.add_argument(
        "config",
        type=lambda x: configuration_parser.start(parser_obj, x),
        help="benchmark config file",
    )
    parser_obj.add_argument("-v", "--verbose", action="store_true", help="increase verbosity level")

    arguments = parser_obj.parse_args()

    # Set loggers
    # (constants must be added before logging so the full config is printed)
    add_constants(arguments.config)
    set_logging(arguments)

    main(arguments)