Skip to content

Commit 4cd1a77

Browse files
committed
release 3.0
1 parent d867125 commit 4cd1a77

103 files changed

Lines changed: 9832 additions & 481 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

.idea/SPRINT.iml

Lines changed: 8 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/encodings.xml

Lines changed: 4 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/misc.xml

Lines changed: 7 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/modules.xml

Lines changed: 8 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/workspace.xml

Lines changed: 33 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

README.md

Lines changed: 16 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ Incorporating the **C**onstellation **I**nvestigation **R**epository with **C**o
1010

1111
# General Setup
1212

13-
1. Clone the repo: `git clone git@github.com:MIT-STARLab/SPRINT.git`
13+
1. Clone the repo: `git clone git@github.mit.edu:star-lab/SPRINT.git`
1414
1. Init the appropriate submodules:
1515
1. `cd SPRINT/source`
1616
1. `git submodule init circinus_global_planner circinus_orbit_link_public circinus_orbit_propagation circinus_orbit_viz circinus_sim circinus_tools`
@@ -40,7 +40,20 @@ Incorporating the **C**onstellation **I**nvestigation **R**epository with **C**o
4040
1. Navigate to `SPRINT/scripts`
4141
1. Run simulation: <br>
4242
a. `./run_const_sim.sh --use orig_circinus_zhou` to specify a case corresponding to `inputs/cases/orig_circinus_zhou`.<br>
43-
b. `./run_const_sim.sh --help` for a description of the other options.<br>
43+
b. `./run_const_sim.sh --help` for a description of the other options.<br>
44+
45+
## Separated Simulation Demo
46+
This simulation can be run such that each satellite runs on separate hardware, say a Raspberry Pi. The following demo is for a 2-satellite constellation.
47+
1. For the ground station network: <br>
48+
a. Navigate to `SPRINT/scripts`
49+
b. `./run_const_sim.sh --use circinus_zhou_2_sats --ground` to specify a two-satellite case and to run the ground station network part
50+
2. For each satellite: <br>
51+
a. Navigate to `SPRINT/scripts`
52+
b. `./run_const_sim.sh --use circinus_zhou_2_sats --satellite` to specify a two-satellite case and to run the satellite part
53+
54+
The satellites can be initialized before the ground station network; however, satellites are given 100 tries to connect to the ground station network, once every second. If the ground station network isn't initialized in time, the satellite program exits.
55+
56+
4457

4558
# Submodule dependencies
4659
* [circinus_global_planner](https://github.com/MIT-STARLab/circinus_global_planner)
@@ -53,4 +66,4 @@ Incorporating the **C**onstellation **I**nvestigation **R**epository with **C**o
5366
These should be managed as if they are independent and up to date with their own master, before committing the folder from the SPRINT main repository (which then tracks the commit of the subrepo).
5467

5568
# History
56-
SPRINT was initiated as CIRCINUS, by [apollokit](https://github.com/apollokit)
69+
SPRINT was initiated as CIRCINUS, by [apollokit](https://github.com/apollokit).
Lines changed: 177 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,177 @@
# this file is for generating plots / outputs from
# the json files in this folder
import json
import os
import matplotlib.pyplot as plt
import numpy as np

# sweep dimensions: self-replanner on/off x ground-station disruption case
SRP_settings = [True, False]
GS_disruptions = ['None','G0','G1','G2']

# grab all data: one JSON results file per (SRP setting, GS disruption) combo,
# keyed in all_data by the same string used to build the file name
all_data = {}
for SRP_setting in SRP_settings:
    for GS_disruption in GS_disruptions:
        cur_str = 'SRP_Test_SRP_%s_GS_%s' % (SRP_setting, GS_disruption)

        # os.path.join instead of hand-built '.\\multirun_tests\\' so the
        # script also runs on non-Windows platforms
        with open(os.path.join('multirun_tests', cur_str + '.json'), "r") as jsonFile:
            all_data[cur_str] = json.load(jsonFile)

print('All Data Loaded')

print('test time')
# initialize all data structs
23+
total_failures = []
24+
25+
median_data_margin_prcnt = []
26+
prcntl25_ave_d_margin_prcnt = []
27+
prcntl75_ave_d_margin_prcnt = []
28+
29+
median_energy_margin_prcnt = []
30+
prcntl25_ave_e_margin_prcnt = []
31+
prcntl75_ave_e_margin_prcnt = []
32+
33+
exec_over_poss = []
34+
total_exec_dv = []
35+
total_poss_dv = []
36+
37+
median_obs_initial_lat_exec = [] # initial means the first part of the data container downlinked
38+
prcntl25_obs_initial_lat_exec = []
39+
prcntl75_obs_initial_lat_exec = []
40+
41+
median_av_aoi_exec = []
42+
prcntl25_av_aoi_exec = []
43+
prcntl75_av_aoi_exec = []
44+
45+
# MAKE DATA STRUCTS FOR BAR CHARTS
# outer index (ind) = SRP setting; inner append order follows GS_disruptions
for ind, SRP_setting in enumerate(SRP_settings):
    total_failures.append([])
    median_data_margin_prcnt.append([])
    prcntl25_ave_d_margin_prcnt.append([])
    prcntl75_ave_d_margin_prcnt.append([])
    median_energy_margin_prcnt.append([])
    prcntl25_ave_e_margin_prcnt.append([])
    prcntl75_ave_e_margin_prcnt.append([])
    exec_over_poss.append([])
    median_obs_initial_lat_exec.append([])
    prcntl25_obs_initial_lat_exec.append([])
    prcntl75_obs_initial_lat_exec.append([])
    median_av_aoi_exec.append([])
    prcntl25_av_aoi_exec.append([])
    prcntl75_av_aoi_exec.append([])
    for GS_disruption in GS_disruptions:
        # BUGFIX: key must match the format used when loading all_data
        # ('SRP_Test_SRP_%s_GS_%s'); the previous
        # 'SRP_Test_BDT_False_SRP_%s_GS_%s' key was never loaded, so every
        # lookup raised KeyError
        cur_str = 'SRP_Test_SRP_%s_GS_%s' % (SRP_setting, GS_disruption)
        cur_data = all_data[cur_str]

        # Activity Failures: total across all failure types
        total_failures[ind].append(sum(cur_data['Num Failures by Type'].values()))

        # Data Margin levels (offsets from the median for asymmetric error bars)
        median_data_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
        prcntl25_ave_d_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['prcntl25_ave_d_margin_prcnt'])
        prcntl75_ave_d_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['prcntl75_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])

        # Energy Margin levels
        median_energy_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
        prcntl25_ave_e_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'] - cur_data['e_rsrc_stats']['prcntl25_ave_e_margin_prcnt'])
        prcntl75_ave_e_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['prcntl75_ave_e_margin_prcnt'] - cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])

        # METRICS
        # DV % throughput (executed over possible data volume, as a percentage)
        exec_over_poss[ind].append(cur_data['dv_stats']['exec_over_poss']*100)

        # Obs Latency
        median_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['median_obs_initial_lat_exec'])
        prcntl25_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['median_obs_initial_lat_exec'] - cur_data['lat_stats']['prcntl25_obs_initial_lat_exec'])
        prcntl75_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['prcntl75_obs_initial_lat_exec'] - cur_data['lat_stats']['median_obs_initial_lat_exec'])

        # AoI
        median_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
        prcntl25_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['prcntl25_av_aoi_exec'])
        prcntl75_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['prcntl75_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
91+
def autolabel(rects,axis):
    """
    Attach a text label above each bar displaying its (integer-truncated) height.

    from: https://matplotlib.org/examples/api/barchart_demo.html

    Args:
        rects: iterable of bar Rectangle artists (e.g. a BarContainer).
        axis: the Axes the labels are drawn on.
    """
    for rect in rects:
        height = rect.get_height()
        # BUGFIX: center the label over the bar with width/2 (was width/4,
        # which placed every label off-center despite ha='center'; the
        # referenced matplotlib demo also uses width/2)
        axis.text(rect.get_x() + rect.get_width()/2., height,
                  '%d' % int(height),
                  ha='center', va='bottom')
def double_bar_graph(ax,N,data,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr = (None, None), legendFlag = True, colorStrs = ('b','gray'),width=0.35):
    """Draw a two-series grouped (side-by-side) bar chart on the given axis.

    Args:
        ax: matplotlib Axes to draw on.
        N: number of bar groups along x.
        data: sequence of exactly 2 series, each of length N.
        yLabelStr: y-axis label.
        titleStr: plot title.
        xLabelStr: x-axis label.
        xTickLabels: N tick labels, one per group.
        legendStrs: legend labels for the two series.
        yerr: pair of per-series error-bar specs, each forwarded to ax.bar.
            Defaults changed from mutable lists to tuples (they are only
            indexed here, but mutable defaults are shared across calls).
        legendFlag: draw the legend when True.
        colorStrs: pair of bar colors.
        width: width of each bar; the two series are offset by one width.

    Returns:
        The same Axes, for chaining.

    Raises:
        ValueError: if data does not hold exactly 2 series, or the series /
            tick-label lengths disagree with N. (ValueError subclasses
            Exception, so existing callers catching Exception still work.)
    """
    if len(data) != 2:
        raise ValueError('Need exactly 2 data sets')

    if N != len(data[0]) or N != len(data[1]) or N != len(xTickLabels):
        raise ValueError('number of bar graphs does not match data and/or tick labels supplied')

    ind = np.arange(N)  # the x locations for the groups

    rects1 = ax.bar(ind, data[0], width, color=colorStrs[0], yerr=yerr[0])
    rects2 = ax.bar(ind + width, data[1], width, color=colorStrs[1], yerr=yerr[1])
    ax.set_ylabel(yLabelStr)
    ax.set_title(titleStr)
    # center the group ticks between the two paired bars
    ax.set_xticks(ind + width / 2)
    ax.set_xlabel(xLabelStr)
    ax.set_xticklabels(tuple(xTickLabels))
    if legendFlag:
        ax.legend((rects1[0], rects2[0]), tuple(legendStrs))
    autolabel(rects1, ax)
    autolabel(rects2, ax)

    return ax
# MAKE PLOTS
# All series below are indexed [SRP setting][GS disruption case]; each *_yerr
# is a pair of (lower, upper) percentile-offset arrays, one entry per series.
N = 4 # number of GS-disruption cases per group; must match len(xTickLabels)
width = 0.35 # the width of the bars
xLabelStr = 'Ground Station Failures'
# NOTE(review): 'G2 -24 hrs' spacing looks like a typo for 'G2 - 24 hrs' -- confirm
xTickLabels = ('None','G0 - 24 hrs', 'G1 - 12 hrs', 'G2 -24 hrs')
legendStrs = ('SRP On', 'SRP Off')

############# one plot for total failures ####################
fig, ax = plt.subplots()
yLabelStr = 'Total Activity Failures (#)'
titleStr = 'Activity Failures with SRP on/off'
double_bar_graph(ax,N,total_failures,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs)

###### one plot with two subplots (one for each state margin level) ######
fig, ax1 = plt.subplots(nrows=1, ncols=1)
yLabelStr = 'Data Margin (%)'
titleStr = 'Data Margin Levels with SRP on/off'
# asymmetric error bars: (below-median offsets, above-median offsets) per series
d_yerr = (np.asarray([prcntl25_ave_d_margin_prcnt[0],prcntl75_ave_d_margin_prcnt[0]]),np.asarray([prcntl25_ave_d_margin_prcnt[1],prcntl75_ave_d_margin_prcnt[1]]))
double_bar_graph(ax1,N,median_data_margin_prcnt,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=d_yerr)

# NOTE(review): the disabled block below references ax2, which is not defined
# at this point; re-enable only after creating a second subplot axis
""" yLabelStr = 'Energy Margin (%)'
titleStr = 'Energy Margin Levels with SRP on/off'
e_yerr = (np.asarray([prcntl25_ave_e_margin_prcnt[0],prcntl75_ave_e_margin_prcnt[0]]),np.asarray([prcntl25_ave_e_margin_prcnt[1],prcntl75_ave_e_margin_prcnt[1]]))
double_bar_graph(ax2,N,median_energy_margin_prcnt,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=e_yerr) """


###### one plot with a three subplots (one for each metric) ###
# Data Throughput Percentage
fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
#titleStr = 'Metrics with SRP on/off'
yLabelStr = 'Data Throughput - Exec / Poss (%)'
titleStr = 'DV Throughput with SRP on/off'
xLabelStr = ''
double_bar_graph(ax1,N,exec_over_poss,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,legendFlag = False)


xLabelStr = 'Ground Station Failures'
# Median Latency
yLabelStr = 'Observation Latency (min)'
titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
lat_yerr = (np.asarray([prcntl25_obs_initial_lat_exec[0],prcntl75_obs_initial_lat_exec[0]]),np.asarray([prcntl25_obs_initial_lat_exec[1],prcntl75_obs_initial_lat_exec[1]]))
double_bar_graph(ax2,N,median_obs_initial_lat_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=lat_yerr,legendFlag = False)

# NOTE(review): the disabled block below references ax3, which is not defined
# at this point; re-enable only after creating a third subplot axis
""" # Median AoI
yLabelStr = 'Age of Information (hours)'
#titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
aoi_yerr = (np.asarray([prcntl25_av_aoi_exec[0],prcntl75_av_aoi_exec[0]]),np.asarray([prcntl25_av_aoi_exec[1],prcntl75_av_aoi_exec[1]]))
double_bar_graph(ax3,N,median_av_aoi_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=aoi_yerr) """
### SHOW PLOTS ###
plt.show()
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
# this file is intended for setting up and running multiple SPRINT runs, where the config is changed
# in between runs
import json
from subprocess import Popen

# things to modify
# r'..\inputs\reference_model_definitions\sat_refs\zhou_original_sat.json': NVM - only doing Xlnk-always

# setup things to step through
# r'..\inputs\cases\orig_circinus\zhou\sim_case_config.json': ['scenario_params']['sim_run_perturbations']['schedule_disruptions']
schedule_disruptions_list = [
    {"G0": [["2016-02-14T04:00:00.000000Z","2016-02-15T04:00:00.000000Z"]]},
    {"G1": [["2016-02-14T04:00:00.000000Z","2016-02-14T16:00:00.000000Z"]]},
    {"G2": [["2016-02-14T04:00:00.000000Z","2016-02-15T04:00:00.000000Z"]]}
]

# r'..\inputs\general_config\lp_general_params_inputs.json': ['lp_general_params']['use_self_replanner']
SRP_settings_list = [True, False]

# NOTE(review): machine-specific absolute paths; parameterize before reuse
SD_file = r'C:\Users\User\circinusGit\SPRINT\inputs\cases\orig_circinus_zhou\sim_case_config.json'
# BUGFIX: removed the stray doubled backslash before 'general_config'
SRP_file = r'C:\Users\User\circinusGit\SPRINT\inputs\general_config\lp_general_params_inputs.json'
scripts_folder = r"C:\Users\User\circinusGit\SPRINT\scripts"
# NOTE: NEED TO BE IN SCRIPTS DIRECTORY TO FIND windows_env_var_setup.bat
for SD_setting in schedule_disruptions_list:

    # rewrite the sim case config with the current schedule-disruption setting
    with open(SD_file, "r") as jsonFile:
        data = json.load(jsonFile)

    data['scenario_params']['sim_run_perturbations']['schedule_disruptions'] = SD_setting

    print('Setting schedule disruptions to: %s' % SD_setting)
    with open(SD_file, "w") as jsonFile:
        json.dump(data, jsonFile, indent=4, separators=(',', ': '))

    for SRP_setting in SRP_settings_list:
        # rewrite the LP general params with the current self-replanner setting
        with open(SRP_file, "r") as jsonFile:
            data = json.load(jsonFile)

        data['lp_general_params']['use_self_replanner'] = SRP_setting

        print('Setting SRP to: %s' % SRP_setting)
        with open(SRP_file, "w") as jsonFile:
            json.dump(data, jsonFile, indent=4, separators=(',', ': '))

        print('New Settings Set - run batch file')

        # command the launcher batch file ultimately runs:
        # python runner_const_sim.py --inputs_location /c/Users/wcgru/Documents/GitHubClones/SPRINT/scripts/../inputs --case_name orig_circinus_zhou --restore_pickle "" --remote_debug false

        # BUGFIX: launch the simulation once per configuration, inside the
        # loop. Previously the launcher was started a single time after both
        # loops finished, so only the last SD/SRP combination was ever run.
        # communicate() blocks until the run completes, so configurations run
        # sequentially and the next config rewrite cannot race a live run.
        p = Popen(r"C:\Users\User\circinusGit\SPRINT\scripts\windows_launcher.bat")
        stdout, stderr = p.communicate()

0 commit comments

Comments
 (0)