-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathalns_solver.py
More file actions
353 lines (280 loc) · 12.3 KB
/
alns_solver.py
File metadata and controls
353 lines (280 loc) · 12.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
import argparse
import json
import sys
import time
import networkx as nx
import numpy as np
from alns import ALNS, State
from alns.accept import SimulatedAnnealing
from alns.stop import MaxRuntime
from alns.select import RouletteWheel
# ==============================================================================
# 1. HELPERS & CONFIG
# ==============================================================================
# Module-level cache of shortest-path travel times for the query currently
# being solved: dist_matrix[src][dst] -> time. Rebuilt by solve_query() for
# each query and read by VrpState.objective() and the repair operator.
dist_matrix = {}
def logical_to_physical(l_id, orders_list):
    """Map an internal logical node ID (0..2N-1) to a physical node ID.

    Logical IDs 0..N-1 address pickups; N..2N-1 address drop-offs of the
    same orders, where N == len(orders_list).
    """
    n = len(orders_list)
    return orders_list[l_id]['p'] if l_id < n else orders_list[l_id - n]['d']
def get_order_id_from_logical(l_id, orders_list):
    """Return the original order ID that a logical node (pickup or drop) belongs to."""
    n = len(orders_list)
    return orders_list[l_id - n if l_id >= n else l_id]['orig_id']
def is_logical_drop(l_id, n_orders):
    """True when the logical ID denotes a drop-off (IDs n_orders..2*n_orders-1)."""
    return not (l_id < n_orders)
def is_logical_pickup(l_id, n_orders):
    """True when the logical ID denotes a pickup (IDs 0..n_orders-1)."""
    return n_orders > l_id
# ==============================================================================
# 2. ALNS STATE & OPERATORS
# ==============================================================================
class VrpState(State):
    """Solution state for the pickup-and-delivery ALNS search.

    Routes hold LOGICAL node IDs (0..N-1 pickups, N..2N-1 drop-offs);
    travel times are looked up in the module-global ``dist_matrix``.
    """

    def __init__(self, routes, unassigned, orders_list, depot, fleet_size):
        self.routes = routes            # one list of logical IDs per driver
        self.unassigned = unassigned    # logical pickup IDs not yet routed
        self.orders_list = orders_list  # shared, read-only order records
        self.depot = depot
        self.fleet_size = fleet_size

    def copy(self):
        """Shallow-clone routes/unassigned; order data stays shared."""
        cloned = [list(r) for r in self.routes]
        return VrpState(cloned, list(self.unassigned),
                        self.orders_list, self.depot, self.fleet_size)

    def objective(self):
        """Cumulative arrival time at every drop-off, plus penalties.

        Returns 1e12 immediately when any leg is missing from the distance
        cache (disconnected path); otherwise adds 1e9 per unassigned order.
        """
        total = 0
        n = len(self.orders_list)
        for route in self.routes:
            elapsed = 0
            here = self.depot
            for l_id in route:
                there = logical_to_physical(l_id, self.orders_list)
                row = dist_matrix.get(here)
                if row is None or there not in row:
                    return 1e12  # unreachable leg: hard penalty
                elapsed += row[there]
                # Only drop-off arrivals contribute to the score.
                if is_logical_drop(l_id, n):
                    total += elapsed
                here = there
        return total + len(self.unassigned) * 1e9
def random_removal(state, rnd_state):
    """Destroy operator: remove roughly 30% of currently-assigned orders.

    For each chosen order, both its pickup and drop-off are pulled out of
    the route and the order (its logical pickup ID) is returned to the
    unassigned pool.

    Note: the original comment claimed a random 10-40% removal rate, but
    the rate has always been a fixed 30% (minimum one order).
    """
    s = state.copy()
    n_orders = len(s.orders_list)
    # 1. Collect assigned orders (identified by their logical pickup ID)
    # and remember which route each one lives in.
    assigned = []
    route_of = {}
    for r_idx, route in enumerate(s.routes):
        for l_id in route:
            if is_logical_pickup(l_id, n_orders):
                assigned.append(l_id)
                route_of[l_id] = r_idx
    if not assigned:
        return s
    # 2. Remove a fixed 30% of assigned orders (at least one).
    count = max(1, int(len(assigned) * 0.3))
    chosen = rnd_state.choice(assigned, count, replace=False)
    # 3. Remove pickup + drop-off. Cast numpy scalars back to plain ints so
    # IDs stored in routes/unassigned stay native Python integers.
    for oid in chosen:
        oid = int(oid)
        r_idx = route_of[oid]
        route = s.routes[r_idx]
        if oid in route:
            route.remove(oid)
        did = oid + n_orders  # matching drop-off logical ID
        if did in route:
            route.remove(did)
        s.unassigned.append(oid)
    return s
def greedy_repair(state, rnd_state):
    """Repair operator: reinsert unassigned orders at their cheapest positions.

    For each unassigned order, tries every (pickup, drop-off) insertion pair
    in every route and applies the move with the smallest increase of the
    cumulative-arrival-time score. Orders that fit nowhere (e.g. routes made
    infeasible by missing distance entries) remain unassigned.

    Fix vs. original: the route-scoring helper was redefined as a fresh
    closure inside the per-route loop on every iteration; it is now defined
    once per call (loop-invariant hoisting), with identical semantics.
    """
    s = state.copy()
    n_orders = len(s.orders_list)

    def route_score(r_logical):
        # Cumulative arrival time at drop-offs along one route, starting at
        # the depot; inf when a leg is missing from the distance cache.
        t = 0
        sc = 0
        curr_phys = s.depot
        for l_node in r_logical:
            nxt_phys = logical_to_physical(l_node, s.orders_list)
            if curr_phys not in dist_matrix or nxt_phys not in dist_matrix[curr_phys]:
                return float('inf')
            t += dist_matrix[curr_phys][nxt_phys]
            if is_logical_drop(l_node, n_orders):
                sc += t
            curr_phys = nxt_phys
        return sc

    rnd_state.shuffle(s.unassigned)
    queue = s.unassigned[:]
    s.unassigned = []
    while queue:
        oid = queue.pop()  # logical pickup ID
        p_lid = oid
        d_lid = oid + n_orders
        best_cost = float('inf')
        best_move = None  # (route index, pickup slot i, drop-off slot j)
        for r_idx in range(s.fleet_size):
            route = s.routes[r_idx]
            current = route_score(route)
            if current == float('inf'):
                continue  # route already infeasible; cannot host this order
            # Try every pickup position i and every later drop-off position j.
            for i in range(len(route) + 1):
                for j in range(i + 1, len(route) + 2):
                    candidate = route[:i] + [p_lid] + route[i:j - 1] + [d_lid] + route[j - 1:]
                    score = route_score(candidate)
                    if score != float('inf'):
                        incr = score - current
                        if incr < best_cost:
                            best_cost = incr
                            best_move = (r_idx, i, j)
        if best_move:
            r, i, j = best_move
            base = s.routes[r]
            s.routes[r] = base[:i] + [p_lid] + base[i:j - 1] + [d_lid] + base[j - 1:]
        else:
            # No feasible insertion anywhere (disconnected or too costly):
            # leave the order unassigned.
            s.unassigned.append(oid)
    return s
# ==============================================================================
# 3. MAIN EXECUTION LOGIC
# ==============================================================================
def load_graph(path):
    """Build a directed travel-time graph from a graph.json file.

    Edge weight is ``average_time`` (default 1.0); edges flagged as not
    one-way also get the reverse arc.
    """
    with open(path) as fh:
        data = json.load(fh)
    graph = nx.DiGraph()
    for node in data['nodes']:
        graph.add_node(node['id'])
    for edge in data['edges']:
        weight = edge.get('average_time', 1.0)
        graph.add_edge(edge['u'], edge['v'], weight=weight)
        # Two-way streets get the reverse arc with the same weight.
        if not edge.get('oneway', True):
            graph.add_edge(edge['v'], edge['u'], weight=weight)
    return graph
def solve_query(G, query_event, time_limit=2):
    """Solve a single VRP query event with ALNS.

    Filters out unserviceable orders (pickup/drop-off unreachable from the
    depot, or drop-off unreachable from the pickup), precomputes shortest
    path times over the relevant nodes into the module-global
    ``dist_matrix``, then runs ALNS for up to ``time_limit`` seconds.

    Returns:
        (best_state, elapsed_seconds); ``best_state`` is None when no order
        in the query is serviceable.

    Fix vs. original: the two bare ``except:`` clauses (which also swallowed
    KeyboardInterrupt/SystemExit) are narrowed to ``nx.NodeNotFound``, the
    exception Dijkstra raises when the source node is absent from G.
    """
    start_time = time.time()
    fleet = query_event['fleet']
    orders_data = query_event['orders']
    depot = fleet['depot_node']
    fleet_size = fleet['num_delivery_guys']
    # 1. Reachability & order filtering.
    # One Dijkstra from the depot tells us which nodes are reachable at all;
    # in a strongly connected graph that is everyone, otherwise a subset.
    try:
        paths_from_depot = nx.single_source_dijkstra_path_length(G, depot, weight='weight')
    except nx.NodeNotFound:
        # Depot absent from the graph: only the depot itself is "reachable".
        paths_from_depot = {depot: 0}
    valid_orders_list = []
    valid_unassigned_indices = []
    # Logical IDs in the optimization problem index into valid_orders_list;
    # the original order_id is preserved for output.
    relevant_nodes = {depot}
    for o in orders_data:
        p = o['pickup']
        d = o['dropoff']
        # Fast pre-check: both endpoints reachable from the depot. For a
        # fragmented *directed* graph this is necessary but not sufficient;
        # the explicit P->D check below closes the gap for the order leg.
        if (p in paths_from_depot) and (d in paths_from_depot):
            try:
                # Verify the pickup can actually reach the drop-off.
                nx.shortest_path_length(G, p, d, weight='weight')
                valid_orders_list.append({
                    'p': p, 'd': d, 'orig_id': o['order_id']
                })
                valid_unassigned_indices.append(len(valid_orders_list) - 1)
                relevant_nodes.add(p)
                relevant_nodes.add(d)
            except nx.NetworkXNoPath:
                pass  # Unserviceable order: ignore.
    # 2. Precompute a distance row for every relevant node (depot + order
    # endpoints) — effectively APSP restricted to the nodes we can visit.
    global dist_matrix
    dist_matrix = {}
    for n in relevant_nodes:
        try:
            dist_matrix[n] = nx.single_source_dijkstra_path_length(G, n, weight='weight')
        except nx.NodeNotFound:
            dist_matrix[n] = {}  # empty row -> every leg from n is infeasible
    # 3. Set up and run ALNS.
    if not valid_orders_list:
        return None, (time.time() - start_time)
    rng = np.random.default_rng(42)  # fixed seed for reproducible runs
    init_state = VrpState([[] for _ in range(fleet_size)], valid_unassigned_indices,
                          valid_orders_list, depot, fleet_size)
    init_state = greedy_repair(init_state, rng)
    alns = ALNS(rng)
    alns.add_destroy_operator(random_removal)
    alns.add_repair_operator(greedy_repair)
    result = alns.iterate(
        init_state,
        RouletteWheel([5, 1, 1, 1], 0.8, 1, 1),
        SimulatedAnnealing(1000, 0.01, 0.95),
        MaxRuntime(time_limit)
    )
    return result.best_state, (time.time() - start_time)
def main():
    """CLI entry point: solve every query event and write results as JSON."""
    parser = argparse.ArgumentParser(description="Python ALNS Solver for Phase 3 VRP")
    parser.add_argument("graph", help="Path to graph.json")
    parser.add_argument("queries", help="Path to queries.json")
    parser.add_argument("output", help="Path to output.json")
    parser.add_argument("--time", type=float, default=3.0, help="Max runtime per query in seconds")
    args = parser.parse_args()

    try:
        G = load_graph(args.graph)
    except Exception as exc:
        print(f"Error loading graph: {exc}")
        sys.exit(1)

    with open(args.queries) as fh:
        queries_data = json.load(fh)

    output_data = {
        "meta": queries_data.get("meta", {}),
        "results": [],
    }

    print(f"Solving {len(queries_data['events'])} queries...", file=sys.stderr)
    for i, event in enumerate(queries_data['events']):
        print(f" > Query {i}...", file=sys.stderr)
        best_state, duration = solve_query(G, event, args.time)
        query_result = {
            "assignments": [],
            "metrics": {"total_delivery_time_s": 0.0},
            "processing_time": round(duration, 3),
        }
        if best_state:
            query_result["metrics"]["total_delivery_time_s"] = best_state.objective()
            n_valid = len(best_state.orders_list)
            for d_id, route in enumerate(best_state.routes):
                # Physical stop sequence: depot first, then each visited
                # node, collapsing consecutive duplicates as we go
                # (e.g. [A, B, B, C] -> [A, B, C]).
                phys_path = [best_state.depot]
                for lid in route:
                    node = logical_to_physical(lid, best_state.orders_list)
                    if node != phys_path[-1]:
                        phys_path.append(node)
                # Orders served by this driver (one per pickup in the route).
                assigned_order_ids = [
                    get_order_id_from_logical(lid, best_state.orders_list)
                    for lid in route
                    if is_logical_pickup(lid, n_valid)
                ]
                # Only emit drivers who actually do work.
                if assigned_order_ids:
                    query_result["assignments"].append({
                        "driver_id": d_id,
                        "route": phys_path,
                        "order_ids": assigned_order_ids,
                    })
        output_data["results"].append(query_result)

    with open(args.output, "w") as fh:
        json.dump(output_data, fh, indent=2)
    print(f"Done. Results written to {args.output}", file=sys.stderr)


if __name__ == "__main__":
    main()