77import asyncio
88import json
99import os
10- import time
1110from dataclasses import dataclass
12- from multiprocessing import Pool
13- from typing import List , Dict
11+ from typing import Dict
1412
15- from asyncio_pool import AioPool
1613import pandas as pd
1714from zepben .evolve import NetworkConsumerClient , connect_with_token , Tracing , EnergyConsumer , PowerTransformer , \
1815 TransformerFunctionKind , Breaker , Fuse , IdentifiedObject , EquipmentTreeBuilder , downstream , TreeNode , Feeder
19- from zepben .protobuf .nc .nc_requests_pb2 import IncludedEnergizingContainers , IncludedEnergizedContainers
16+ from zepben .protobuf .nc .nc_requests_pb2 import IncludedEnergizedContainers
2017
2118
2219@dataclass
@@ -41,18 +38,17 @@ def _get_client():
4138 channel = connect_with_token (
4239 host = config ["host" ],
4340 access_token = config ["access_token" ],
44- rpc_port = config ['port ' ],
45- ca_filename = config ['ca_filename ' ]
41+ rpc_port = config ['rpc_port ' ],
42+ ca_filename = config ['ca_path ' ]
4643 )
4744 return NetworkConsumerClient (channel )
4845
4946
async def get_feeders() -> Dict[str, Feeder]:
    """Fetch the network hierarchy and return its feeders mapping (mRID str -> Feeder).

    A fresh client/channel is created for this single hierarchy request.
    """
    client = _get_client()
    hierarchy_result = (await client.get_network_hierarchy()).result
    return hierarchy_result.feeders
5550
51+
5652def process_leaf (up_data : dict , leaf : TreeNode ):
5753 to_equip : IdentifiedObject = leaf .identified_object
5854
@@ -69,22 +65,22 @@ def process_leaf(up_data: dict, leaf: TreeNode):
6965 up_data ['regulator' ] = to_equip
7066
7167
72- async def trace_from_feeder (feeder : str ):
68+ async def trace_from_feeder (feeder_mrid : str ):
7369 """
7470 Fetch the equipment container from the given feeder and build an equipment tree of everything downstream of the feeder.
7571 Use the Equipment tree to traverse upstream of all EC's and get the equipment we are interested in.
7672 Finally, create a CSV with the relevant information.
7773 """
7874 client = _get_client ()
79- print (f'processing feeder { feeder } ' )
75+ print (f'processing feeder { feeder_mrid } ' )
8076
8177 # Get all objects under the feeder, including Substations and LV Feeders
8278 await client .get_equipment_container (
83- feeder ,
79+ feeder_mrid ,
8480 include_energized_containers = IncludedEnergizedContainers .INCLUDE_ENERGIZED_LV_FEEDERS
8581 )
8682
87- feeder = client .service .get (feeder , Feeder )
83+ feeder = client .service .get (feeder_mrid , Feeder )
8884
8985 builder = EquipmentTreeBuilder ()
9086
@@ -117,7 +113,6 @@ def _process(leaf):
117113 network_objects .to_csv (f"csvs/{ feeder .mrid } _{ csv_sfx } " , index = False )
118114
119115
120-
121116class NullEquipment :
122117 """empty class to simplify code below in the case of an equipment not existing in that position of the network"""
123118 mrid = None
@@ -138,34 +133,10 @@ def _build_row(up_data: dict[str, IdentifiedObject | str]) -> EnergyConsumerDevi
138133 )
139134
140135
141- def process_target (feeder ):
142- asyncio .run (trace_from_feeder (feeder ))
143-
144- async def async_process_targets ():
145- feeders = await get_feeders ()
146- pool = AioPool (2 )
147-
148- print ('processing feeders' )
149- await pool .map (trace_from_feeder , feeders )
150-
151-
if __name__ == "__main__":
    # Fetch the feeder list up front, before the main compute section of the script.
    feeder_mrids = asyncio.run(get_feeders())

    print('processing feeders')
    # Process feeders sequentially; each asyncio.run call spins up its own event loop
    # for a single feeder trace.
    for mrid in feeder_mrids:
        asyncio.run(trace_from_feeder(mrid))
0 commit comments