Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,10 @@
"zepben.evolve==0.48.0",
"numba==0.60.0",
"geojson==2.5.0",
"gql[requests]==3.4.1"
"gql[requests]==3.4.1",
"geopandas",
"pandas",
"shapely"
],
extras_require={
"test": test_deps,
Expand Down
2 changes: 1 addition & 1 deletion src/zepben/examples/connecting_to_grpc_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ async def connect_using_token():
from zepben.evolve import connect_with_token, NetworkConsumerClient

with open("config.json") as f:
c = json.loads(f.read())
c = json.load(f)

print("Connecting to EWB..")
channel = connect_with_token(
Expand Down
13 changes: 7 additions & 6 deletions src/zepben/examples/current_state_manipulations.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

import asyncio
import json
import sys
from typing import List, Set

from zepben.evolve import (
Feeder, PowerTransformer, Switch, Tracing, NetworkConsumerClient, connect_with_password, Terminal,
BusbarSection, ConductingEquipment, Breaker, EquipmentContainer, StepContext, NetworkTraceStep
BusbarSection, ConductingEquipment, Breaker, EquipmentContainer, StepContext, NetworkTraceStep, connect_with_token
)

from zepben.protobuf.nc.nc_requests_pb2 import INCLUDE_ENERGIZED_FEEDERS, INCLUDE_ENERGIZING_FEEDERS
Expand Down Expand Up @@ -48,8 +49,8 @@ async def fetch_zone_feeders(client: NetworkConsumerClient):
await client.get_equipment_container(
feeder.mrid,
Feeder,
include_energizing_containers=INCLUDE_ENERGIZED_FEEDERS,
include_energized_containers=INCLUDE_ENERGIZING_FEEDERS
include_energizing_containers=INCLUDE_ENERGIZING_FEEDERS,
include_energized_containers=INCLUDE_ENERGIZED_FEEDERS
)
print("CPM feeders fetched.")

Expand Down Expand Up @@ -244,11 +245,11 @@ def log_txs(desc: str, feeders: Set[Feeder]):


async def main():
if len(sys.argv) != 6:
raise TypeError("you must provided the CLIENT_ID, username, password, host and port to connect")

# noinspection PyTypeChecker
async with connect_with_password(*sys.argv[1:]) as secure_channel:
with open('config.json') as f:
config = json.load(f)
async with connect_with_token(**config) as secure_channel:
await run_simple(NetworkConsumerClient(secure_channel))
await run_swap_feeder(NetworkConsumerClient(secure_channel))

Expand Down
2 changes: 1 addition & 1 deletion src/zepben/examples/examining_connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def build_network() -> NetworkService:

# We create the objects, and their Terminals
_es = EnergySource(mrid="es", terminals=[
Terminal(mrid="es-t")
Terminal(mrid="es_t")
])

_hv_line = AcLineSegment(mrid="hv_line", terminals=[
Expand Down
2 changes: 1 addition & 1 deletion src/zepben/examples/export_open_dss_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from datetime import datetime

from zepben.eas.client.opendss import OpenDssConfig
from zepben.eas.client.work_package import GeneratorConfig, ModelConfig, LoadPlacement, FeederScenarioAllocationStrategy, SolveConfig, RawResultsConfig, \
from zepben.eas.client.work_package import GeneratorConfig, ModelConfig, FeederScenarioAllocationStrategy, SolveConfig, RawResultsConfig, \
MeterPlacementConfig, SwitchMeterPlacementConfig, SwitchClass
from zepben.eas import EasClient, TimePeriod
from time import sleep
Expand Down
8 changes: 4 additions & 4 deletions src/zepben/examples/fetching_network_hierarchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ async def main():

print("Network hierarchy:")
for gr in network_hierarchy.result.geographical_regions.values():
print(f"- {gr.name}")
print(f"- GeographicalRegion mRID: {gr.mrid} name: {gr.name}")
for sgr in gr.sub_geographical_regions:
print(f" - {sgr.name}")
print(f" - SubgeographicalRegion mRID: {sgr.mrid} name: {sgr.name}")
for sub in sgr.substations:
print(f" - {sub.name}")
print(f" - Substation mRID: {sub.mrid} name: {sub.name}")
for fdr in sub.feeders:
print(f" - {fdr.name}")
print(f" - Feeder mRID: {fdr.mrid} name: {fdr.name}")


if __name__ == "__main__":
Expand Down
58 changes: 24 additions & 34 deletions src/zepben/examples/tracing_conductor_type_by_lv_circuit.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,13 @@
import csv
import json
import os
from typing import Any, List, Union
from typing import List, Union, Tuple, Optional, Dict

from zepben.evolve import NetworkConsumerClient, PhaseStep, PhaseCode, AcLineSegment, \
Switch, normal_downstream_trace, FeederDirection, connect_with_token
from zepben.evolve.services.network.tracing.phases.phase_step import start_at
from zepben.protobuf.nc.nc_requests_pb2 import IncludedEnergizedContainers
from zepben.evolve import NetworkConsumerClient, PhaseCode, AcLineSegment, \
FeederDirection, connect_with_token, Tracing, downstream, NetworkTraceStep, ConductingEquipment, PowerTransformer
from zepben.protobuf.nc.nc_requests_pb2 import INCLUDE_ENERGIZED_LV_FEEDERS

LineInfo = Tuple[str, str, Optional[Union[int, float]]]


async def main():
Expand All @@ -27,45 +28,38 @@ async def main():
result = (await client.get_network_hierarchy()).throw_on_error().result
print("Connection Established")

switch_to_line_type: dict[str, tuple[list[Any], bool]] = {}
tx_to_line_type: Dict[str, Tuple[List[LineInfo], bool]] = {}

os.makedirs("csvs", exist_ok=True)
for feeder in result.feeders.values():
print(f"Fetching {feeder.mrid}")
if not (network := await get_feeder_network(channel, feeder.mrid)): # Skip feeders that fail to pull down
print(f"Failed to retrieve feeder {feeder.mrid}")
continue
for io in network.objects(Switch):
for io in network.objects(PowerTransformer):
print(io)
_loop = False

for t in io.terminals:
t_dir = t.normal_feeder_direction
if t_dir == FeederDirection.BOTH:
_loop = True

sw_name = io.name
sw_id = io.mrid

# Currently using switch with the following name as a marker for LV circuit heads
if "Circuit Head Switch" in sw_name:
switch_to_line_type[sw_id] = (
await get_downstream_trace(start_at(io, PhaseCode.ABCN)),
loop
)
await save_to_csv(switch_to_line_type, feeder.mrid)
tx_to_line_type[io.mrid] = (await get_downstream_trace(io, PhaseCode.ABCN), _loop)
await save_to_csv(tx_to_line_type, feeder.mrid)


async def save_to_csv(data: "Dict[str, Tuple[List[LineInfo], bool]]", feeder_mrid):
    """Write the collected conductor data for one feeder to a CSV file.

    Args:
        data: Map of transformer mRID -> (list of (line mrid, line type, length)
            tuples, loop flag for that transformer).
        feeder_mrid: mRID of the feeder the data was traced from; used in the
            file name and repeated on every row.

    Writes csvs/conductor_types_<feeder_mrid>.csv, overwriting any existing file.
    """
    filename = f"csvs/conductor_types_{feeder_mrid}.csv"
    # newline='' is required by the csv module to avoid blank rows on Windows.
    with open(filename, mode='w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(["Feeder", "Transformer", "Line", "Line Type", "Length", "Loop"])

        for transformer, (values, loop) in data.items():
            for line, line_type, length in values:
                writer.writerow([feeder_mrid, transformer, line, line_type, length, loop])

    print(f"Data saved to {filename}")
Expand All @@ -85,22 +79,18 @@ async def get_feeder_network(channel, feeder_mrid):
return client.service


async def get_downstream_trace(ce: "ConductingEquipment", phase_code: "PhaseCode") -> "List[LineInfo]":
    """Trace downstream from `ce` and collect info for every AcLineSegment reached.

    Args:
        ce: The conducting equipment to start the trace from.
        phase_code: Phases to trace on.

    Returns:
        A list of (line mrid, conductor type name, length) tuples; length falls
        back to 0 when the segment has no length recorded.
    """
    collected: "List[LineInfo]" = []

    def collect_eq_in(step: "NetworkTraceStep", _):
        # Record each line segment as the trace steps onto it.
        # NOTE(review): assumes step.path.to_equipment always has asset_info set
        # for AcLineSegments — confirm against the data model.
        if isinstance(equip := step.path.to_equipment, AcLineSegment):
            collected.append((equip.mrid, equip.asset_info.name, equip.length or 0))

    await (
        Tracing.network_trace()
        .add_condition(downstream())
        .add_step_action(collect_eq_in)
    ).run(start=ce, phases=phase_code)

    return collected
Expand Down
80 changes: 80 additions & 0 deletions src/zepben/examples/tx_id_to_name.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
# Copyright 2025 Zeppelin Bend Pty Ltd
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import asyncio
import json
import os.path
from dataclasses import dataclass
import pandas as pd

from zepben.evolve import NetworkConsumerClient, connect_with_token, PowerTransformer

OUTPUT_FILE = "transformer_id_mapping.csv"
# Write the CSV header only for the first appended chunk; flipped to False
# by process_nodes() after the first write.
HEADER = True

# Connection settings are read once at import time from config.json.
with open("./config.json") as f:
    c = json.load(f)  # json.load(f) rather than json.loads(f.read())


async def connect():
    """Connect to EWB and export transformer id/name mappings to OUTPUT_FILE.

    Walks the network hierarchy and processes the first zone substation found
    (and its feeders) only, then returns. Does nothing if OUTPUT_FILE already
    exists.
    """
    # Check the cheap precondition before opening a gRPC channel.
    if os.path.exists(OUTPUT_FILE):
        print(f"Output file {OUTPUT_FILE} already exists, please delete it if you would like to regenerate.")
        return

    channel = connect_with_token(host=c["host"], rpc_port=c["rpc_port"], access_token=c["access_token"], ca_filename=c["ca_path"])
    network_client = NetworkConsumerClient(channel=channel)

    network_hierarchy = (await network_client.get_network_hierarchy()).throw_on_error().value

    print("Network hierarchy:")
    for gr in network_hierarchy.geographical_regions.values():
        print(f"- Geographical region: {gr.name}")
        for sgr in gr.sub_geographical_regions:
            print(f"  - Subgeographical region: {sgr.name}")
            for sub in sgr.substations:
                print(f"    - Zone Substation: {sub.name}")
                await process_nodes(sub.mrid, channel)
                for fdr in sub.feeders:
                    print(f"      - Processing Feeder: {fdr.name}")
                    await process_nodes(fdr.mrid, channel)
                return  # Only process the first zone...


@dataclass
class NetworkObject:
    """One output row: a distribution transformer and its containing container.

    Attributes:
        dist_tx_id: mRID of the distribution transformer.
        dist_tx_name: Name of the distribution transformer.
        container: Name of the container (substation or feeder) holding it.
        container_mrid: mRID of that container.
    """
    dist_tx_id: str
    dist_tx_name: str
    container: str
    container_mrid: str


async def process_nodes(container_mrid: str, channel):
    """Fetch one equipment container and append its transformers to OUTPUT_FILE.

    Args:
        container_mrid: mRID of the substation or feeder to fetch.
        channel: An open gRPC channel to the EWB server.

    Appends one CSV row per PowerTransformer; the header is written only on
    the first call (controlled by the module-level HEADER flag).
    """
    global HEADER
    print("Fetching from server ...")
    network_client = NetworkConsumerClient(channel=channel)
    network_service = network_client.service
    (await network_client.get_equipment_container(container_mrid)).throw_on_error()
    container = network_service.get(container_mrid)
    container_name = container.name

    print("Processing equipment ...")
    network_objects = [
        NetworkObject(equip.mrid, equip.name, container_name, container_mrid)
        for equip in network_service.objects(PowerTransformer)
    ]

    # Pass explicit columns (matching NetworkObject's field order) so that a
    # container with no transformers still appends a correctly-shaped section
    # instead of a column-less empty frame.
    frame = pd.DataFrame(network_objects, columns=["dist_tx_id", "dist_tx_name", "container", "container_mrid"])
    frame.to_csv(OUTPUT_FILE, index=False, mode='a', header=HEADER)
    print(f"Finished processing {container_mrid}")
    if HEADER:
        HEADER = False


if __name__ == "__main__":
    # Script entry point: run the async hierarchy walk and CSV export.
    asyncio.run(connect())
Loading