From 85bd9b4a700fad91c4bebd47aba3e1fa44654eb7 Mon Sep 17 00:00:00 2001 From: adamwhite629 <102849144+adamwhite629@users.noreply.github.com> Date: Wed, 4 Feb 2026 10:45:19 +1100 Subject: [PATCH 1/4] Add new examples and fix original examples of how to use study framework - tested on EE network --- .gitignore | 3 + .../customer_distance_to_transformer.py | 284 +++++++++++++ .../studies/data_quality_studies/README.md | 40 ++ .../studies/data_quality_studies/__init__.py | 1 + .../asset_attribute_inconsistencies.py | 389 ++++++++++++++++++ .../data_quality_studies/connectivity_gaps.py | 248 +++++++++++ .../consumer_mapping_issues.py | 312 ++++++++++++++ .../data_quality_summary.py | 388 +++++++++++++++++ .../studies/data_quality_studies/dq_utils.py | 118 ++++++ .../phase_conductor_issues.py | 256 ++++++++++++ .../protection_directionality_anomalies.py | 286 +++++++++++++ .../spatial_location_anomalies.py | 281 +++++++++++++ .../style_asset_attribute.json | 63 +++ .../style_connectivity_gaps.json | 30 ++ .../style_consumer_mapping.json | 45 ++ .../style_phase_conductor.json | 32 ++ .../style_protection_directionality.json | 32 ++ .../style_spatial_location.json | 38 ++ .../loop_impedance_by_energy_consumer.py | 321 +++++++++++++++ .../pec_capacity_vs_transformer_rating.py | 337 +++++++++++++++ .../studies/pv_percent_by_transformer.py | 322 +++++++++++++++ .../studies/style_customer_distance.json | 44 ++ src/zepben/examples/studies/style_eol.json | 14 +- .../studies/style_loop_impedance.json | 65 +++ .../studies/style_pec_capacity_percent.json | 41 ++ .../examples/studies/style_pv_percent.json | 41 ++ .../examples/studies/style_tap_changer.json | 48 +++ .../studies/style_transformer_density.json | 128 ++++++ .../examples/studies/suspect_end_of_line.py | 291 ++++++++----- .../suspect_end_of_line_all_feeders.py | 121 ++++++ .../tap_changer_info_by_transformer.py | 325 +++++++++++++++ .../studies/transformer_downstream_density.py | 356 ++++++++++++++++ 32 files changed, 5199 
insertions(+), 101 deletions(-) create mode 100644 src/zepben/examples/studies/customer_distance_to_transformer.py create mode 100644 src/zepben/examples/studies/data_quality_studies/README.md create mode 100644 src/zepben/examples/studies/data_quality_studies/__init__.py create mode 100644 src/zepben/examples/studies/data_quality_studies/asset_attribute_inconsistencies.py create mode 100644 src/zepben/examples/studies/data_quality_studies/connectivity_gaps.py create mode 100644 src/zepben/examples/studies/data_quality_studies/consumer_mapping_issues.py create mode 100644 src/zepben/examples/studies/data_quality_studies/data_quality_summary.py create mode 100644 src/zepben/examples/studies/data_quality_studies/dq_utils.py create mode 100644 src/zepben/examples/studies/data_quality_studies/phase_conductor_issues.py create mode 100644 src/zepben/examples/studies/data_quality_studies/protection_directionality_anomalies.py create mode 100644 src/zepben/examples/studies/data_quality_studies/spatial_location_anomalies.py create mode 100644 src/zepben/examples/studies/data_quality_studies/style_asset_attribute.json create mode 100644 src/zepben/examples/studies/data_quality_studies/style_connectivity_gaps.json create mode 100644 src/zepben/examples/studies/data_quality_studies/style_consumer_mapping.json create mode 100644 src/zepben/examples/studies/data_quality_studies/style_phase_conductor.json create mode 100644 src/zepben/examples/studies/data_quality_studies/style_protection_directionality.json create mode 100644 src/zepben/examples/studies/data_quality_studies/style_spatial_location.json create mode 100644 src/zepben/examples/studies/loop_impedance_by_energy_consumer.py create mode 100644 src/zepben/examples/studies/pec_capacity_vs_transformer_rating.py create mode 100644 src/zepben/examples/studies/pv_percent_by_transformer.py create mode 100644 src/zepben/examples/studies/style_customer_distance.json create mode 100644 
src/zepben/examples/studies/style_loop_impedance.json create mode 100644 src/zepben/examples/studies/style_pec_capacity_percent.json create mode 100644 src/zepben/examples/studies/style_pv_percent.json create mode 100644 src/zepben/examples/studies/style_tap_changer.json create mode 100644 src/zepben/examples/studies/style_transformer_density.json create mode 100644 src/zepben/examples/studies/suspect_end_of_line_all_feeders.py create mode 100644 src/zepben/examples/studies/tap_changer_info_by_transformer.py create mode 100644 src/zepben/examples/studies/transformer_downstream_density.py diff --git a/.gitignore b/.gitignore index 4ef22d8..9daa029 100644 --- a/.gitignore +++ b/.gitignore @@ -135,3 +135,6 @@ src/zepben/examples/config*.json *.crt src/zepben/examples/csvs/ + +config.json +src/zepben/examples/config.json \ No newline at end of file diff --git a/src/zepben/examples/studies/customer_distance_to_transformer.py b/src/zepben/examples/studies/customer_distance_to_transformer.py new file mode 100644 index 0000000..1fb16bd --- /dev/null +++ b/src/zepben/examples/studies/customer_distance_to_transformer.py @@ -0,0 +1,284 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import asyncio +import json +from datetime import datetime +from itertools import islice +from typing import List, Dict, Tuple, Callable, Any, Union, Type, Set + +from geojson import FeatureCollection, Feature +from geojson.geometry import Geometry, LineString, Point +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import Study, Result, GeoJsonOverlay +from zepben.ewb import ( + AcLineSegment, + EnergyConsumer, + PowerTransformer, + NetworkConsumerClient, + PhaseCode, + Feeder, + PowerSystemResource, + Location, + connect_with_token, + NetworkTraceStep, + Tracing, + upstream, + stop_at_open, + IncludedEnergizedContainers, +) +from zepben.ewb.services.network.tracing.networktrace.operators.network_state_operators import NetworkStateOperators + + +with open("../config.json") as f: + c = json.loads(f.read()) + + +def chunk(it, size): + it = iter(it) + return iter(lambda: tuple(islice(it, size)), ()) + + +async def main(): + # Only process feeders in the following zones + zone_mrids = ["CPM"] + print(f"Start time: {datetime.now()}") + + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + substations = hierarchy.value.substations + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + feeder_mrids = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + all_ecs: List[EnergyConsumer] = [] + ec_to_distance_m: Dict[str, float] = {} + + # Process feeders in batches of 3, using asyncio, for performance + 
batches = chunk(feeder_mrids, 3) + for feeders in batches: + futures = [] + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + print(f"Processing feeders {', '.join(feeders)}") + for feeder_mrid in feeders: + futures.append(asyncio.ensure_future(fetch_feeder_customer_distance(feeder_mrid, rpc_channel))) + + for future in futures: + result = await future + if result is None: + continue + ecs, distances = result + all_ecs.extend(ecs) + ec_to_distance_m.update(distances) + + print(f"Creating study for {len(all_ecs)} energy consumers") + + eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await upload_distance_study( + eas_client, + all_ecs, + ec_to_distance_m, + name=f"Customer distance to transformer ({', '.join(zone_mrids)})", + description="Distance along the normal network path from each EnergyConsumer to its upstream transformer.", + tags=["customer_distance", "-".join(zone_mrids)], + styles=json.load(open("style_customer_distance.json", "r")), + ) + await eas_client.aclose() + print("Uploaded Study") + + print(f"Finish time: {datetime.now()}") + + +async def fetch_feeder_customer_distance( + feeder_mrid: str, + rpc_channel, +) -> Union[Tuple[List[EnergyConsumer], Dict[str, float]], None]: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + + result = ( + await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder 
{feeder_mrid}") + + # Required for directed traces (upstream/downstream) + await Tracing.set_direction().run(network, network_state_operators=NetworkStateOperators.NORMAL) + + ecs = list(network.objects(EnergyConsumer)) + ec_to_distance_m = {} + for ec in ecs: + path_lines = await get_upstream_lines_to_transformer(ec) + distance = sum(_line_length_m(line) for line in path_lines) + ec_to_distance_m[ec.mrid] = distance + + return ecs, ec_to_distance_m + + +def _line_length_m(line: AcLineSegment) -> float: + return float(line.length or 0.0) + + +def collect_upstream_lines_provider(lines: Set[AcLineSegment]): + + async def collect_lines(ps: NetworkTraceStep, _): + line = ps.path.traversed_ac_line_segment + if line is not None: + lines.add(line) + if isinstance(ps.path.to_equipment, AcLineSegment): + lines.add(ps.path.to_equipment) + if isinstance(ps.path.from_equipment, AcLineSegment): + lines.add(ps.path.from_equipment) + + return collect_lines + + +async def get_upstream_lines_to_transformer(ec: EnergyConsumer) -> Set[AcLineSegment]: + lines = set() + + await ( + Tracing.network_trace() + .add_condition(upstream()) + .add_condition(stop_at_open()) + .add_step_action(collect_upstream_lines_provider(lines)) + .add_stop_condition(_is_transformer) + ).run(start=ec, phases=PhaseCode.ABCN, can_stop_on_start_item=False) + + return lines + + +def _is_transformer(ps: NetworkTraceStep, _context=None) -> bool: + return isinstance(ps.path.to_equipment, PowerTransformer) + + +async def upload_distance_study( + eas_client: EasClient, + ecs: List[EnergyConsumer], + ec_to_distance_m: Dict[str, float], + name: str, + description: str, + tags: List[str], + styles: List, +) -> None: + + class_to_properties = { + EnergyConsumer: { + "name": lambda ec: ec.name, + "distance_m": _distance_from(ec_to_distance_m), + "distance_label": _distance_label_from(ec_to_distance_m), + "type": lambda x: "ec", + }, + } + feature_collection = to_geojson_feature_collection(ecs, class_to_properties) + 
response = await eas_client.async_upload_study( + Study( + name=name, + description=description, + tags=tags, + results=[ + Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=[s['id'] for s in styles] + ) + ) + ], + styles=styles + ) + ) + print(f"Study response: {response}") + + +def _distance_from(ec_to_distance_m: Dict[str, float]): + def fun(ec: EnergyConsumer): + return round(ec_to_distance_m.get(ec.mrid, 0.0)) + + return fun + + +def _distance_label_from(ec_to_distance_m: Dict[str, float]): + def fun(ec: EnergyConsumer): + value = ec_to_distance_m.get(ec.mrid, 0.0) + return f"{round(value)}m" + + return fun + + +def to_geojson_feature_collection( + psrs: List[PowerSystemResource], + class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]] +) -> FeatureCollection: + + features = [] + for psr in psrs: + properties_map = class_to_properties.get(type(psr)) + + if properties_map is not None: + feature = to_geojson_feature(psr, properties_map) + if feature is not None: + features.append(feature) + + return FeatureCollection(features) + + +def to_geojson_feature( + psr: PowerSystemResource, + property_map: Dict[str, Callable[[PowerSystemResource], Any]] +) -> Union[Feature, None]: + + geometry = to_geojson_geometry(psr.location) + if geometry is None: + return None + + properties = {k: f(psr) for (k, f) in property_map.items()} + return Feature(psr.mrid, geometry, properties) + + +def to_geojson_geometry(location: Location) -> Union[Geometry, None]: + points = list(location.points) if location is not None else [] + if len(points) > 1: + return LineString([(point.x_position, point.y_position) for point in points]) + elif len(points) == 1: + return Point((points[0].x_position, points[0].y_position)) + else: + return None + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/README.md b/src/zepben/examples/studies/data_quality_studies/README.md new file mode 
100644 index 0000000..0d02032 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/README.md @@ -0,0 +1,40 @@ +# Data Quality Studies + +These scripts generate data-quality analysis layers focused on connectivity and power-flow modeling issues. +They upload EAS studies using the credentials in `src/zepben/examples/config.json`. + +## Usage + +From the repository root: + +``` +python src/zepben/examples/studies/data_quality_studies/connectivity_gaps.py CPM +python src/zepben/examples/studies/data_quality_studies/consumer_mapping_issues.py CPM +python src/zepben/examples/studies/data_quality_studies/phase_conductor_issues.py CPM +python src/zepben/examples/studies/data_quality_studies/asset_attribute_inconsistencies.py CPM +python src/zepben/examples/studies/data_quality_studies/protection_directionality_anomalies.py CPM +python src/zepben/examples/studies/data_quality_studies/spatial_location_anomalies.py CPM +``` + +Use a comma-separated list for multiple zones: + +``` +python src/zepben/examples/studies/data_quality_studies/connectivity_gaps.py CPM,NSK +``` + +## Summary Study + +`data_quality_summary.py` runs all checks for a zone and uploads a single study +containing only layers where anomalies are detected. The study description and +tags list only the tests that reported anomalies. + +``` +python src/zepben/examples/studies/data_quality_studies/data_quality_summary.py NSK +``` + +## Notes + +- Each script accepts a zone code argument. If omitted, it defaults to `CPM`. +- Individual scripts always upload all layers; if no anomalies are found, the + layer name is prefixed with "No anomalies detected: ...". +- The summary script skips upload if no anomalies are detected at all. 
diff --git a/src/zepben/examples/studies/data_quality_studies/__init__.py b/src/zepben/examples/studies/data_quality_studies/__init__.py new file mode 100644 index 0000000..e976163 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/__init__.py @@ -0,0 +1 @@ +# Data quality study examples. diff --git a/src/zepben/examples/studies/data_quality_studies/asset_attribute_inconsistencies.py b/src/zepben/examples/studies/data_quality_studies/asset_attribute_inconsistencies.py new file mode 100644 index 0000000..4562e99 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/asset_attribute_inconsistencies.py @@ -0,0 +1,389 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. + +import asyncio +import json +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Set, Tuple +import sys + +from geojson import FeatureCollection +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import GeoJsonOverlay, Result, Study +from zepben.ewb import ( + AcLineSegment, + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + PowerTransformer, + Switch, + connect_with_token, +) + +from dq_utils import ( + chunk, + get_zone_mrids, + line_length_m, + load_config, + no_anomaly_feature_collection, + to_geojson_feature_collection, +) + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be 
processed: {', '.join(feeder_mrids)}") + + zero_length_lines: Set[AcLineSegment] = set() + missing_impedance_lines: Set[AcLineSegment] = set() + missing_rating_transformers: Set[PowerTransformer] = set() + missing_impedance_transformers: Set[PowerTransformer] = set() + missing_normal_state_switches: Set[Switch] = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + result = await _fetch_asset_inconsistencies(feeder_mrid, rpc_channel) + if result is None: + continue + z_lines, i_lines, t_missing, t_imp_missing, s_missing = result + zero_length_lines.update(z_lines) + missing_impedance_lines.update(i_lines) + missing_rating_transformers.update(t_missing) + missing_impedance_transformers.update(t_imp_missing) + missing_normal_state_switches.update(s_missing) + + style_path = Path(__file__).resolve().parent / "style_asset_attribute.json" + styles = json.load(open(style_path, "r")) + result_specs = [ + ( + "Zero-length line segments", + _build_line_result( + "Zero-length line segments", + list(zero_length_lines), + style_ids=["dq-zero-length-lines"], + issue="zero_length", + ), + ), + ( + "Line segments missing impedance info", + _build_line_result( + "Line segments missing impedance info", + list(missing_impedance_lines), + style_ids=["dq-missing-impedance-lines"], + issue="missing_impedance", + ), + ), + ( + "Transformers missing rating", + _build_transformer_result( + "Transformers missing rating", + list(missing_rating_transformers), + style_ids=["dq-missing-rating-transformer"], + issue="missing_rating", + ), + ), + ( + "Transformers missing impedance", + _build_transformer_result( + "Transformers missing impedance", + list(missing_impedance_transformers), + style_ids=["dq-missing-impedance-transformer"], + issue="missing_transformer_impedance", + ), + ), + ( + "Switches missing normal state", + _build_switch_result( + "Switches missing normal state", + list(missing_normal_state_switches), + 
style_ids=["dq-missing-normal-state-switch"], + issue="missing_normal_state", + ), + ), + ] + results = [] + for name, result in result_specs: + if result is not None: + results.append(result) + else: + results.append( + Result( + name=f"No anomalies detected: {name}", + geo_json_overlay=GeoJsonOverlay( + data=no_anomaly_feature_collection(), + styles=["dq-no-anomalies"], + ), + ) + ) + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Asset attribute inconsistencies ({', '.join(zone_mrids)})", + description="Lines with missing length/impedance, transformers missing ratings/impedance, and switches missing normal state.", + tags=["dq_asset_attributes", "-".join(zone_mrids)], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + +def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_asset_inconsistencies( + feeder_mrid: str, + rpc_channel, +) -> Tuple[Set[AcLineSegment], Set[AcLineSegment], Set[PowerTransformer], Set[PowerTransformer], Set[Switch]] | None: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + result = await 
client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return await analyze_network(network) + + +async def analyze_network( + network, +) -> Tuple[Set[AcLineSegment], Set[AcLineSegment], Set[PowerTransformer], Set[PowerTransformer], Set[Switch]]: + zero_length_lines = set() + missing_impedance_lines = set() + for line in network.objects(AcLineSegment): + if line_length_m(line) <= 0.0: + zero_length_lines.add(line) + if not _has_impedance(line): + missing_impedance_lines.add(line) + + missing_rating_transformers = set() + missing_impedance_transformers = set() + for pt in network.objects(PowerTransformer): + if not _has_transformer_rating(pt): + missing_rating_transformers.add(pt) + if not _has_transformer_impedance(pt): + missing_impedance_transformers.add(pt) + + missing_normal_state_switches = set() + for sw in network.objects(Switch): + if _normal_open_value(sw) is None: + missing_normal_state_switches.add(sw) + + return zero_length_lines, missing_impedance_lines, missing_rating_transformers, missing_impedance_transformers, missing_normal_state_switches + + +def _has_impedance(line: AcLineSegment) -> bool: + impedance_attrs = [ + "per_length_sequence_impedance", + "per_length_phase_impedance", + "per_length_impedance", + "wire_info", + "ac_line_segment_wire_info", + ] + for attr in impedance_attrs: + value = getattr(line, attr, None) + if value is not None: + return True + for attr in ["r", "x", "r0", "x0"]: + value = getattr(line, attr, None) + if value not in (None, 0): + return True + return False + + +def _has_transformer_rating(pt: PowerTransformer) -> bool: + ends = list(pt.ends) + for end in ends: + rated_s = getattr(end, "rated_s", None) + if rated_s and rated_s > 0: + return True + for rating in 
getattr(end, "s_ratings", []): + if rating and getattr(rating, "rated_s", None): + return True + return False + + +def _has_transformer_impedance(pt: PowerTransformer) -> bool: + primary_end = _primary_transformer_end(pt) + if primary_end is None: + return False + + for attr in ("r", "x", "r0", "x0"): + value = getattr(primary_end, attr, None) + if value not in (None, 0): + return True + + star_impedance = getattr(primary_end, "star_impedance", None) + if star_impedance is not None: + rr = star_impedance.resistance_reactance() + if rr is not None and not rr.is_empty(): + return True + + power_transformer_info = getattr(pt, "power_transformer_info", None) + if power_transformer_info is not None and getattr(primary_end, "end_number", None) is not None: + rr = power_transformer_info.resistance_reactance(primary_end.end_number) + if rr is not None and not rr.is_empty(): + return True + + return False + + +def _primary_transformer_end(pt: PowerTransformer): + ends = list(pt.ends) + if not ends: + return None + for end in ends: + if getattr(end, "end_number", None) == 1: + return end + return max(ends, key=_end_voltage) + + +def _end_voltage(end) -> float: + try: + nominal = end.nominal_voltage + except Exception: + nominal = getattr(end, "rated_u", None) + if nominal is None: + base = getattr(end, "base_voltage", None) + nominal = getattr(base, "nominal_voltage", None) if base is not None else None + return float(nominal or 0.0) + + +def _normal_open_value(sw: Switch): + if hasattr(sw, "is_normally_open"): + try: + return sw.is_normally_open() + except Exception: + return None + for attr in ["normally_open", "normal_open"]: + if hasattr(sw, attr): + return getattr(sw, attr) + return None + + +def _build_line_result( + name: str, + lines: List[AcLineSegment], + style_ids: List[str], + issue: str, +) -> Result | None: + if not lines: + return None + class_to_properties = { + AcLineSegment: { + "issue": lambda _: issue, + "name": lambda line: line.name or line.mrid, + 
"type": lambda _: "line", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(lines, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +def _build_transformer_result( + name: str, + transformers: List[PowerTransformer], + style_ids: List[str], + issue: str, +) -> Result | None: + if not transformers: + return None + class_to_properties = { + PowerTransformer: { + "issue": lambda _: issue, + "name": lambda pt: pt.name or pt.mrid, + "type": lambda _: "pt", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(transformers, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +def _build_switch_result( + name: str, + switches: List[Switch], + style_ids: List[str], + issue: str, +) -> Result | None: + if not switches: + return None + class_to_properties = { + Switch: { + "issue": lambda _: issue, + "name": lambda sw: sw.name or sw.mrid, + "type": lambda _: "switch", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(switches, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/connectivity_gaps.py b/src/zepben/examples/studies/data_quality_studies/connectivity_gaps.py new file mode 100644 index 0000000..e913c5d --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/connectivity_gaps.py @@ -0,0 +1,248 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 
2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. + +import asyncio +import json +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Set, Tuple +import sys + +from geojson import FeatureCollection +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import GeoJsonOverlay, Result, Study +from zepben.ewb import ( + AcLineSegment, + ConductingEquipment, + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + NetworkTraceStep, + PhaseCode, + Tracing, + connect_with_token, + stop_at_open, +) + +from dq_utils import ( + chunk, + get_zone_mrids, + load_config, + no_anomaly_feature_collection, + to_geojson_feature_collection, +) + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + open_ended_lines: Set[AcLineSegment] = set() + disconnected_lines: Set[AcLineSegment] = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + result = await _fetch_connectivity_gaps(feeder_mrid, rpc_channel) + if result is None: + continue + feeder_open_ends, feeder_disconnected = result + open_ended_lines.update(feeder_open_ends) + disconnected_lines.update(feeder_disconnected) + + style_path = Path(__file__).resolve().parent / "style_connectivity_gaps.json" + styles = json.load(open(style_path, "r")) + result_specs = [ + ( + "Open-ended line segments", + _build_line_result( + "Open-ended line segments", + list(open_ended_lines), + 
style_ids=["dq-open-ended-lines"], + issue="open_end", + ), + ), + ( + "Disconnected island lines", + _build_line_result( + "Disconnected island lines", + list(disconnected_lines), + style_ids=["dq-disconnected-lines"], + issue="disconnected_island", + ), + ), + ] + results = [] + for name, result in result_specs: + if result is not None: + results.append(result) + else: + results.append( + Result( + name=f"No anomalies detected: {name}", + geo_json_overlay=GeoJsonOverlay( + data=no_anomaly_feature_collection(), + styles=["dq-no-anomalies"], + ), + ) + ) + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Connectivity gaps ({', '.join(zone_mrids)})", + description="Highlights open-ended lines and disconnected line islands.", + tags=["dq_connectivity_gaps", "-".join(zone_mrids)], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + +def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_connectivity_gaps( + feeder_mrid: str, + rpc_channel, +) -> Tuple[Set[AcLineSegment], Set[AcLineSegment]] | None: + print(f"Fetching Feeder {feeder_mrid}") + client = 
NetworkConsumerClient(rpc_channel) + result = await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return await analyze_network(network, feeder_mrid) + + +async def analyze_network( + network, + feeder_mrid: str, +) -> Tuple[Set[AcLineSegment], Set[AcLineSegment]]: + node_to_equipment: Dict[object, Set[ConductingEquipment]] = {} + for ce in network.objects(ConductingEquipment): + for terminal in ce.terminals: + node = terminal.connectivity_node + if node is None: + continue + node_to_equipment.setdefault(node, set()).add(ce) + + open_ended_lines = set() + for line in network.objects(AcLineSegment): + for terminal in line.terminals: + node = terminal.connectivity_node + if node and len(node_to_equipment.get(node, set())) == 1: + open_ended_lines.add(line) + break + + disconnected_lines = await _disconnected_island_lines(network, feeder_mrid) + return open_ended_lines, disconnected_lines + + +async def _disconnected_island_lines(network, feeder_mrid: str) -> Set[AcLineSegment]: + feeder_head = None + for feeder in network.objects(Feeder): + if feeder.mrid == feeder_mrid and feeder.normal_head_terminal: + feeder_head = feeder.normal_head_terminal.conducting_equipment + break + if feeder_head is None: + print(f"Feeder {feeder_mrid} has no normal head terminal; skipping disconnected island check.") + return set() + + reachable: Set[ConductingEquipment] = set() + + async def collect(ps: NetworkTraceStep, _): + if ps.path.from_equipment is not None: + reachable.add(ps.path.from_equipment) + if ps.path.to_equipment is not None: + reachable.add(ps.path.to_equipment) + + await ( + Tracing.network_trace() + .add_condition(stop_at_open()) + .add_step_action(collect) + ).run(start=feeder_head, phases=PhaseCode.ABCN, 
can_stop_on_start_item=False) + + feeder_lines = [ + line for line in network.objects(AcLineSegment) if _belongs_to_feeder(line, feeder_mrid) + ] + reachable_lines = {eq for eq in reachable if isinstance(eq, AcLineSegment)} + return set(feeder_lines) - reachable_lines + + +def _belongs_to_feeder(psr, feeder_mrid: str) -> bool: + feeders = getattr(psr, "normal_feeders", []) or [] + return any(feeder.mrid == feeder_mrid for feeder in feeders) + + +def _build_line_result( + name: str, + lines: List[AcLineSegment], + style_ids: List[str], + issue: str, +) -> Result | None: + if not lines: + return None + class_to_properties = { + AcLineSegment: { + "issue": lambda _: issue, + "name": lambda line: line.name or line.mrid, + "type": lambda _: "line", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(lines, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/consumer_mapping_issues.py b/src/zepben/examples/studies/data_quality_studies/consumer_mapping_issues.py new file mode 100644 index 0000000..485b366 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/consumer_mapping_issues.py @@ -0,0 +1,312 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import asyncio +import json +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Set, Tuple +import sys + +from geojson import FeatureCollection +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import GeoJsonOverlay, Result, Study +from zepben.ewb import ( + EnergyConsumer, + PowerTransformer, + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + NetworkTraceStep, + PhaseCode, + Tracing, + connect_with_token, + downstream, + stop_at_open, + upstream, +) +from zepben.ewb.services.network.tracing.networktrace.operators.network_state_operators import ( + NetworkStateOperators, +) + +from dq_utils import ( + chunk, + get_zone_mrids, + load_config, + no_anomaly_feature_collection, + to_geojson_feature_collection, +) + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + unserved_ecs: Set[EnergyConsumer] = set() + missing_lv_feeder_ecs: Set[EnergyConsumer] = set() + no_load_transformers: Set[PowerTransformer] = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + result = await _fetch_consumer_mapping_issues(feeder_mrid, rpc_channel) + if result is None: + continue + feeder_unserved, feeder_missing_lv, feeder_no_load = result + unserved_ecs.update(feeder_unserved) + missing_lv_feeder_ecs.update(feeder_missing_lv) + no_load_transformers.update(feeder_no_load) + + style_path = Path(__file__).resolve().parent / "style_consumer_mapping.json" + styles = 
json.load(open(style_path, "r")) + result_specs = [ + ( + "Unserved EnergyConsumers", + _build_point_result( + "Unserved EnergyConsumers", + list(unserved_ecs), + style_ids=["dq-unserved-ec"], + issue="unserved", + ), + ), + ( + "EnergyConsumers missing LV feeder container", + _build_point_result( + "EnergyConsumers missing LV feeder container", + list(missing_lv_feeder_ecs), + style_ids=["dq-missing-lv-feeder-ec"], + issue="missing_lv_feeder", + ), + ), + ( + "Transformers with no downstream consumers", + _build_transformer_result( + "Transformers with no downstream consumers", + list(no_load_transformers), + style_ids=["dq-no-load-transformer"], + issue="no_downstream_consumers", + ), + ), + ] + results = [] + for name, result in result_specs: + if result is not None: + results.append(result) + else: + results.append( + Result( + name=f"No anomalies detected: {name}", + geo_json_overlay=GeoJsonOverlay( + data=no_anomaly_feature_collection(), + styles=["dq-no-anomalies"], + ), + ) + ) + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Consumer mapping issues ({', '.join(zone_mrids)})", + description="Unserved consumers, consumers missing LV feeder containers, and transformers with no downstream consumers.", + tags=["dq_consumer_mapping", "-".join(zone_mrids)], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + +def 
_collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_consumer_mapping_issues( + feeder_mrid: str, + rpc_channel, +) -> Tuple[Set[EnergyConsumer], Set[EnergyConsumer], Set[PowerTransformer]] | None: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + result = await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return await analyze_network(network, feeder_mrid) + + +async def analyze_network( + network, + feeder_mrid: str, +) -> Tuple[Set[EnergyConsumer], Set[EnergyConsumer], Set[PowerTransformer]]: + await Tracing.set_direction().run(network, network_state_operators=NetworkStateOperators.NORMAL) + + unserved_ecs: Set[EnergyConsumer] = set() + missing_lv_feeder_ecs: Set[EnergyConsumer] = set() + + for ec in network.objects(EnergyConsumer): + if not _has_lv_feeder_container(ec): + missing_lv_feeder_ecs.add(ec) + if not await _has_upstream_transformer(ec): + unserved_ecs.add(ec) + + no_load_transformers: Set[PowerTransformer] = set() + for pt in network.objects(PowerTransformer): + if not _belongs_to_feeder(pt, feeder_mrid): + continue + downstream_eq = await _get_downstream_eq(pt) + if not any(isinstance(eq, EnergyConsumer) for eq in downstream_eq): + no_load_transformers.add(pt) + + return unserved_ecs, missing_lv_feeder_ecs, no_load_transformers + + +async def _has_upstream_transformer(ec: EnergyConsumer) -> bool: + found = False + + async def mark_transformer(ps: NetworkTraceStep, _): + nonlocal found + if 
isinstance(ps.path.to_equipment, PowerTransformer): + found = True + + await ( + Tracing.network_trace() + .add_condition(upstream()) + .add_condition(stop_at_open()) + .add_step_action(mark_transformer) + .add_stop_condition(_is_transformer) + ).run(start=ec, phases=PhaseCode.ABCN, can_stop_on_start_item=False) + + return found + + +def _is_transformer(ps: NetworkTraceStep, _context=None) -> bool: + return isinstance(ps.path.to_equipment, PowerTransformer) + + +async def _get_downstream_eq(pt: PowerTransformer) -> Set[object]: + nodes: Set[object] = {pt} + adjacency: Dict[object, Set[object]] = {} + + async def collect_edges(ps: NetworkTraceStep, _): + nodes.add(ps.path.from_equipment) + nodes.add(ps.path.to_equipment) + if ps.path.traced_externally: + adjacency.setdefault(ps.path.from_equipment, set()).add(ps.path.to_equipment) + + await ( + Tracing.network_trace() + .add_condition(downstream()) + .add_step_action(collect_edges) + ).run(start=pt, phases=PhaseCode.ABCN, can_stop_on_start_item=False) + + return nodes + + +def _has_lv_feeder_container(ec: EnergyConsumer) -> bool: + return any(True for _ in ec.normal_lv_feeders) + + +def _belongs_to_feeder(psr, feeder_mrid: str) -> bool: + if any(feeder.mrid == feeder_mrid for feeder in psr.normal_feeders): + return True + for lv_feeder in psr.normal_lv_feeders: + if any(feeder.mrid == feeder_mrid for feeder in lv_feeder.normal_energizing_feeders): + return True + return False + + +def _build_point_result( + name: str, + ecs: List[EnergyConsumer], + style_ids: List[str], + issue: str, +) -> Result | None: + if not ecs: + return None + class_to_properties = { + EnergyConsumer: { + "issue": lambda _: issue, + "name": lambda ec: ec.name or ec.mrid, + "type": lambda _: "ec", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(ecs, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, 
+ styles=style_ids, + ), + ) + + +def _build_transformer_result( + name: str, + transformers: List[PowerTransformer], + style_ids: List[str], + issue: str, +) -> Result | None: + if not transformers: + return None + class_to_properties = { + PowerTransformer: { + "issue": lambda _: issue, + "name": lambda pt: pt.name or pt.mrid, + "type": lambda _: "pt", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(transformers, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py b/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py new file mode 100644 index 0000000..bd7bc29 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py @@ -0,0 +1,388 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import asyncio +import json +import sys +from datetime import datetime +from pathlib import Path +from typing import Dict, List, Set, Tuple + +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import Study +from zepben.ewb import ( + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + connect_with_token, +) + +import asset_attribute_inconsistencies as aa +import connectivity_gaps as cg +import consumer_mapping_issues as cm +import phase_conductor_issues as pc +import protection_directionality_anomalies as pd +import spatial_location_anomalies as sl +from dq_utils import chunk, get_zone_mrids, load_config + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + open_ended_lines: Set = set() + disconnected_lines: Set = set() + unserved_ecs: Set = set() + missing_lv_feeder_ecs: Set = set() + no_load_transformers: Set = set() + phase_mismatch_features: List = [] + missing_phase_lines: Set = set() + zero_length_lines: Set = set() + missing_impedance_lines: Set = set() + missing_rating_transformers: Set = set() + missing_impedance_transformers: Set = set() + missing_normal_state_switches: Set = set() + loop_lines: Set = set() + switch_terminal_issues: Set = set() + long_service_ecs: Set = set() + ec_to_distance: Dict[str, float] = {} + very_long_lines: Set = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + network = await _fetch_feeder_network(feeder_mrid, rpc_channel) + if network is None: + continue + + 
feeder_open, feeder_disconnected = await cg.analyze_network(network, feeder_mrid) + open_ended_lines.update(feeder_open) + disconnected_lines.update(feeder_disconnected) + + feeder_unserved, feeder_missing_lv, feeder_no_load = await cm.analyze_network(network, feeder_mrid) + unserved_ecs.update(feeder_unserved) + missing_lv_feeder_ecs.update(feeder_missing_lv) + no_load_transformers.update(feeder_no_load) + + mismatch_features, missing_lines = await pc.analyze_network(network) + phase_mismatch_features.extend(mismatch_features) + missing_phase_lines.update(missing_lines) + + z_lines, i_lines, t_missing, t_imp_missing, s_missing = await aa.analyze_network(network) + zero_length_lines.update(z_lines) + missing_impedance_lines.update(i_lines) + missing_rating_transformers.update(t_missing) + missing_impedance_transformers.update(t_imp_missing) + missing_normal_state_switches.update(s_missing) + + loop_segments, switch_issues = await pd.analyze_network(network) + loop_lines.update(loop_segments) + switch_terminal_issues.update(switch_issues) + + feeder_ecs, feeder_distances, feeder_lines = await sl.analyze_network(network) + long_service_ecs.update(feeder_ecs) + ec_to_distance.update(feeder_distances) + very_long_lines.update(feeder_lines) + + results, detected_tests, used_style_ids = _build_results( + open_ended_lines, + disconnected_lines, + unserved_ecs, + missing_lv_feeder_ecs, + no_load_transformers, + phase_mismatch_features, + missing_phase_lines, + zero_length_lines, + missing_impedance_lines, + missing_rating_transformers, + missing_impedance_transformers, + missing_normal_state_switches, + loop_lines, + switch_terminal_issues, + long_service_ecs, + ec_to_distance, + very_long_lines, + ) + + if not results: + print("No anomalies detected across all tests. 
Study upload skipped.") + return + + styles = _load_styles(used_style_ids) + description = _description_from_tests(detected_tests) + tags = [_slugify(test) for test in detected_tests] + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Data quality summary ({', '.join(zone_mrids)})", + description=description, + tags=tags, + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + +def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_feeder_network(feeder_mrid: str, rpc_channel): + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + result = await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + print(f"Finished fetching Feeder {feeder_mrid}") + return client.service + + +def _build_results( + open_ended_lines: Set, + disconnected_lines: Set, + unserved_ecs: Set, + missing_lv_feeder_ecs: Set, + no_load_transformers: Set, + phase_mismatch_features: List, + 
missing_phase_lines: Set, + zero_length_lines: Set, + missing_impedance_lines: Set, + missing_rating_transformers: Set, + missing_impedance_transformers: Set, + missing_normal_state_switches: Set, + loop_lines: Set, + switch_terminal_issues: Set, + long_service_ecs: Set, + ec_to_distance: Dict[str, float], + very_long_lines: Set, +) -> Tuple[List, List[str], Set[str]]: + results = [] + detected_tests: List[str] = [] + used_style_ids: Set[str] = set() + + def add_result(name: str, result): + if result is None: + return + results.append(result) + detected_tests.append(name) + used_style_ids.update(result.geo_json_overlay.styles) + + add_result( + "Open-ended line segments", + cg._build_line_result( + "Open-ended line segments", + list(open_ended_lines), + style_ids=["dq-open-ended-lines"], + issue="open_end", + ), + ) + add_result( + "Disconnected island lines", + cg._build_line_result( + "Disconnected island lines", + list(disconnected_lines), + style_ids=["dq-disconnected-lines"], + issue="disconnected_island", + ), + ) + add_result( + "Unserved EnergyConsumers", + cm._build_point_result( + "Unserved EnergyConsumers", + list(unserved_ecs), + style_ids=["dq-unserved-ec"], + issue="unserved", + ), + ) + add_result( + "EnergyConsumers missing LV feeder container", + cm._build_point_result( + "EnergyConsumers missing LV feeder container", + list(missing_lv_feeder_ecs), + style_ids=["dq-missing-lv-feeder-ec"], + issue="missing_lv_feeder", + ), + ) + add_result( + "Transformers with no downstream consumers", + cm._build_transformer_result( + "Transformers with no downstream consumers", + list(no_load_transformers), + style_ids=["dq-no-load-transformer"], + issue="no_downstream_consumers", + ), + ) + add_result( + "Phase mismatch at nodes", + pc._build_feature_result( + "Phase mismatch at nodes", + phase_mismatch_features, + style_ids=["dq-phase-mismatch-node"], + ), + ) + add_result( + "Lines missing phase information", + pc._build_line_result( + "Lines missing phase 
information", + list(missing_phase_lines), + style_ids=["dq-missing-phase-lines"], + ), + ) + add_result( + "Zero-length line segments", + aa._build_line_result( + "Zero-length line segments", + list(zero_length_lines), + style_ids=["dq-zero-length-lines"], + issue="zero_length", + ), + ) + add_result( + "Line segments missing impedance info", + aa._build_line_result( + "Line segments missing impedance info", + list(missing_impedance_lines), + style_ids=["dq-missing-impedance-lines"], + issue="missing_impedance", + ), + ) + add_result( + "Transformers missing rating", + aa._build_transformer_result( + "Transformers missing rating", + list(missing_rating_transformers), + style_ids=["dq-missing-rating-transformer"], + issue="missing_rating", + ), + ) + add_result( + "Transformers missing impedance", + aa._build_transformer_result( + "Transformers missing impedance", + list(missing_impedance_transformers), + style_ids=["dq-missing-impedance-transformer"], + issue="missing_transformer_impedance", + ), + ) + add_result( + "Switches missing normal state", + aa._build_switch_result( + "Switches missing normal state", + list(missing_normal_state_switches), + style_ids=["dq-missing-normal-state-switch"], + issue="missing_normal_state", + ), + ) + add_result( + "Loops without switches", + pd._build_line_result( + "Loops without switches", + list(loop_lines), + style_ids=["dq-loop-no-switch"], + issue="loop_without_switch", + ), + ) + add_result( + "Switch terminal anomalies", + pd._build_switch_result( + "Switch terminal anomalies", + list(switch_terminal_issues), + style_ids=["dq-switch-terminal-issue"], + issue="switch_terminal_issue", + ), + ) + add_result( + "Long service drops", + sl._build_ec_distance_result( + "Long service drops", + list(long_service_ecs), + ec_to_distance, + style_ids=["dq-long-service-drop"], + ), + ) + add_result( + "Very long line segments", + sl._build_line_result( + "Very long line segments", + list(very_long_lines), + 
style_ids=["dq-very-long-lines"], + issue="very_long_line", + ), + ) + + return results, detected_tests, used_style_ids + + +def _load_styles(used_style_ids: Set[str]) -> List[Dict]: + style_dir = Path(__file__).resolve().parent + style_files = [ + style_dir / "style_connectivity_gaps.json", + style_dir / "style_consumer_mapping.json", + style_dir / "style_phase_conductor.json", + style_dir / "style_asset_attribute.json", + style_dir / "style_protection_directionality.json", + style_dir / "style_spatial_location.json", + ] + style_map: Dict[str, Dict] = {} + for style_file in style_files: + styles = json.load(open(style_file, "r")) + for style in styles: + if style.get("id") in used_style_ids: + style_map[style["id"]] = style + return list(style_map.values()) + + +def _description_from_tests(detected_tests: List[str]) -> str: + if not detected_tests: + return "No anomalies detected." + return "Detected anomalies: " + ", ".join(detected_tests) + + +def _slugify(text: str) -> str: + cleaned = "".join(ch.lower() if ch.isalnum() else "_" for ch in text).strip("_") + while "__" in cleaned: + cleaned = cleaned.replace("__", "_") + return cleaned + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/dq_utils.py b/src/zepben/examples/studies/data_quality_studies/dq_utils.py new file mode 100644 index 0000000..fd92ca3 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/dq_utils.py @@ -0,0 +1,118 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +from pathlib import Path +from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union +import json + +from geojson import Feature, FeatureCollection +from geojson.geometry import Geometry, LineString, Point +from zepben.ewb import Location, PowerSystemResource + + +CONFIG_PATH = Path(__file__).resolve().parents[2] / "config.json" + + +def load_config() -> Dict[str, Any]: + with open(CONFIG_PATH, "r") as f: + return json.loads(f.read()) + + +def get_zone_mrids(argv: List[str], default: Optional[List[str]] = None) -> List[str]: + if len(argv) > 1: + zones = [z.strip() for z in argv[1].split(",") if z.strip()] + if zones: + return zones + return default or [] + + +def chunk(it: Iterable[Any], size: int): + it = iter(it) + return iter(lambda: tuple(_take(it, size)), ()) + + +def _take(it: Iterable[Any], size: int): + items = [] + for _ in range(size): + try: + items.append(next(it)) + except StopIteration: + break + return items + + +def line_length_m(line) -> float: + return float(getattr(line, "length", 0.0) or 0.0) + + +def terminal_phase_code(terminal) -> Optional[Any]: + phases = getattr(terminal, "normal_phases", None) or getattr(terminal, "phases", None) + if phases is None: + return None + if hasattr(phases, "as_phase_code"): + return phases.as_phase_code() + return phases + + +def to_geojson_feature_collection( + psrs: List[PowerSystemResource], + class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]], +) -> FeatureCollection: + features = [] + for psr in psrs: + properties_map = class_to_properties.get(type(psr)) + if properties_map is None: + continue + feature = to_geojson_feature(psr, properties_map) + if feature is not None: + features.append(feature) + return FeatureCollection(features) + + +def to_geojson_feature( + psr: PowerSystemResource, + property_map: Dict[str, Callable[[PowerSystemResource], Any]], +) -> Union[Feature, None]: + geometry = to_geojson_geometry(getattr(psr, "location", None)) + if geometry is None: + 
return None + properties = {k: f(psr) for (k, f) in property_map.items()} + return Feature(psr.mrid, geometry, properties) + + +def to_geojson_geometry(location: Location) -> Union[Geometry, None]: + points = list(location.points) if location is not None else [] + if len(points) > 1: + return LineString([(point.x_position, point.y_position) for point in points]) + if len(points) == 1: + return Point((points[0].x_position, points[0].y_position)) + return None + + +def feature_from_location( + feature_id: str, + location: Location, + properties: Dict[str, Any], +) -> Optional[Feature]: + geometry = to_geojson_geometry(location) + if geometry is None: + return None + return Feature(feature_id, geometry, properties) + + +def point_feature( + feature_id: str, + x: float, + y: float, + properties: Dict[str, Any], +) -> Feature: + return Feature(feature_id, Point((x, y)), properties) + + +def no_anomaly_feature_collection(message: str = "No anomalies detected") -> FeatureCollection: + return FeatureCollection( + [Feature("no-anomalies", Point((0.0, 0.0)), {"message": message, "type": "no_anomalies"})] + ) diff --git a/src/zepben/examples/studies/data_quality_studies/phase_conductor_issues.py b/src/zepben/examples/studies/data_quality_studies/phase_conductor_issues.py new file mode 100644 index 0000000..a4d3af5 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/phase_conductor_issues.py @@ -0,0 +1,256 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import asyncio +import json +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Set, Tuple +import sys + +from geojson import FeatureCollection +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import GeoJsonOverlay, Result, Study +from zepben.ewb import ( + AcLineSegment, + ConductingEquipment, + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + connect_with_token, +) + +from dq_utils import ( + chunk, + get_zone_mrids, + load_config, + no_anomaly_feature_collection, + point_feature, + terminal_phase_code, + to_geojson_feature_collection, +) + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + phase_mismatch_features = [] + missing_phase_lines: Set[AcLineSegment] = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + result = await _fetch_phase_issues(feeder_mrid, rpc_channel) + if result is None: + continue + mismatch_features, missing_lines = result + phase_mismatch_features.extend(mismatch_features) + missing_phase_lines.update(missing_lines) + + style_path = Path(__file__).resolve().parent / "style_phase_conductor.json" + styles = json.load(open(style_path, "r")) + result_specs = [ + ( + "Phase mismatch at nodes", + _build_feature_result( + "Phase mismatch at nodes", + phase_mismatch_features, + style_ids=["dq-phase-mismatch-node"], + ), + ), + ( + "Lines missing phase information", + _build_line_result( + "Lines missing phase information", + 
list(missing_phase_lines), + style_ids=["dq-missing-phase-lines"], + ), + ), + ] + results = [] + for name, result in result_specs: + if result is not None: + results.append(result) + else: + results.append( + Result( + name=f"No anomalies detected: {name}", + geo_json_overlay=GeoJsonOverlay( + data=no_anomaly_feature_collection(), + styles=["dq-no-anomalies"], + ), + ) + ) + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Phase and conductor issues ({', '.join(zone_mrids)})", + description="Connectivity nodes with mixed phases and lines missing phase info.", + tags=["dq_phase_conductor", "-".join(zone_mrids)], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + +def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_phase_issues( + feeder_mrid: str, + rpc_channel, +) -> Tuple[List, Set[AcLineSegment]] | None: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + result = await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if 
result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return await analyze_network(network) + + +async def analyze_network( + network, +) -> Tuple[List, Set[AcLineSegment]]: + node_to_terminals: Dict[object, List[object]] = {} + node_to_equipment: Dict[object, ConductingEquipment] = {} + for ce in network.objects(ConductingEquipment): + for terminal in ce.terminals: + node = terminal.connectivity_node + if node is None: + continue + node_to_terminals.setdefault(node, []).append(terminal) + node_to_equipment.setdefault(node, ce) + + phase_mismatch_features = [] + for node, terminals in node_to_terminals.items(): + phase_codes = [] + for terminal in terminals: + phase_code = terminal_phase_code(terminal) + if phase_code is not None: + phase_codes.append(str(phase_code)) + unique_phases = sorted(set(phase_codes)) + if len(unique_phases) > 1: + equipment = node_to_equipment.get(node) + if equipment is None or equipment.location is None: + continue + points = list(equipment.location.points) + if not points: + continue + pt = points[0] + feature = point_feature( + f"phase-mismatch-{getattr(node, 'mrid', id(node))}", + pt.x_position, + pt.y_position, + { + "issue": "phase_mismatch", + "phases": ", ".join(unique_phases), + "type": "node", + }, + ) + phase_mismatch_features.append(feature) + + missing_phase_lines: Set[AcLineSegment] = set() + for line in network.objects(AcLineSegment): + terminal_phases = [] + for terminal in line.terminals: + phase_code = terminal_phase_code(terminal) + if phase_code is not None: + terminal_phases.append(phase_code) + if not terminal_phases: + missing_phase_lines.add(line) + + return phase_mismatch_features, missing_phase_lines + + +def _build_feature_result( + name: str, + features: List, + style_ids: List[str], +) -> Result | None: + if not features: + return None + feature_collection = FeatureCollection(features) + if not 
feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +def _build_line_result( + name: str, + lines: List[AcLineSegment], + style_ids: List[str], +) -> Result | None: + if not lines: + return None + class_to_properties = { + AcLineSegment: { + "issue": lambda _: "missing_phase", + "name": lambda line: line.name or line.mrid, + "type": lambda _: "line", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(lines, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/protection_directionality_anomalies.py b/src/zepben/examples/studies/data_quality_studies/protection_directionality_anomalies.py new file mode 100644 index 0000000..c5a2746 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/protection_directionality_anomalies.py @@ -0,0 +1,286 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import asyncio +import json +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Set, Tuple +import sys + +from geojson import FeatureCollection +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import GeoJsonOverlay, Result, Study +from zepben.ewb import ( + AcLineSegment, + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + Switch, + connect_with_token, +) + +from dq_utils import ( + chunk, + get_zone_mrids, + load_config, + no_anomaly_feature_collection, + to_geojson_feature_collection, +) + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + loop_lines: Set[AcLineSegment] = set() + switch_terminal_issues: Set[Switch] = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + result = await _fetch_protection_directionality_issues(feeder_mrid, rpc_channel) + if result is None: + continue + loop_segments, switch_issues = result + loop_lines.update(loop_segments) + switch_terminal_issues.update(switch_issues) + + style_path = Path(__file__).resolve().parent / "style_protection_directionality.json" + styles = json.load(open(style_path, "r")) + result_specs = [ + ( + "Loops without switches", + _build_line_result( + "Loops without switches", + list(loop_lines), + style_ids=["dq-loop-no-switch"], + issue="loop_without_switch", + ), + ), + ( + "Switch terminal anomalies", + _build_switch_result( + "Switch terminal anomalies", + list(switch_terminal_issues), + 
style_ids=["dq-switch-terminal-issue"], + issue="switch_terminal_issue", + ), + ), + ] + results = [] + for name, result in result_specs: + if result is not None: + results.append(result) + else: + results.append( + Result( + name=f"No anomalies detected: {name}", + geo_json_overlay=GeoJsonOverlay( + data=no_anomaly_feature_collection(), + styles=["dq-no-anomalies"], + ), + ) + ) + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Protection and directionality anomalies ({', '.join(zone_mrids)})", + description="Loops without switches and switch terminal count anomalies.", + tags=["dq_protection_directionality", "-".join(zone_mrids)], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + +def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_protection_directionality_issues( + feeder_mrid: str, + rpc_channel, +) -> Tuple[Set[AcLineSegment], Set[Switch]] | None: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + result = await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + 
include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return await analyze_network(network) + + +async def analyze_network( + network, +) -> Tuple[Set[AcLineSegment], Set[Switch]]: + loop_lines = _find_loops_without_switch(network) + + switch_terminal_issues = set() + for sw in network.objects(Switch): + terminals = [t for t in sw.terminals if t.connectivity_node is not None] + if len(terminals) != 2: + switch_terminal_issues.add(sw) + + return loop_lines, switch_terminal_issues + + +def _find_loops_without_switch(network) -> Set[AcLineSegment]: + lines = list(network.objects(AcLineSegment)) + if not lines: + return set() + + parent = {} + + def find(node): + if node not in parent: + parent[node] = node + while parent[node] != node: + parent[node] = parent[parent[node]] + node = parent[node] + return node + + def union(a, b): + ra = find(a) + rb = find(b) + if ra != rb: + parent[rb] = ra + + line_nodes: Dict[AcLineSegment, Tuple[object, object]] = {} + for line in lines: + nodes = [t.connectivity_node for t in line.terminals if t.connectivity_node is not None] + if len(nodes) < 2: + continue + n1, n2 = nodes[0], nodes[1] + line_nodes[line] = (n1, n2) + union(n1, n2) + + component_edges: Dict[object, int] = {} + component_nodes: Dict[object, Set[object]] = {} + component_lines: Dict[object, Set[AcLineSegment]] = {} + for line, (n1, n2) in line_nodes.items(): + root = find(n1) + component_edges[root] = component_edges.get(root, 0) + 1 + component_nodes.setdefault(root, set()).update([n1, n2]) + component_lines.setdefault(root, set()).add(line) + + component_has_switch: Dict[object, bool] = {root: False for root in component_nodes} + for sw in network.objects(Switch): + for terminal in sw.terminals: + node = terminal.connectivity_node + if node is None: + continue + root = find(node) + if root 
in component_has_switch: + component_has_switch[root] = True + + loop_lines: Set[AcLineSegment] = set() + for root, edges in component_edges.items(): + nodes = component_nodes.get(root, set()) + if not nodes: + continue + if edges >= len(nodes) and not component_has_switch.get(root, False): + loop_lines.update(component_lines.get(root, set())) + + return loop_lines + + +def _build_line_result( + name: str, + lines: List[AcLineSegment], + style_ids: List[str], + issue: str, +) -> Result | None: + if not lines: + return None + class_to_properties = { + AcLineSegment: { + "issue": lambda _: issue, + "name": lambda line: line.name or line.mrid, + "type": lambda _: "line", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(lines, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +def _build_switch_result( + name: str, + switches: List[Switch], + style_ids: List[str], + issue: str, +) -> Result | None: + if not switches: + return None + class_to_properties = { + Switch: { + "issue": lambda _: issue, + "name": lambda sw: sw.name or sw.mrid, + "type": lambda _: "switch", + "terminal_count": lambda sw: len([t for t in sw.terminals if t.connectivity_node is not None]), + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(switches, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/src/zepben/examples/studies/data_quality_studies/spatial_location_anomalies.py b/src/zepben/examples/studies/data_quality_studies/spatial_location_anomalies.py new file mode 100644 index 0000000..697e8bc --- /dev/null +++ 
b/src/zepben/examples/studies/data_quality_studies/spatial_location_anomalies.py @@ -0,0 +1,281 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. + +import asyncio +import json +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Set, Tuple +import sys + +from geojson import FeatureCollection +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import GeoJsonOverlay, Result, Study +from zepben.ewb import ( + AcLineSegment, + EnergyConsumer, + Feeder, + IncludedEnergizedContainers, + NetworkConsumerClient, + NetworkTraceStep, + PhaseCode, + PowerTransformer, + Tracing, + connect_with_token, + stop_at_open, + upstream, +) +from zepben.ewb.services.network.tracing.networktrace.operators.network_state_operators import ( + NetworkStateOperators, +) + +from dq_utils import ( + chunk, + get_zone_mrids, + line_length_m, + load_config, + no_anomaly_feature_collection, + to_geojson_feature_collection, +) + + +LONG_SERVICE_DROP_M = 500.0 +VERY_LONG_LINE_M = 2000.0 + + +async def main(): + zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) + print(f"Start time: {datetime.now()}") + + config = load_config() + rpc_channel = _connect_rpc(config) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + feeder_mrids = _collect_feeder_mrids(hierarchy.value.substations, zone_mrids) + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + long_service_ecs: Set[EnergyConsumer] = set() + ec_to_distance: Dict[str, float] = {} + very_long_lines: Set[AcLineSegment] = set() + + for feeders in chunk(feeder_mrids, 3): + rpc_channel = _connect_rpc(config) + for feeder_mrid in feeders: + result 
= await _fetch_spatial_anomalies(feeder_mrid, rpc_channel) + if result is None: + continue + feeder_ecs, feeder_distances, feeder_lines = result + long_service_ecs.update(feeder_ecs) + ec_to_distance.update(feeder_distances) + very_long_lines.update(feeder_lines) + + style_path = Path(__file__).resolve().parent / "style_spatial_location.json" + styles = json.load(open(style_path, "r")) + result_specs = [ + ( + "Long service drops", + _build_ec_distance_result( + "Long service drops", + list(long_service_ecs), + ec_to_distance, + style_ids=["dq-long-service-drop"], + ), + ), + ( + "Very long line segments", + _build_line_result( + "Very long line segments", + list(very_long_lines), + style_ids=["dq-very-long-lines"], + issue="very_long_line", + ), + ), + ] + results = [] + for name, result in result_specs: + if result is not None: + results.append(result) + else: + results.append( + Result( + name=f"No anomalies detected: {name}", + geo_json_overlay=GeoJsonOverlay( + data=no_anomaly_feature_collection(), + styles=["dq-no-anomalies"], + ), + ) + ) + + eas_client = EasClient(host=config["host"], port=config["rpc_port"], protocol="https", access_token=config["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await eas_client.async_upload_study( + Study( + name=f"Spatial/location anomalies ({', '.join(zone_mrids)})", + description="Long service drops and unusually long line segments.", + tags=["dq_spatial_location", "-".join(zone_mrids)], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + print(f"Finish time: {datetime.now()}") + + +def _connect_rpc(config): + return connect_with_token( + host=config["host"], + access_token=config["access_token"], + rpc_port=config["rpc_port"], + ca_filename=config.get("ca_filename"), + timeout_seconds=config.get("timeout_seconds", 5), + debug=bool(config.get("debug", False)), + skip_connection_test=bool(config.get("skip_connection_test", False)), + ) + + 
+def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str]: + feeder_mrids: List[str] = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + return feeder_mrids + + +async def _fetch_spatial_anomalies( + feeder_mrid: str, + rpc_channel, +) -> Tuple[Set[EnergyConsumer], Dict[str, float], Set[AcLineSegment]] | None: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + result = await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return await analyze_network(network) + + +async def analyze_network( + network, +) -> Tuple[Set[EnergyConsumer], Dict[str, float], Set[AcLineSegment]]: + await Tracing.set_direction().run(network, network_state_operators=NetworkStateOperators.NORMAL) + + long_service_ecs: Set[EnergyConsumer] = set() + ec_to_distance: Dict[str, float] = {} + for ec in network.objects(EnergyConsumer): + path_lines = await _get_upstream_lines_to_transformer(ec) + distance = sum(line_length_m(line) for line in path_lines) + ec_to_distance[ec.mrid] = distance + if distance >= LONG_SERVICE_DROP_M: + long_service_ecs.add(ec) + + very_long_lines = {line for line in network.objects(AcLineSegment) if line_length_m(line) >= VERY_LONG_LINE_M} + + return long_service_ecs, ec_to_distance, very_long_lines + + +async def _get_upstream_lines_to_transformer(ec: EnergyConsumer) -> Set[AcLineSegment]: + lines = set() + + async def collect_lines(ps: NetworkTraceStep, _): + line = ps.path.traversed_ac_line_segment + if line is not None: + lines.add(line) + if isinstance(ps.path.to_equipment, AcLineSegment): + lines.add(ps.path.to_equipment) + if 
isinstance(ps.path.from_equipment, AcLineSegment): + lines.add(ps.path.from_equipment) + + await ( + Tracing.network_trace() + .add_condition(upstream()) + .add_condition(stop_at_open()) + .add_step_action(collect_lines) + .add_stop_condition(_is_transformer) + ).run(start=ec, phases=PhaseCode.ABCN, can_stop_on_start_item=False) + + return lines + + +def _is_transformer(ps: NetworkTraceStep, _context=None) -> bool: + return isinstance(ps.path.to_equipment, PowerTransformer) + + +def _build_ec_distance_result( + name: str, + ecs: List[EnergyConsumer], + ec_to_distance: Dict[str, float], + style_ids: List[str], +) -> Result | None: + if not ecs: + return None + + def distance(ec: EnergyConsumer) -> float: + return float(ec_to_distance.get(ec.mrid, 0.0)) + + def label(ec: EnergyConsumer) -> str: + value = distance(ec) + return f"{value:.0f} m" if value > 0 else "n/a" + + class_to_properties = { + EnergyConsumer: { + "distance_m": distance, + "distance_label": label, + "type": lambda _: "ec", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(ecs, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +def _build_line_result( + name: str, + lines: List[AcLineSegment], + style_ids: List[str], + issue: str, +) -> Result | None: + if not lines: + return None + class_to_properties = { + AcLineSegment: { + "issue": lambda _: issue, + "name": lambda line: line.name or line.mrid, + "type": lambda _: "line", + } + } + feature_collection: FeatureCollection = to_geojson_feature_collection(lines, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ), + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git 
a/src/zepben/examples/studies/data_quality_studies/style_asset_attribute.json b/src/zepben/examples/studies/data_quality_studies/style_asset_attribute.json new file mode 100644 index 0000000..e9091b7 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/style_asset_attribute.json @@ -0,0 +1,63 @@ +[ + { + "id": "dq-zero-length-lines", + "name": "Zero-length line segments", + "type": "line", + "paint": { + "line-color": "rgb(210,2,29)", + "line-width": 2 + } + }, + { + "id": "dq-missing-impedance-lines", + "name": "Lines missing impedance", + "type": "line", + "paint": { + "line-color": "rgb(244,109,67)", + "line-width": 2 + } + }, + { + "id": "dq-missing-rating-transformer", + "name": "Transformers missing rating", + "type": "circle", + "paint": { + "circle-radius": 8, + "circle-color": "rgb(94,79,162)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-missing-impedance-transformer", + "name": "Transformers missing impedance", + "type": "circle", + "paint": { + "circle-radius": 8, + "circle-color": "rgb(253,174,97)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-missing-normal-state-switch", + "name": "Switches missing normal state", + "type": "circle", + "paint": { + "circle-radius": 6, + "circle-color": "rgb(44,123,182)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-no-anomalies", + "name": "No anomalies detected", + "type": "circle", + "paint": { + "circle-radius": 1, + "circle-opacity": 0, + "circle-stroke-opacity": 0 + } + } +] diff --git a/src/zepben/examples/studies/data_quality_studies/style_connectivity_gaps.json b/src/zepben/examples/studies/data_quality_studies/style_connectivity_gaps.json new file mode 100644 index 0000000..6ba6104 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/style_connectivity_gaps.json @@ -0,0 +1,30 @@ +[ + { + "id": "dq-open-ended-lines", + "name": "Open-ended lines", + "type": 
"line", + "paint": { + "line-color": "rgb(210,2,29)", + "line-width": 2 + } + }, + { + "id": "dq-disconnected-lines", + "name": "Disconnected island lines", + "type": "line", + "paint": { + "line-color": "rgb(244,109,67)", + "line-width": 2 + } + }, + { + "id": "dq-no-anomalies", + "name": "No anomalies detected", + "type": "circle", + "paint": { + "circle-radius": 1, + "circle-opacity": 0, + "circle-stroke-opacity": 0 + } + } +] diff --git a/src/zepben/examples/studies/data_quality_studies/style_consumer_mapping.json b/src/zepben/examples/studies/data_quality_studies/style_consumer_mapping.json new file mode 100644 index 0000000..73dd6f1 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/style_consumer_mapping.json @@ -0,0 +1,45 @@ +[ + { + "id": "dq-unserved-ec", + "name": "Unserved EnergyConsumers", + "type": "circle", + "paint": { + "circle-radius": 6, + "circle-color": "rgb(210,2,29)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-missing-lv-feeder-ec", + "name": "EnergyConsumers missing LV feeder", + "type": "circle", + "paint": { + "circle-radius": 6, + "circle-color": "rgb(244,109,67)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-no-load-transformer", + "name": "Transformers with no downstream consumers", + "type": "circle", + "paint": { + "circle-radius": 9, + "circle-color": "rgb(44,123,182)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-no-anomalies", + "name": "No anomalies detected", + "type": "circle", + "paint": { + "circle-radius": 1, + "circle-opacity": 0, + "circle-stroke-opacity": 0 + } + } +] diff --git a/src/zepben/examples/studies/data_quality_studies/style_phase_conductor.json b/src/zepben/examples/studies/data_quality_studies/style_phase_conductor.json new file mode 100644 index 0000000..062bd33 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/style_phase_conductor.json @@ -0,0 +1,32 
@@ +[ + { + "id": "dq-phase-mismatch-node", + "name": "Phase mismatch nodes", + "type": "circle", + "paint": { + "circle-radius": 7, + "circle-color": "rgb(158,1,66)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-missing-phase-lines", + "name": "Lines missing phase info", + "type": "line", + "paint": { + "line-color": "rgb(210,2,29)", + "line-width": 2 + } + }, + { + "id": "dq-no-anomalies", + "name": "No anomalies detected", + "type": "circle", + "paint": { + "circle-radius": 1, + "circle-opacity": 0, + "circle-stroke-opacity": 0 + } + } +] diff --git a/src/zepben/examples/studies/data_quality_studies/style_protection_directionality.json b/src/zepben/examples/studies/data_quality_studies/style_protection_directionality.json new file mode 100644 index 0000000..4c76e44 --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/style_protection_directionality.json @@ -0,0 +1,32 @@ +[ + { + "id": "dq-loop-no-switch", + "name": "Loops without switches", + "type": "line", + "paint": { + "line-color": "rgb(210,2,29)", + "line-width": 2 + } + }, + { + "id": "dq-switch-terminal-issue", + "name": "Switch terminal anomalies", + "type": "circle", + "paint": { + "circle-radius": 7, + "circle-color": "rgb(244,109,67)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-no-anomalies", + "name": "No anomalies detected", + "type": "circle", + "paint": { + "circle-radius": 1, + "circle-opacity": 0, + "circle-stroke-opacity": 0 + } + } +] diff --git a/src/zepben/examples/studies/data_quality_studies/style_spatial_location.json b/src/zepben/examples/studies/data_quality_studies/style_spatial_location.json new file mode 100644 index 0000000..f78faef --- /dev/null +++ b/src/zepben/examples/studies/data_quality_studies/style_spatial_location.json @@ -0,0 +1,38 @@ +[ + { + "id": "dq-long-service-drop", + "name": "Long service drops", + "type": "circle", + "paint": { + "circle-radius": 7, + 
"circle-color": [ + "step", + ["get", "distance_m"], + "rgb(244,109,67)", 500, + "rgb(210,2,29)", 1000, + "rgb(165,0,38)" + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + } + }, + { + "id": "dq-very-long-lines", + "name": "Very long lines", + "type": "line", + "paint": { + "line-color": "rgb(94,79,162)", + "line-width": 2 + } + }, + { + "id": "dq-no-anomalies", + "name": "No anomalies detected", + "type": "circle", + "paint": { + "circle-radius": 1, + "circle-opacity": 0, + "circle-stroke-opacity": 0 + } + } +] diff --git a/src/zepben/examples/studies/loop_impedance_by_energy_consumer.py b/src/zepben/examples/studies/loop_impedance_by_energy_consumer.py new file mode 100644 index 0000000..737f559 --- /dev/null +++ b/src/zepben/examples/studies/loop_impedance_by_energy_consumer.py @@ -0,0 +1,321 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import asyncio +import json +import math +from datetime import datetime +from itertools import islice +from typing import List, Dict, Tuple, Callable, Any, Union, Type, Set + +from geojson import FeatureCollection, Feature +from geojson.geometry import Geometry, LineString, Point +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import Study, Result, GeoJsonOverlay +from zepben.ewb import ( + AcLineSegment, + EnergyConsumer, + PowerTransformer, + NetworkConsumerClient, + PhaseCode, + Feeder, + PowerSystemResource, + Location, + connect_with_token, + NetworkTraceStep, + Tracing, + upstream, + stop_at_open, + IncludedEnergizedContainers, +) +from zepben.ewb.services.network.tracing.networktrace.operators.network_state_operators import NetworkStateOperators + + +with open("../config.json") as f: + c = json.loads(f.read()) + + +def chunk(it, size): + it = iter(it) + return iter(lambda: tuple(islice(it, size)), ()) + + +async def main(): + # Only process feeders in the following zones + zone_mrids = ["CPM"] + print(f"Start time: {datetime.now()}") + + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + substations = hierarchy.value.substations + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + feeder_mrids = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + all_ecs: List[EnergyConsumer] = [] + all_lines: List[AcLineSegment] = [] + ec_to_loop_z: Dict[str, float] = {} + line_to_z_per_km: 
Dict[str, float] = {} + + # Process feeders in batches of 3, using asyncio, for performance + batches = chunk(feeder_mrids, 3) + for feeders in batches: + futures = [] + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + print(f"Processing feeders {', '.join(feeders)}") + for feeder_mrid in feeders: + futures.append(asyncio.ensure_future(fetch_feeder_loop_impedance(feeder_mrid, rpc_channel))) + + for future in futures: + result = await future + if result is None: + continue + ecs, lines, ec_impedance, line_impedance = result + all_ecs.extend(ecs) + all_lines.extend(lines) + ec_to_loop_z.update(ec_impedance) + line_to_z_per_km.update(line_impedance) + + print(f"Creating study for {len(all_ecs)} energy consumers") + + eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"]) + print(f"Uploading Study for zones {', '.join(zone_mrids)} ...") + await upload_loop_impedance_study( + eas_client, + all_ecs, + all_lines, + ec_to_loop_z, + line_to_z_per_km, + name=f"Loop impedance (normal) ({', '.join(zone_mrids)})", + description="Loop impedance at EnergyConsumers on normal network path; AC line segments colored by impedance.", + tags=["loop_impedance", "-".join(zone_mrids)], + styles=json.load(open("style_loop_impedance.json", "r")), + ) + await eas_client.aclose() + print("Uploaded Study") + + print(f"Finish time: {datetime.now()}") + + +async def fetch_feeder_loop_impedance( + feeder_mrid: str, + rpc_channel, +) -> Union[Tuple[List[EnergyConsumer], List[AcLineSegment], Dict[str, float], Dict[str, float]], None]: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + + result = ( + await client.get_equipment_container( + mrid=feeder_mrid, 
+ expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return None + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + # Required for directed traces (upstream/downstream) + await Tracing.set_direction().run(network, network_state_operators=NetworkStateOperators.NORMAL) + + ecs = list(network.objects(EnergyConsumer)) + lines = list(network.objects(AcLineSegment)) + + line_to_z_per_km = { + line.mrid: _line_impedance_per_km(line) + for line in lines + } + + ec_to_loop_z = {} + for ec in ecs: + path_lines = await get_upstream_lines_to_transformer(ec) + loop_z = 2.0 * sum(_line_impedance_per_m(line) * (line.length or 0.0) for line in path_lines) + ec_to_loop_z[ec.mrid] = loop_z + + return ecs, lines, ec_to_loop_z, line_to_z_per_km + + +def _line_impedance_per_m(line: AcLineSegment) -> float: + plsi = line.per_length_sequence_impedance + if plsi is None: + return 0.0 + r = plsi.r or 0.0 + x = plsi.x or 0.0 + return math.hypot(r, x) + + +def _line_impedance_per_km(line: AcLineSegment) -> float: + return _line_impedance_per_m(line) * 1000.0 + + +def collect_upstream_lines_provider(lines: Set[AcLineSegment]): + + async def collect_lines(ps: NetworkTraceStep, _): + line = ps.path.traversed_ac_line_segment + if line is not None: + lines.add(line) + if isinstance(ps.path.to_equipment, AcLineSegment): + lines.add(ps.path.to_equipment) + if isinstance(ps.path.from_equipment, AcLineSegment): + lines.add(ps.path.from_equipment) + + return collect_lines + + +async def get_upstream_lines_to_transformer(ec: EnergyConsumer) -> Set[AcLineSegment]: + lines = set() + + await ( + Tracing.network_trace() + .add_condition(upstream()) + .add_condition(stop_at_open()) + .add_step_action(collect_upstream_lines_provider(lines)) + .add_stop_condition(_is_transformer) + ).run(start=ec, phases=PhaseCode.ABCN, can_stop_on_start_item=False) + + return 
lines + + +def _is_transformer(ps: NetworkTraceStep, _context=None) -> bool: + return isinstance(ps.path.to_equipment, PowerTransformer) + + +async def upload_loop_impedance_study( + eas_client: EasClient, + ecs: List[EnergyConsumer], + lines: List[AcLineSegment], + ec_to_loop_z: Dict[str, float], + line_to_z_per_km: Dict[str, float], + name: str, + description: str, + tags: List[str], + styles: List, +) -> None: + + class_to_properties = { + EnergyConsumer: { + "name": lambda ec: ec.name, + "loop_z_ohm": _loop_z_from(ec_to_loop_z), + "loop_z_label": _loop_z_label_from(ec_to_loop_z), + "type": lambda x: "ec", + }, + AcLineSegment: { + "name": lambda line: line.name, + "z_ohm_per_km": _line_z_per_km_from(line_to_z_per_km), + "type": lambda x: "line", + }, + } + feature_collection = to_geojson_feature_collection(ecs + lines, class_to_properties) + response = await eas_client.async_upload_study( + Study( + name=name, + description=description, + tags=tags, + results=[ + Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=[s['id'] for s in styles] + ) + ) + ], + styles=styles + ) + ) + print(f"Study response: {response}") + + +def _loop_z_from(ec_to_loop_z: Dict[str, float]): + def fun(ec: EnergyConsumer): + return round(ec_to_loop_z.get(ec.mrid, 0.0), 4) + + return fun + + +def _loop_z_label_from(ec_to_loop_z: Dict[str, float]): + def fun(ec: EnergyConsumer): + value = ec_to_loop_z.get(ec.mrid, 0.0) + return f"{value:.2f}Ω" + + return fun + + +def _line_z_per_km_from(line_to_z_per_km: Dict[str, float]): + def fun(line: AcLineSegment): + return round(line_to_z_per_km.get(line.mrid, 0.0), 4) + + return fun + + +def to_geojson_feature_collection( + psrs: List[PowerSystemResource], + class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]] +) -> FeatureCollection: + + features = [] + for psr in psrs: + properties_map = class_to_properties.get(type(psr)) + + if properties_map is not None: + feature = to_geojson_feature(psr, 
def to_geojson_geometry(location: Location) -> Union[Geometry, None]:
    """Convert a Location's position points into a geojson geometry.

    Two or more points become a LineString, exactly one becomes a Point,
    and a missing location or an empty point list yields None.
    """
    if location is None:
        return None
    coords = [(p.x_position, p.y_position) for p in location.points]
    if not coords:
        return None
    if len(coords) == 1:
        return Point(coords[0])
    return LineString(coords)
def chunk(it, size):
    """Lazily yield successive tuples of at most *size* items drawn from *it*.

    The final tuple may be shorter than *size*; an exhausted iterable yields
    nothing.
    """
    iterator = iter(it)
    while True:
        batch = tuple(islice(iterator, size))
        if not batch:
            return
        yield batch
+ print(f"Start time: {datetime.now()}") + + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + substations = hierarchy.value.substations + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + feeder_mrids = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + all_transformers: List[PowerTransformer] = [] + transformer_to_stats: Dict[str, Tuple[int, int, int, int]] = {} + + # Process the feeders in batches of 3, using asyncio, for performance + batches = chunk(feeder_mrids, 3) + for feeders in batches: + futures = [] + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + print(f"Processing feeders {', '.join(feeders)}") + for feeder_mrid in feeders: + futures.append( + asyncio.ensure_future( + fetch_feeder_and_capacity_stats( + feeder_mrid, + rpc_channel, + pv_only=pv_only, + ) + ) + ) + + for future in futures: + transformers, stats = await future + if transformers: # Empty if the feeder failed + all_transformers.extend(transformers) + transformer_to_stats.update(stats) + + print(f"Created Study for {len(all_transformers)} transformers") + + eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"]) + print(f"Uploading Study for 
async def get_downstream_eq(ce: ConductingEquipment) -> Set[ConductingEquipment]:
    """Trace downstream from *ce* and return every piece of conducting
    equipment reached (the start item itself is excluded)."""
    collected: Set[ConductingEquipment] = set()
    trace = (
        Tracing.network_trace()
        .add_condition(downstream())
        .add_step_action(collect_eq_provider(collected))
    )
    # Trace all phases (ABCN); can_stop_on_start_item=False skips the start.
    await trace.run(start=ce, phases=PhaseCode.ABCN, can_stop_on_start_item=False)
    return collected
def _transformer_rating_va(pt: PowerTransformer) -> int:
    """Return the transformer's rating in VA.

    Uses the maximum ``rated_s`` across all transformer ends; when an end has
    no direct ``rated_s`` its ``s_ratings`` table is consulted instead.
    Returns 0 when no rating information is present at all.

    :param pt: the transformer to rate.
    """
    ratings: List[int] = []
    for end in pt.ends:
        if end.rated_s is not None:
            # Coerce to int so the declared return type holds even if the
            # model stores the rating as a float (matches _pec_capacity_va).
            ratings.append(int(end.rated_s))
        else:
            for rating in end.s_ratings:
                # Explicit None check — a falsy-but-present rating entry
                # should still be considered.
                if rating is not None and rating.rated_s is not None:
                    ratings.append(int(rating.rated_s))
    return max(ratings) if ratings else 0
def to_geojson_feature_collection(
    psrs: List[PowerSystemResource],
    class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]],
) -> FeatureCollection:
    """Build a FeatureCollection from the resources that have both a property
    map registered for their class and a usable geometry.

    NOTE(review): the lookup uses the exact ``type(psr)`` — instances of a
    subclass of a mapped class are skipped; confirm that is intended.
    """
    features = []
    for resource in psrs:
        props = class_to_properties.get(type(resource))
        if props is None:
            continue
        feature = to_geojson_feature(resource, props)
        if feature is not None:
            features.append(feature)
    return FeatureCollection(features)
def to_geojson_geometry(location: Location) -> Union[Geometry, None]:
    """Convert a Location's position points into a geojson geometry.

    Two or more points map to a LineString, a single point to a Point, and a
    missing location or empty point list to None.
    """
    if location is None:
        return None
    coords = [(p.x_position, p.y_position) for p in location.points]
    if len(coords) > 1:
        return LineString(coords)
    if coords:
        return Point(coords[0])
    return None
def chunk(it, size):
    """Lazily yield successive tuples of at most *size* items from *it*;
    the last tuple may be short, and an empty iterable yields nothing."""
    source = iter(it)
    while True:
        piece = tuple(islice(source, size))
        if not piece:
            break
        yield piece
def collect_eq_provider(collection: Set[ConductingEquipment]):
    """Return a trace step action that records each visited piece of
    equipment into the supplied *collection* set."""

    async def collect_equipment(step: NetworkTraceStep, _):
        # Every step lands on its path's to_equipment; record it.
        collection.add(step.path.to_equipment)

    return collect_equipment
def _find_pv_energy_consumers(network) -> Set[str]:
    """Return the mRIDs of EnergyConsumers in *network* that appear to have PV.

    A consumer is counted as having PV when either:
      * it shares a usage point with a PhotoVoltaicUnit (directly, or via the
        unit's PowerElectronicsConnection), or
      * one of its terminals shares a connectivity node with a
        PowerElectronicsConnection that contains a PV unit.

    NOTE(review): this is a heuristic — the connectivity-node fallback can
    over-match when multiple consumers sit on the same node; confirm the
    matching rules against how PV is modelled in this network.

    :param network: the network service to scan (shape assumed to expose
        ``objects(cls)``  — TODO confirm the exact type).
    """
    pv_usage_points: Set[str] = set()
    pv_nodes: Set[str] = set()

    # PV units can be directly linked to usage points or via their power electronics connection
    for pv in network.objects(PhotoVoltaicUnit):
        for up in pv.usage_points:
            pv_usage_points.add(up.mrid)
        if pv.power_electronics_connection:
            for up in pv.power_electronics_connection.usage_points:
                pv_usage_points.add(up.mrid)

    # PV units also appear on power electronics connections
    for pec in network.objects(PowerElectronicsConnection):
        if any(isinstance(unit, PhotoVoltaicUnit) for unit in pec.units):
            for up in pec.usage_points:
                pv_usage_points.add(up.mrid)
            # Remember the PEC's connectivity nodes for the fallback match below.
            for terminal in pec.terminals:
                if terminal.connectivity_node:
                    pv_nodes.add(terminal.connectivity_node.mrid)

    pv_consumers: Set[str] = set()
    for ec in network.objects(EnergyConsumer):
        # Primary match: shared usage point.
        if any(up.mrid in pv_usage_points for up in ec.usage_points):
            pv_consumers.add(ec.mrid)
            continue
        # Fallback match: shared connectivity node with a PV-bearing PEC.
        if pv_nodes and any(t.connectivity_node and t.connectivity_node.mrid in pv_nodes for t in ec.terminals):
            pv_consumers.add(ec.mrid)

    return pv_consumers
def to_geojson_feature(
    psr: PowerSystemResource,
    property_map: Dict[str, Callable[[PowerSystemResource], Any]],
) -> Union[Feature, None]:
    """Render *psr* as a geojson Feature, or None when it has no usable geometry.

    Each entry in *property_map* is an extractor applied to the resource to
    produce that feature property.
    """
    geometry = to_geojson_geometry(psr.location)
    if geometry is None:
        return None
    props = {}
    for key, extract in property_map.items():
        props[key] = extract(psr)
    return Feature(psr.mrid, geometry, props)
200, + "rgb(171,221,164)", 300, + "rgb(253,174,97)", 400, + "rgb(244,109,67)", 500, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "ec"] + }, + { + "id": "ec-distance-label", + "name": "Distance Label", + "type": "symbol", + "minzoom": 13, + "layout": { + "text-field": ["get", "distance_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 11 + }, + "paint": { + "text-color": "black", + "text-halo-blur": 6, + "text-halo-width": 6, + "text-halo-color": "white" + }, + "filter" : ["==", ["get", "type"], "ec"] + } +] diff --git a/src/zepben/examples/studies/style_eol.json b/src/zepben/examples/studies/style_eol.json index 546c44c..6001d8b 100644 --- a/src/zepben/examples/studies/style_eol.json +++ b/src/zepben/examples/studies/style_eol.json @@ -50,19 +50,25 @@ "minzoom": 12, "paint": { "circle-radius": 15, - "circle-color": "rgb(0,200,2)", + "circle-color": [ + "step", + ["get", "suspect_length_m"], + "rgb(0,200,2)", 100, + "rgb(244,109,67)", 200, + "rgb(210,2,29)" + ], "circle-stroke-color": "white", "circle-stroke-width": 1 }, "filter" : ["==", ["get", "type"], "pt"] }, { - "id": "consumer-count-label", - "name": "Consumer Count", + "id": "suspect-length-label", + "name": "Suspect Length", "type": "symbol", "minzoom": 12, "layout": { - "text-field": ["get", "consumer_count"], + "text-field": ["get", "suspect_length_label"], "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], "text-size": 12 }, diff --git a/src/zepben/examples/studies/style_loop_impedance.json b/src/zepben/examples/studies/style_loop_impedance.json new file mode 100644 index 0000000..d94e383 --- /dev/null +++ b/src/zepben/examples/studies/style_loop_impedance.json @@ -0,0 +1,65 @@ +[ + { + "id": "line-impedance", + "name": "Line Impedance (ohm/km)", + "type": "line", + "paint": { + "line-color": [ + "case", + ["==", ["get", "z_ohm_per_km"], 0], + "rgb(160,160,160)", + 
["step", ["get", "z_ohm_per_km"], + "rgb(0,180,40)", 0.2, + "rgb(171,221,164)", 0.5, + "rgb(253,174,97)", 1.5, + "rgb(244,109,67)", 6.0, + "rgb(210,2,29)" + ] + ], + "line-width": 2 + }, + "filter" : ["==", ["get", "type"], "line"] + }, + { + "id": "ec-loop-impedance", + "name": "Loop Impedance (ohm)", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 12, + "circle-color": [ + "case", + ["==", ["get", "loop_z_ohm"], 0], + "rgb(160,160,160)", + ["step", ["get", "loop_z_ohm"], + "rgb(0,180,40)", 0.1, + "rgb(171,221,164)", 0.2, + "rgb(253,174,97)", 0.5, + "rgb(244,109,67)", 1.0, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "ec"] + }, + { + "id": "ec-loop-impedance-label", + "name": "Loop Impedance Label", + "type": "symbol", + "minzoom": 13, + "layout": { + "text-field": ["get", "loop_z_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 11 + }, + "paint": { + "text-color": "black", + "text-halo-blur": 6, + "text-halo-width": 6, + "text-halo-color": "white" + }, + "filter" : ["==", ["get", "type"], "ec"] + } +] diff --git a/src/zepben/examples/studies/style_pec_capacity_percent.json b/src/zepben/examples/studies/style_pec_capacity_percent.json new file mode 100644 index 0000000..70f8669 --- /dev/null +++ b/src/zepben/examples/studies/style_pec_capacity_percent.json @@ -0,0 +1,41 @@ +[ + { + "id": "pec-capacity-transformers", + "name": "PEC Capacity % by Transformer", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 15, + "circle-color": [ + "case", + ["==", ["get", "transformer_rating_va"], 0], + "rgb(160,160,160)", + ["step", ["get", "capacity_percent"], + "rgb(0,180,40)", 30, + "rgb(171,221,164)", 60, + "rgb(253,174,97)", 80, + "rgb(244,109,67)", 90, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": 
"pec-capacity-label", + "name": "PEC Capacity %", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": ["get", "capacity_percent_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 12 + }, + "paint": { + "text-color": "white" + }, + "filter" : ["==", ["get", "type"], "pt"] + } +] diff --git a/src/zepben/examples/studies/style_pv_percent.json b/src/zepben/examples/studies/style_pv_percent.json new file mode 100644 index 0000000..ed0d72a --- /dev/null +++ b/src/zepben/examples/studies/style_pv_percent.json @@ -0,0 +1,41 @@ +[ + { + "id": "pv-transformers", + "name": "PV % by Transformer", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 15, + "circle-color": [ + "case", + ["<", ["get", "ec_count"], 3], + "rgb(160,160,160)", + ["step", ["get", "pv_percent"], + "rgb(0,180,40)", 10, + "rgb(171,221,164)", 25, + "rgb(253,174,97)", 40, + "rgb(244,109,67)", 60, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "pv-percent-label", + "name": "PV Percentage", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": ["get", "pv_percent_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 12 + }, + "paint": { + "text-color": "white" + }, + "filter" : ["==", ["get", "type"], "pt"] + } +] diff --git a/src/zepben/examples/studies/style_tap_changer.json b/src/zepben/examples/studies/style_tap_changer.json new file mode 100644 index 0000000..980bc89 --- /dev/null +++ b/src/zepben/examples/studies/style_tap_changer.json @@ -0,0 +1,48 @@ +[ + { + "id": "tap-transformers", + "name": "Tap Changer Transformers", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 12, + "circle-color": "rgb(0,120,255)", + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "tap-step-label", + "name": 
"Tap Step", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": ["get", "tap_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 11 + }, + "paint": { + "text-color": "white" + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "tap-json-label", + "name": "Tap JSON", + "type": "symbol", + "minzoom": 16, + "layout": { + "text-field": ["get", "tap_json"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 10 + }, + "paint": { + "text-color": "black", + "text-halo-blur": 6, + "text-halo-width": 6, + "text-halo-color": "white" + }, + "filter" : ["==", ["get", "type"], "pt"] + } +] diff --git a/src/zepben/examples/studies/style_transformer_density.json b/src/zepben/examples/studies/style_transformer_density.json new file mode 100644 index 0000000..4f28db2 --- /dev/null +++ b/src/zepben/examples/studies/style_transformer_density.json @@ -0,0 +1,128 @@ +[ + { + "id": "ec-density-circle", + "name": "EC Density (/100m)", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 15, + "circle-color": [ + "case", + ["==", ["get", "ec_density"], 0], + "rgb(160,160,160)", + ["step", ["get", "ec_density"], + "rgb(0,180,40)", 0.5, + "rgb(171,221,164)", 2, + "rgb(253,174,97)", 5, + "rgb(244,109,67)", 10, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "ec-density-label", + "name": "EC Density Label", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": ["get", "ec_density_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 12 + }, + "paint": { + "text-color": "black", + "text-halo-color": "white", + "text-halo-width": 2, + "text-halo-blur": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "up-density-circle", + "name": "Usage Point Density (/100m)", + "type": "circle", + "minzoom": 12, + "paint": { + 
"circle-radius": 15, + "circle-color": [ + "case", + ["==", ["get", "up_density"], 0], + "rgb(160,160,160)", + ["step", ["get", "up_density"], + "rgb(0,180,40)", 0.5, + "rgb(171,221,164)", 2, + "rgb(253,174,97)", 5, + "rgb(244,109,67)", 10, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "up-density-label", + "name": "Usage Point Density Label", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": ["get", "up_density_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 12 + }, + "paint": { + "text-color": "black", + "text-halo-color": "white", + "text-halo-width": 2, + "text-halo-blur": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "pv-density-circle", + "name": "PV Density (/100m)", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 15, + "circle-color": [ + "case", + ["==", ["get", "pv_density"], 0], + "rgb(160,160,160)", + ["step", ["get", "pv_density"], + "rgb(0,180,40)", 0.1, + "rgb(171,221,164)", 0.5, + "rgb(253,174,97)", 1, + "rgb(244,109,67)", 2, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "pv-density-label", + "name": "PV Density Label", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": ["get", "pv_density_label"], + "text-font": ["DIN Offc Pro Medium", "Arial Unicode MS Bold"], + "text-size": 12 + }, + "paint": { + "text-color": "black", + "text-halo-color": "white", + "text-halo-width": 2, + "text-halo-blur": 1 + }, + "filter" : ["==", ["get", "type"], "pt"] + } +] diff --git a/src/zepben/examples/studies/suspect_end_of_line.py b/src/zepben/examples/studies/suspect_end_of_line.py index abcaf6f..2dff9b0 100644 --- a/src/zepben/examples/studies/suspect_end_of_line.py +++ b/src/zepben/examples/studies/suspect_end_of_line.py @@ -14,9 +14,9 @@ 
async def main():
    """Run the suspect-end-of-line study for the feeders of the configured zones.

    For each batch of feeders, traces every distribution transformer's
    downstream network, collects AC line segments with no downstream
    EnergyConsumer, and uploads one study per batch to EAS.
    """
    # Only process feeders in the following zones
    zone_mrids = ["CPM"]
    print(f"Start time: {datetime.now()}")

    rpc_channel = connect_with_token(
        host=c["host"],
        access_token=c["access_token"],
        rpc_port=c["rpc_port"],
        ca_filename=c.get("ca_filename"),
        timeout_seconds=c.get("timeout_seconds", 5),
        debug=bool(c.get("debug", False)),
        skip_connection_test=bool(c.get("skip_connection_test", False)),
    )
    client = NetworkConsumerClient(rpc_channel)
    hierarchy = (await client.get_network_hierarchy()).throw_on_error()
    substations = hierarchy.value.substations

    print(f"Collecting feeders from zones {', '.join(zone_mrids)}.")
    feeder_mrids = []
    for zone_mrid in zone_mrids:
        if zone_mrid in substations:
            for feeder in substations[zone_mrid].feeders:
                feeder_mrids.append(feeder.mrid)

    print(f"Feeders to be processed: {', '.join(feeder_mrids)}")

    # Fix: load the styles once, with a properly closed file handle, instead of
    # leaking a handle via json.load(open(...)) on every batch iteration.
    with open("style_eol.json", "r") as styles_file:
        styles = json.load(styles_file)

    # Process the feeders in batches of 3, using asyncio, for performance
    batches = chunk(feeder_mrids, 3)
    for feeders in batches:
        feeder_results = []
        futures = []

        # A fresh channel per batch; presumably avoids long-lived stream limits — TODO confirm
        rpc_channel = connect_with_token(
            host=c["host"],
            access_token=c["access_token"],
            rpc_port=c["rpc_port"],
            ca_filename=c.get("ca_filename"),
            timeout_seconds=c.get("timeout_seconds", 5),
            debug=bool(c.get("debug", False)),
            skip_connection_test=bool(c.get("skip_connection_test", False)),
        )
        print(f"Processing feeders {', '.join(feeders)}")
        for feeder_mrid in feeders:
            futures.append(asyncio.ensure_future(fetch_feeder_and_trace(feeder_mrid, rpc_channel)))

        for future in futures:
            result = await future
            if not result:  # Empty if the feeder failed
                continue
            feeder_mrid, transformers, tx_to_sus_lines, feeder_suspect_lines = result
            total_length_m = sum(_line_length_m(line) for line in feeder_suspect_lines)
            # Highlight both the transformers and their suspect line segments.
            highlight_equipment = set(transformers) | set(feeder_suspect_lines)
            feeder_results.append(
                (feeder_mrid, total_length_m, list(highlight_equipment), tx_to_sus_lines)
            )

        print(f"Created Study for {len(feeder_mrids)} feeders")

        eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"])

        print(f"Uploading Study for {', '.join(feeders)} ...")
        results = [
            _build_suspect_end_result(
                feeder_mrid,
                total_length_m,
                equipment,
                transformer_to_suspect_lines,
                styles=styles,
            )
            for feeder_mrid, total_length_m, equipment, transformer_to_suspect_lines in feeder_results
        ]
        await upload_suspect_end_of_line_study(
            eas_client,
            results,
            name=f"Suspect end of line ({', '.join(feeders)})",
            description="Highlights only line segments that have no downstream EnergyConsumers (excludes shared upstream segments).",
            tags=["suspect_end_of_line", "-".join(zone_mrids)],
            styles=styles
        )
        await eas_client.aclose()
        print(f"Uploaded Study")

    print(f"Finish time: {datetime.now()}")
def build_has_consumer_downstream(
    adjacency: Dict[ConductingEquipment, Set[ConductingEquipment]],
    backfeed_switches: Set[ConductingEquipment],
):
    """Return a predicate ``has_consumer(node) -> bool`` over the downstream graph.

    A node "has a consumer downstream" when it is itself an EnergyConsumer,
    is a backfeed switch, or can reach such a node via ``adjacency`` edges.

    Fix: the previous recursive implementation memoised ``False`` results
    that were computed while an ancestor was still on the DFS stack, so in a
    cyclic adjacency (A -> B, B -> A, A -> consumer) querying A first cached
    a wrong ``memo[B] = False``. It could also exceed Python's recursion
    limit on long radial feeders. This version pre-computes the full
    "reaches a consumer" set with an iterative reverse-reachability sweep,
    which is both cycle-safe and stack-safe.
    """
    # Collect every node mentioned in the graph and build reverse edges.
    reverse_edges: Dict[ConductingEquipment, Set[ConductingEquipment]] = {}
    all_nodes: Set[ConductingEquipment] = set(adjacency)
    for parent, children in adjacency.items():
        for child in children:
            all_nodes.add(child)
            reverse_edges.setdefault(child, set()).add(parent)

    # Seed with terminating nodes, then walk edges backwards so every
    # ancestor of a seed is marked exactly once.
    frontier = [
        node for node in all_nodes
        if isinstance(node, EnergyConsumer) or node in backfeed_switches
    ]
    reaches_consumer: Set[ConductingEquipment] = set(frontier)
    while frontier:
        node = frontier.pop()
        for parent in reverse_edges.get(node, ()):
            if parent not in reaches_consumer:
                reaches_consumer.add(parent)
                frontier.append(parent)

    def has_consumer(node: ConductingEquipment) -> bool:
        # Fall back to the direct checks for nodes never seen in the trace.
        return (
            node in reaches_consumer
            or isinstance(node, EnergyConsumer)
            or node in backfeed_switches
        )

    return has_consumer
upstream_eq = set( - await _get_upstream_eq_up_to_transformer(stj) for stj in single_terminal_junctions - ) - - transformer_to_suspect_end[pt_mrid] = (len(single_terminal_junctions), list(upstream_eq)) - - return transformer_to_suspect_end + transformers.append(pt) + suspect_lines = await get_suspect_lines_for_transformer(pt) + feeder_suspect_lines.update(suspect_lines) + total_length_m = sum(_line_length_m(line) for line in suspect_lines) + transformer_to_suspect_lines[pt.mrid] = (total_length_m, list(suspect_lines)) + + return feeder_mrid, transformers, transformer_to_suspect_lines, feeder_suspect_lines + + +def _switch_has_external_network( + switch: Switch, + nodes: Set[ConductingEquipment], +) -> bool: + for terminal in switch.terminals: + cn = terminal.connectivity_node + if cn is None: + continue + for term in cn.terminals: + ce = term.conducting_equipment + if ce is None or ce is switch: + continue + if ce not in nodes: + return True + return False + + +def _find_backfeed_switches( + nodes: Set[ConductingEquipment], +) -> Set[ConductingEquipment]: + backfeed = set() + for node in nodes: + if isinstance(node, Switch) and _switch_has_external_network(node, nodes): + backfeed.add(node) + return backfeed + + +async def get_suspect_lines_for_transformer( + transformer: PowerTransformer, +) -> Set[AcLineSegment]: + adjacency, nodes = await build_downstream_graph(transformer) + backfeed_switches = _find_backfeed_switches(nodes) + has_consumer = build_has_consumer_downstream(adjacency, backfeed_switches) + + suspect_lines = { + node for node in nodes + if isinstance(node, AcLineSegment) and not has_consumer(node) + } + return suspect_lines -async def upload_suspect_end_of_line_study( - eas_client: EasClient, - pts: List[PowerTransformer], - transformer_to_suspect_end: Dict[str, Tuple[int, List[ConductingEquipment]]], - name: str, - description: str, - tags: List[str], +def _build_suspect_end_result( + feeder_mrid: str, + total_length_m: float, + pts: 
async def upload_suspect_end_of_line_study(
    eas_client: EasClient,
    results: List[Result],
    name: str,
    description: str,
    tags: List[str],
    styles: List
) -> None:
    """Bundle the per-feeder results into a single Study and upload it to EAS."""
    study = Study(
        name=name,
        description=description,
        tags=tags,
        results=results,
        styles=styles
    )
    response = await eas_client.async_upload_study(study)
    print(f"Study response: {response}")
round(value[0]) if value else 0 -async def _is_transformer(ps: NetworkTraceStep): - return isinstance(ps.path.to_equipment, PowerTransformer) + return fun -def _suspect_end_count_from(pt_to_sus_end: Dict[str, Tuple[int, List[ConductingEquipment]]]): +def _suspect_length_label_from(pt_to_sus_end: Dict[str, Tuple[float, List[ConductingEquipment]]]): def fun(pt: PowerTransformer): - count, suspect_eq = pt_to_sus_end.get(pt.mrid) - return count if count else 0 + value = pt_to_sus_end.get(pt.mrid) + meters = round(value[0]) if value else 0 + return f"{meters}m" return fun +def _line_length_m(line: AcLineSegment) -> float: + return float(line.length or 0.0) + + def to_geojson_feature_collection( psrs: List[PowerSystemResource], class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]] @@ -263,5 +361,4 @@ def to_geojson_geometry(location: Location) -> Union[Geometry, None]: if __name__ == "__main__": - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) + asyncio.run(main()) diff --git a/src/zepben/examples/studies/suspect_end_of_line_all_feeders.py b/src/zepben/examples/studies/suspect_end_of_line_all_feeders.py new file mode 100644 index 0000000..7716ca4 --- /dev/null +++ b/src/zepben/examples/studies/suspect_end_of_line_all_feeders.py @@ -0,0 +1,121 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
async def main():
    """Run the suspect-end-of-line study across every feeder of the configured zones.

    Unlike the per-batch variant, this accumulates results for all feeders
    and uploads them as one combined study per zone list.
    """
    # Only process feeders in the following zones
    zone_mrids = ["CPM"]
    print(f"Start time: {datetime.now()}")

    rpc_channel = connect_with_token(
        host=c["host"],
        access_token=c["access_token"],
        rpc_port=c["rpc_port"],
        ca_filename=c.get("ca_filename"),
        timeout_seconds=c.get("timeout_seconds", 5),
        debug=bool(c.get("debug", False)),
        skip_connection_test=bool(c.get("skip_connection_test", False)),
    )
    client = NetworkConsumerClient(rpc_channel)
    hierarchy = (await client.get_network_hierarchy()).throw_on_error()
    substations = hierarchy.value.substations

    print(f"Collecting feeders from zones {', '.join(zone_mrids)}.")
    feeder_mrids = []
    for zone_mrid in zone_mrids:
        if zone_mrid in substations:
            for feeder in substations[zone_mrid].feeders:
                feeder_mrids.append(feeder.mrid)

    print(f"Feeders to be processed: {', '.join(feeder_mrids)}")

    feeder_results = []

    # Process the feeders in batches of 3, using asyncio, for performance
    batches = chunk(feeder_mrids, 3)
    for feeders in batches:
        futures = []
        # A fresh channel per batch; presumably avoids long-lived stream limits — TODO confirm
        rpc_channel = connect_with_token(
            host=c["host"],
            access_token=c["access_token"],
            rpc_port=c["rpc_port"],
            ca_filename=c.get("ca_filename"),
            timeout_seconds=c.get("timeout_seconds", 5),
            debug=bool(c.get("debug", False)),
            skip_connection_test=bool(c.get("skip_connection_test", False)),
        )
        print(f"Processing feeders {', '.join(feeders)}")
        for feeder_mrid in feeders:
            futures.append(asyncio.ensure_future(fetch_feeder_and_trace(feeder_mrid, rpc_channel)))

        for future in futures:
            result = await future
            if not result:  # Empty if the feeder failed
                continue
            feeder_mrid, transformers, tx_to_sus_lines, feeder_suspect_lines = result
            total_length_m = sum(_line_length_m(line) for line in feeder_suspect_lines)
            highlight_equipment = set(transformers) | set(feeder_suspect_lines)
            feeder_results.append(
                (feeder_mrid, total_length_m, list(highlight_equipment), tx_to_sus_lines)
            )

    print(f"Created Study for {len(feeder_mrids)} feeders")

    eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"])

    print(f"Uploading Study for zones {', '.join(zone_mrids)} ...")
    # Fix: close the styles file handle instead of leaking it via json.load(open(...)).
    with open("style_eol.json", "r") as styles_file:
        styles = json.load(styles_file)
    results = [
        _build_suspect_end_result(
            feeder_mrid,
            total_length_m,
            equipment,
            transformer_to_suspect_lines,
            styles=styles,
        )
        for feeder_mrid, total_length_m, equipment, transformer_to_suspect_lines in feeder_results
    ]
    await upload_suspect_end_of_line_study(
        eas_client,
        results,
        name=f"Suspect end of line ({', '.join(zone_mrids)})",
        description="Highlights only line segments that have no downstream EnergyConsumers (excludes shared upstream segments).",
        tags=["suspect_end_of_line", "-".join(zone_mrids)],
        styles=styles,
    )
    await eas_client.aclose()
    print("Uploaded Study")

    print(f"Finish time: {datetime.now()}")
async def main():
    """Collect RatioTapChanger info for every transformer in the configured zones and upload a study."""
    # Only process feeders in the following zones
    zone_mrids = ["CPM"]
    print(f"Start time: {datetime.now()}")

    rpc_channel = connect_with_token(
        host=c["host"],
        access_token=c["access_token"],
        rpc_port=c["rpc_port"],
        ca_filename=c.get("ca_filename"),
        timeout_seconds=c.get("timeout_seconds", 5),
        debug=bool(c.get("debug", False)),
        skip_connection_test=bool(c.get("skip_connection_test", False)),
    )
    client = NetworkConsumerClient(rpc_channel)
    hierarchy = (await client.get_network_hierarchy()).throw_on_error()
    substations = hierarchy.value.substations

    print(f"Collecting feeders from zones {', '.join(zone_mrids)}.")
    feeder_mrids = []
    for zone_mrid in zone_mrids:
        if zone_mrid in substations:
            for feeder in substations[zone_mrid].feeders:
                feeder_mrids.append(feeder.mrid)

    print(f"Feeders to be processed: {', '.join(feeder_mrids)}")

    all_transformers: List[PowerTransformer] = []
    transformer_to_tap: Dict[str, Dict[str, Any]] = {}

    # Process the feeders in batches of 3, using asyncio, for performance
    batches = chunk(feeder_mrids, 3)
    for feeders in batches:
        futures = []
        # A fresh channel per batch; presumably avoids long-lived stream limits — TODO confirm
        rpc_channel = connect_with_token(
            host=c["host"],
            access_token=c["access_token"],
            rpc_port=c["rpc_port"],
            ca_filename=c.get("ca_filename"),
            timeout_seconds=c.get("timeout_seconds", 5),
            debug=bool(c.get("debug", False)),
            skip_connection_test=bool(c.get("skip_connection_test", False)),
        )
        print(f"Processing feeders {', '.join(feeders)}")
        for feeder_mrid in feeders:
            futures.append(asyncio.ensure_future(fetch_tap_info_for_feeder(feeder_mrid, rpc_channel)))

        for future in futures:
            transformers, tap_info = await future
            if transformers:
                all_transformers.extend(transformers)
                transformer_to_tap.update(tap_info)

    print(f"Creating study for {len(all_transformers)} transformers")

    eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"])
    print(f"Uploading Study for zones {', '.join(zone_mrids)} ...")
    # Fix: close the styles file handle instead of leaking it via json.load(open(...)).
    with open("style_tap_changer.json", "r") as styles_file:
        styles = json.load(styles_file)
    await upload_tap_changer_study(
        eas_client,
        all_transformers,
        transformer_to_tap,
        name=f"Tap changer info ({', '.join(zone_mrids)})",
        description="Tap changer info shown as JSON at transformers; step or normal_step shown as label.",
        tags=["tap_changer", "-".join(zone_mrids)],
        styles=styles,
    )
    await eas_client.aclose()
    print("Uploaded Study")

    print(f"Finish time: {datetime.now()}")
def _collect_tap_changers_by_transformer(network) -> Dict[str, List[RatioTapChanger]]:
    """Group every RatioTapChanger in the service by its owning transformer's mRID.

    Tap changers without a transformer end, or whose end has no power
    transformer, are skipped.
    """
    grouped: Dict[str, List[RatioTapChanger]] = {}
    for tap_changer in network.objects(RatioTapChanger):
        end = tap_changer.transformer_end
        if end is not None and end.power_transformer is not None:
            grouped.setdefault(end.power_transformer.mrid, []).append(tap_changer)
    return grouped
json.dumps(tap_json, separators=(",", ":")), + "tap_step": step_value if step_value is not None else 0, + "tap_count": len(tap_changers), + } + + +async def upload_tap_changer_study( + eas_client: EasClient, + pts: List[PowerTransformer], + transformer_to_tap: Dict[str, Dict[str, Any]], + name: str, + description: str, + tags: List[str], + styles: List, +) -> None: + + class_to_properties = { + PowerTransformer: { + "tap_label": _tap_label_from(transformer_to_tap), + "tap_json": _tap_json_from(transformer_to_tap), + "tap_step": _tap_step_from(transformer_to_tap), + "tap_count": _tap_count_from(transformer_to_tap), + "type": lambda x: "pt", + }, + } + feature_collection = to_geojson_feature_collection(pts, class_to_properties) + if not feature_collection.features: + print("No transformer features to display (missing locations or tap changers). Study upload skipped.") + return + response = await eas_client.async_upload_study( + Study( + name=name, + description=description, + tags=tags, + results=[ + Result( + name=name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=[s['id'] for s in styles] + ) + ) + ], + styles=styles + ) + ) + print(f"Study response: {response}") + + +def _tap_label_from(pt_to_tap: Dict[str, Dict[str, Any]]): + def fun(pt: PowerTransformer): + info = pt_to_tap.get(pt.mrid, {}) + return info.get("tap_label", "n/a") + + return fun + + +def _tap_json_from(pt_to_tap: Dict[str, Dict[str, Any]]): + def fun(pt: PowerTransformer): + info = pt_to_tap.get(pt.mrid, {}) + return info.get("tap_json", "{}") + + return fun + + +def _tap_step_from(pt_to_tap: Dict[str, Dict[str, Any]]): + def fun(pt: PowerTransformer): + info = pt_to_tap.get(pt.mrid, {}) + return info.get("tap_step", 0) + + return fun + + +def _tap_count_from(pt_to_tap: Dict[str, Dict[str, Any]]): + def fun(pt: PowerTransformer): + info = pt_to_tap.get(pt.mrid, {}) + return info.get("tap_count", 0) + + return fun + + +def to_geojson_feature_collection( + psrs: 
def to_geojson_geometry(location: Location) -> Union[Geometry, None]:
    """Convert a Location into a geojson geometry.

    Two or more points become a LineString, exactly one becomes a Point,
    and a missing location (or one with no points) yields None.
    """
    if location is None:
        return None
    coords = [(point.x_position, point.y_position) for point in location.points]
    if not coords:
        return None
    if len(coords) == 1:
        return Point(coords[0])
    return LineString(coords)
async def main():
    """Compute downstream EC / usage-point / PV densities per transformer and upload a study."""
    zone_mrids = ["CPM"]
    print(f"Start time: {datetime.now()}")

    rpc_channel = connect_with_token(
        host=c["host"],
        access_token=c["access_token"],
        rpc_port=c["rpc_port"],
        ca_filename=c.get("ca_filename"),
        timeout_seconds=c.get("timeout_seconds", 5),
        debug=bool(c.get("debug", False)),
        skip_connection_test=bool(c.get("skip_connection_test", False)),
    )
    client = NetworkConsumerClient(rpc_channel)
    hierarchy = (await client.get_network_hierarchy()).throw_on_error()
    substations = hierarchy.value.substations

    print(f"Collecting feeders from zones {', '.join(zone_mrids)}.")
    feeder_mrids = []
    for zone_mrid in zone_mrids:
        if zone_mrid in substations:
            for feeder in substations[zone_mrid].feeders:
                feeder_mrids.append(feeder.mrid)

    print(f"Feeders to be processed: {', '.join(feeder_mrids)}")

    all_transformers: List[PowerTransformer] = []
    transformer_to_metrics: Dict[str, Dict[str, float]] = {}

    # Process the feeders in batches of 3, using asyncio, for performance
    batches = chunk(feeder_mrids, 3)
    for feeders in batches:
        futures = []
        # A fresh channel per batch; presumably avoids long-lived stream limits — TODO confirm
        rpc_channel = connect_with_token(
            host=c["host"],
            access_token=c["access_token"],
            rpc_port=c["rpc_port"],
            ca_filename=c.get("ca_filename"),
            timeout_seconds=c.get("timeout_seconds", 5),
            debug=bool(c.get("debug", False)),
            skip_connection_test=bool(c.get("skip_connection_test", False)),
        )
        print(f"Processing feeders {', '.join(feeders)}")
        for feeder_mrid in feeders:
            futures.append(asyncio.ensure_future(fetch_transformer_densities(feeder_mrid, rpc_channel)))

        for future in futures:
            transformers, metrics = await future
            if transformers:
                all_transformers.extend(transformers)
                transformer_to_metrics.update(metrics)

    print(f"Creating study for {len(all_transformers)} transformers")

    # Fix: close the styles file handle instead of leaking it via json.load(open(...)).
    with open("style_transformer_density.json", "r") as styles_file:
        styles = json.load(styles_file)
    results = [
        _build_density_result(
            "EnergyConsumer density (/100m)",
            all_transformers,
            transformer_to_metrics,
            metric_key="ec_density",
            label_key="ec_density_label",
            style_ids=["ec-density-circle", "ec-density-label"],
        ),
        _build_density_result(
            "UsagePoint density (/100m)",
            all_transformers,
            transformer_to_metrics,
            metric_key="up_density",
            label_key="up_density_label",
            style_ids=["up-density-circle", "up-density-label"],
        ),
        _build_density_result(
            "PV density (/100m)",
            all_transformers,
            transformer_to_metrics,
            metric_key="pv_density",
            label_key="pv_density_label",
            style_ids=["pv-density-circle", "pv-density-label"],
        ),
    ]

    # Drop empty overlays (e.g. all transformers missing locations).
    results = [r for r in results if r is not None]
    if not results:
        print("No transformer features to display (missing locations). Study upload skipped.")
        return

    eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"])
    print(f"Uploading Study for zones {', '.join(zone_mrids)} ...")
    await eas_client.async_upload_study(
        Study(
            name=f"Transformer densities ({', '.join(zone_mrids)})",
            description="Downstream EC, UsagePoint and PV density per 100m of AC line segment.",
            tags=["transformer_density", "-".join(zone_mrids)],
            results=results,
            styles=styles,
        )
    )
    await eas_client.aclose()
    print("Uploaded Study")
    print(f"Finish time: {datetime.now()}")
total_length_m) + pv_density = _safe_density_per_100m(pv_count, total_length_m) + + transformer_to_metrics[pt.mrid] = { + "ec_density": ec_density, + "ec_density_label": _density_label(ec_density), + "up_density": up_density, + "up_density_label": _density_label(up_density), + "pv_density": pv_density, + "pv_density_label": _density_label(pv_density), + } + + return transformers, transformer_to_metrics + + +def _usage_point_count(ecs: Set[EnergyConsumer]) -> int: + usage_points = set() + for ec in ecs: + for up in ec.usage_points: + usage_points.add(up.mrid) + return len(usage_points) + + +def _safe_density_per_100m(count: int, length_m: float) -> float: + if length_m <= 0: + return 0.0 + return (count / length_m) * 100.0 + + +def _density_label(value: float) -> str: + return f"{value:.2f}/100m" if value > 0 else "n/a" + + +def _line_length_m(line: AcLineSegment) -> float: + return float(line.length or 0.0) + + +def collect_downstream_edges_provider( + adjacency: Dict[PowerSystemResource, Set[PowerSystemResource]], + nodes: Set[PowerSystemResource], +): + + async def collect_edges(ps: NetworkTraceStep, _): + nodes.add(ps.path.from_equipment) + nodes.add(ps.path.to_equipment) + if ps.path.traced_externally: + adjacency.setdefault(ps.path.from_equipment, set()).add(ps.path.to_equipment) + + return collect_edges + + +async def get_downstream_eq(ce: PowerTransformer) -> Set[PowerSystemResource]: + nodes: Set[PowerSystemResource] = {ce} + adjacency: Dict[PowerSystemResource, Set[PowerSystemResource]] = {} + + await ( + Tracing.network_trace() + .add_condition(downstream()) + .add_step_action(collect_downstream_edges_provider(adjacency, nodes)) + ).run(start=ce, phases=PhaseCode.ABCN, can_stop_on_start_item=False) + + return nodes + + +def _find_pv_energy_consumers(network) -> Set[str]: + pv_usage_points: Set[str] = set() + pv_nodes: Set[str] = set() + + for pv in network.objects(PhotoVoltaicUnit): + for up in pv.usage_points: + pv_usage_points.add(up.mrid) + if 
pv.power_electronics_connection: + for up in pv.power_electronics_connection.usage_points: + pv_usage_points.add(up.mrid) + + for pec in network.objects(PowerElectronicsConnection): + if any(isinstance(unit, PhotoVoltaicUnit) for unit in pec.units): + for up in pec.usage_points: + pv_usage_points.add(up.mrid) + for terminal in pec.terminals: + if terminal.connectivity_node: + pv_nodes.add(terminal.connectivity_node.mrid) + + pv_consumers: Set[str] = set() + for ec in network.objects(EnergyConsumer): + if any(up.mrid in pv_usage_points for up in ec.usage_points): + pv_consumers.add(ec.mrid) + continue + if pv_nodes and any(t.connectivity_node and t.connectivity_node.mrid in pv_nodes for t in ec.terminals): + pv_consumers.add(ec.mrid) + + return pv_consumers + + +def _build_density_result( + result_name: str, + transformers: List[PowerTransformer], + transformer_to_metrics: Dict[str, Dict[str, float]], + metric_key: str, + label_key: str, + style_ids: List[str], +) -> Union[Result, None]: + class_to_properties = { + PowerTransformer: { + metric_key: _metric_from(transformer_to_metrics, metric_key), + label_key: _metric_from(transformer_to_metrics, label_key), + "type": lambda x: "pt", + }, + } + feature_collection = to_geojson_feature_collection(transformers, class_to_properties) + if not feature_collection.features: + return None + return Result( + name=result_name, + geo_json_overlay=GeoJsonOverlay( + data=feature_collection, + styles=style_ids, + ) + ) + + +def _metric_from(pt_to_metrics: Dict[str, Dict[str, float]], key: str): + def fun(pt: PowerTransformer): + info = pt_to_metrics.get(pt.mrid, {}) + return info.get(key, 0.0) + + return fun + + +def to_geojson_feature_collection( + psrs: List[PowerSystemResource], + class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]] +) -> FeatureCollection: + + features = [] + for psr in psrs: + properties_map = class_to_properties.get(type(psr)) + + if properties_map is not None: + feature = to_geojson_feature(psr, 
properties_map) + if feature is not None: + features.append(feature) + + return FeatureCollection(features) + + +def to_geojson_feature( + psr: PowerSystemResource, + property_map: Dict[str, Callable[[PowerSystemResource], Any]] +) -> Union[Feature, None]: + + geometry = to_geojson_geometry(psr.location) + if geometry is None: + return None + + properties = {k: f(psr) for (k, f) in property_map.items()} + return Feature(psr.mrid, geometry, properties) + + +def to_geojson_geometry(location: Location) -> Union[Geometry, None]: + points = list(location.points) if location is not None else [] + if len(points) > 1: + return LineString([(point.x_position, point.y_position) for point in points]) + elif len(points) == 1: + return Point((points[0].x_position, points[0].y_position)) + else: + return None + + +if __name__ == "__main__": + asyncio.run(main()) From 3e8c50ab9bf537b9744d6dc46813fe6a0c06ca14 Mon Sep 17 00:00:00 2001 From: adamwhite629 <102849144+adamwhite629@users.noreply.github.com> Date: Wed, 4 Feb 2026 22:51:02 +1100 Subject: [PATCH 2/4] transformer-utilisation-actuals-import-export-study-example --- .../data_quality_summary.py | 126 ++- .../style_transformer_utilisation.json | 118 +++ .../transformer_utilisation_by_demand.py | 828 ++++++++++++++++++ 3 files changed, 1038 insertions(+), 34 deletions(-) create mode 100644 src/zepben/examples/studies/style_transformer_utilisation.json create mode 100644 src/zepben/examples/studies/transformer_utilisation_by_demand.py diff --git a/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py b/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py index bd7bc29..a3f20f5 100644 --- a/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py +++ b/src/zepben/examples/studies/data_quality_studies/data_quality_summary.py @@ -7,6 +7,7 @@ import asyncio import json import sys +import time from datetime import datetime from pathlib import Path from typing import Dict, List, 
Set, Tuple @@ -28,6 +29,8 @@ import spatial_location_anomalies as sl from dq_utils import chunk, get_zone_mrids, load_config +BATCH_SIZE = 4 + async def main(): zone_mrids = get_zone_mrids(sys.argv, default=["CPM"]) @@ -60,41 +63,32 @@ async def main(): ec_to_distance: Dict[str, float] = {} very_long_lines: Set = set() - for feeders in chunk(feeder_mrids, 3): + for feeders in chunk(feeder_mrids, BATCH_SIZE): rpc_channel = _connect_rpc(config) - for feeder_mrid in feeders: - network = await _fetch_feeder_network(feeder_mrid, rpc_channel) - if network is None: + tasks = [ + asyncio.create_task(_process_feeder(feeder_mrid, rpc_channel)) + for feeder_mrid in feeders + ] + for result in await asyncio.gather(*tasks): + if result is None: continue - - feeder_open, feeder_disconnected = await cg.analyze_network(network, feeder_mrid) - open_ended_lines.update(feeder_open) - disconnected_lines.update(feeder_disconnected) - - feeder_unserved, feeder_missing_lv, feeder_no_load = await cm.analyze_network(network, feeder_mrid) - unserved_ecs.update(feeder_unserved) - missing_lv_feeder_ecs.update(feeder_missing_lv) - no_load_transformers.update(feeder_no_load) - - mismatch_features, missing_lines = await pc.analyze_network(network) - phase_mismatch_features.extend(mismatch_features) - missing_phase_lines.update(missing_lines) - - z_lines, i_lines, t_missing, t_imp_missing, s_missing = await aa.analyze_network(network) - zero_length_lines.update(z_lines) - missing_impedance_lines.update(i_lines) - missing_rating_transformers.update(t_missing) - missing_impedance_transformers.update(t_imp_missing) - missing_normal_state_switches.update(s_missing) - - loop_segments, switch_issues = await pd.analyze_network(network) - loop_lines.update(loop_segments) - switch_terminal_issues.update(switch_issues) - - feeder_ecs, feeder_distances, feeder_lines = await sl.analyze_network(network) - long_service_ecs.update(feeder_ecs) - ec_to_distance.update(feeder_distances) - 
very_long_lines.update(feeder_lines) + open_ended_lines.update(result["open_ended_lines"]) + disconnected_lines.update(result["disconnected_lines"]) + unserved_ecs.update(result["unserved_ecs"]) + missing_lv_feeder_ecs.update(result["missing_lv_feeder_ecs"]) + no_load_transformers.update(result["no_load_transformers"]) + phase_mismatch_features.extend(result["phase_mismatch_features"]) + missing_phase_lines.update(result["missing_phase_lines"]) + zero_length_lines.update(result["zero_length_lines"]) + missing_impedance_lines.update(result["missing_impedance_lines"]) + missing_rating_transformers.update(result["missing_rating_transformers"]) + missing_impedance_transformers.update(result["missing_impedance_transformers"]) + missing_normal_state_switches.update(result["missing_normal_state_switches"]) + loop_lines.update(result["loop_lines"]) + switch_terminal_issues.update(result["switch_terminal_issues"]) + long_service_ecs.update(result["long_service_ecs"]) + ec_to_distance.update(result["ec_to_distance"]) + very_long_lines.update(result["very_long_lines"]) results, detected_tests, used_style_ids = _build_results( open_ended_lines, @@ -163,6 +157,7 @@ def _collect_feeder_mrids(substations: Dict, zone_mrids: List[str]) -> List[str] async def _fetch_feeder_network(feeder_mrid: str, rpc_channel): print(f"Fetching Feeder {feeder_mrid}") + start = time.perf_counter() client = NetworkConsumerClient(rpc_channel) result = await client.get_equipment_container( mrid=feeder_mrid, @@ -172,10 +167,73 @@ async def _fetch_feeder_network(feeder_mrid: str, rpc_channel): if result.was_failure: print(f"Failed: {result.thrown}") return None - print(f"Finished fetching Feeder {feeder_mrid}") + elapsed = time.perf_counter() - start + print(f"Finished fetching Feeder {feeder_mrid} ({elapsed:.2f}s)") return client.service +async def _process_feeder(feeder_mrid: str, rpc_channel): + feeder_start = time.perf_counter() + network = await _fetch_feeder_network(feeder_mrid, rpc_channel) + if 
network is None: + return None + + async def timed(label: str, coro): + start = time.perf_counter() + result = await coro + elapsed = time.perf_counter() - start + print(f"[{feeder_mrid}] {label}: {elapsed:.2f}s") + return result + + feeder_open, feeder_disconnected = await timed( + "connectivity_gaps", + cg.analyze_network(network, feeder_mrid), + ) + feeder_unserved, feeder_missing_lv, feeder_no_load = await timed( + "consumer_mapping", + cm.analyze_network(network, feeder_mrid), + ) + mismatch_features, missing_lines = await timed( + "phase_conductor", + pc.analyze_network(network), + ) + z_lines, i_lines, t_missing, t_imp_missing, s_missing = await timed( + "asset_attributes", + aa.analyze_network(network), + ) + loop_segments, switch_issues = await timed( + "protection_directionality", + pd.analyze_network(network), + ) + feeder_ecs, feeder_distances, feeder_lines = await timed( + "spatial_location", + sl.analyze_network(network), + ) + + total = time.perf_counter() - feeder_start + print(f"[{feeder_mrid}] total: {total:.2f}s") + + return { + "open_ended_lines": feeder_open, + "disconnected_lines": feeder_disconnected, + "unserved_ecs": feeder_unserved, + "missing_lv_feeder_ecs": feeder_missing_lv, + "no_load_transformers": feeder_no_load, + "phase_mismatch_features": mismatch_features, + "missing_phase_lines": missing_lines, + "zero_length_lines": z_lines, + "missing_impedance_lines": i_lines, + "missing_rating_transformers": t_missing, + "missing_impedance_transformers": t_imp_missing, + "missing_normal_state_switches": s_missing, + "loop_lines": loop_segments, + "switch_terminal_issues": switch_issues, + "long_service_ecs": feeder_ecs, + "ec_to_distance": feeder_distances, + "very_long_lines": feeder_lines, + } + + def _build_results( open_ended_lines: Set, disconnected_lines: Set, diff --git a/src/zepben/examples/studies/style_transformer_utilisation.json b/src/zepben/examples/studies/style_transformer_utilisation.json new file mode 100644 index 
0000000..00f7c38 --- /dev/null +++ b/src/zepben/examples/studies/style_transformer_utilisation.json @@ -0,0 +1,118 @@ +[ + { + "id": "max-demand-utilisation", + "name": "Max Demand Utilisation (Import)", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 16, + "circle-color": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_max_profile"], false] + ], + "rgb(160,160,160)", + ["step", ["get", "max_import_util_percent"], + "rgb(0,180,40)", 30, + "rgb(171,221,164)", 60, + "rgb(253,174,97)", 80, + "rgb(244,109,67)", 90, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "metadata": { + "zb:legend": { + "label": "Max Demand Utilisation (Import)" + } + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "max-demand-label", + "name": "Max Demand Utilisation Label", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_max_profile"], false] + ], + "", + ["concat", ["round", ["get", "max_import_util_percent"]], "%"] + ], + "text-font": ["Arial Unicode MS Regular"], + "text-size": 12 + }, + "paint": { + "text-color": "black", + "text-halo-blur": 2, + "text-halo-width": 2, + "text-halo-color": "white" + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "min-demand-utilisation", + "name": "Min Demand Utilisation (Export)", + "type": "circle", + "minzoom": 12, + "paint": { + "circle-radius": 16, + "circle-color": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_min_profile"], false] + ], + "rgb(160,160,160)", + ["step", ["get", "min_export_util_percent"], + "rgb(0,180,40)", 30, + "rgb(171,221,164)", 60, + "rgb(253,174,97)", 80, + "rgb(244,109,67)", 90, + "rgb(210,2,29)" + ] + ], + "circle-stroke-color": "white", + "circle-stroke-width": 1 + }, + "metadata": { + "zb:legend": { + "label": "Min Demand Utilisation 
(Export)" + } + }, + "filter" : ["==", ["get", "type"], "pt"] + }, + { + "id": "min-demand-label", + "name": "Min Demand Utilisation Label", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_min_profile"], false] + ], + "", + ["concat", ["round", ["get", "min_export_util_percent"]], "%"] + ], + "text-font": ["Arial Unicode MS Regular"], + "text-size": 12 + }, + "paint": { + "text-color": "black", + "text-halo-blur": 2, + "text-halo-width": 2, + "text-halo-color": "white" + }, + "filter" : ["==", ["get", "type"], "pt"] + } +] diff --git a/src/zepben/examples/studies/transformer_utilisation_by_demand.py b/src/zepben/examples/studies/transformer_utilisation_by_demand.py new file mode 100644 index 0000000..a9123f6 --- /dev/null +++ b/src/zepben/examples/studies/transformer_utilisation_by_demand.py @@ -0,0 +1,828 @@ +# Copyright 2025 Zeppelin Bend Pty Ltd +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +import argparse +from dataclasses import dataclass +import asyncio +import calendar +import json +import os +import sys +import threading +from datetime import date, datetime +from itertools import islice +from typing import List, Dict, Tuple, Callable, Any, Union, Type, Optional + +import requests +from geojson import FeatureCollection, Feature +from geojson.geometry import Geometry, LineString, Point +from zepben.eas.client.eas_client import EasClient +from zepben.eas.client.study import Study, Result, GeoJsonOverlay +from zepben.ewb import ( + PowerTransformer, + NetworkConsumerClient, + Feeder, + PowerSystemResource, + Location, + connect_with_token, + IncludedEnergizedContainers, +) + + +DEFAULT_CONFIG_PATH = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "config.json")) + + +def chunk(it, size): + it = iter(it) + return iter(lambda: tuple(islice(it, size)), ()) + + +def _build_base_url(host: str, port: Optional[int]) -> str: + host = host.rstrip("/") + if host.startswith("http://") or host.startswith("https://"): + base = host + else: + base = f"https://{host}" + + if port and port not in (80, 443): + # Only append if a port is not already present + if ":" not in base.split("//", 1)[-1]: + base = f"{base}:{port}" + return base + + +def _subtract_months(d: date, months: int) -> date: + year = d.year - (months // 12) + month = d.month - (months % 12) + if month <= 0: + year -= 1 + month += 12 + day = min(d.day, calendar.monthrange(year, month)[1]) + return date(year, month, day) + + +class LoadApiClient: + def __init__( + self, + base_url: str, + access_token: str, + system_tag: str = "EWB", + timeout_seconds: int = 10, + verify: Union[bool, str] = True, + ): + self.base_url = base_url.rstrip("/") + self.system_tag = system_tag + self.timeout_seconds = timeout_seconds + self.verify = verify + self.session = requests.Session() + self.session.headers.update( + { + "Authorization": f"Bearer {access_token}", + "Accept": "application/json", + } + 
) + + def close(self): + self.session.close() + + def get_max_demand(self, asset_id: str, from_date: str, to_date: str) -> Dict[str, Any]: + return self._get_profile(f"/ewb/energy/profiles/api/v1/max-demand/{asset_id}", from_date, to_date) + + def get_min_demand(self, asset_id: str, from_date: str, to_date: str) -> Dict[str, Any]: + return self._get_profile(f"/ewb/energy/profiles/api/v1/min-demand/{asset_id}", from_date, to_date) + + def _get_profile(self, path: str, from_date: str, to_date: str) -> Dict[str, Any]: + url = f"{self.base_url}{path}" + params = { + "fromDate": from_date, + "toDate": to_date, + "systemTag": self.system_tag, + } + response = self.session.get(url, params=params, timeout=self.timeout_seconds, verify=self.verify) + response.raise_for_status() + return response.json() + + +class LoadApiConfig: + def __init__( + self, + base_url: str, + access_token: str, + system_tag: str, + timeout_seconds: int, + verify: Union[bool, str], + ): + self.base_url = base_url + self.access_token = access_token + self.system_tag = system_tag + self.timeout_seconds = timeout_seconds + self.verify = verify + + +_thread_local = threading.local() + + +def _get_thread_client(config: LoadApiConfig) -> LoadApiClient: + client = getattr(_thread_local, "client", None) + if client is None: + client = LoadApiClient( + base_url=config.base_url, + access_token=config.access_token, + system_tag=config.system_tag, + timeout_seconds=config.timeout_seconds, + verify=config.verify, + ) + _thread_local.client = client + return client + + +def _iter_series_entries(series: Any): + if isinstance(series, dict): + yield series + return + if not isinstance(series, list): + return + for entry in series: + if isinstance(entry, dict): + yield entry + continue + if isinstance(entry, list): + for nested in entry: + if isinstance(nested, dict): + yield nested + + +def _first_dict(value: Any) -> Optional[Dict[str, Any]]: + if isinstance(value, dict): + return value + if isinstance(value, list) 
and value: + for item in value: + if isinstance(item, dict): + return item + return None + + +def _preview_payload(payload: Any, max_chars: int = 1200) -> str: + try: + text = json.dumps(payload, default=str) + except (TypeError, ValueError): + text = str(payload) + if len(text) > max_chars: + return f"{text[:max_chars]}... (truncated {len(text) - max_chars} chars)" + return text + + +@dataclass +class DebugConfig: + log_summary: bool = False + log_samples: int = 0 + log_api_samples: int = 0 + + +class DebugState: + def __init__(self, config: DebugConfig): + self.config = config + self._samples_left = max(config.log_samples, 0) + self._api_left = max(config.log_api_samples, 0) + self._lock = threading.Lock() + + def log_api(self, pt: PowerTransformer, max_profile: Any, min_profile: Any): + with self._lock: + if self._api_left <= 0: + return + self._api_left -= 1 + print(f"[debug] API payloads for {pt.mrid}:") + print(f"[debug] max_profile={_preview_payload(max_profile)}") + print(f"[debug] min_profile={_preview_payload(min_profile)}") + + def log_stats(self, pt: PowerTransformer, rating_va: float, max_data: Any, min_data: Any, stats: Dict[str, Any]): + with self._lock: + if self._samples_left <= 0: + return + self._samples_left -= 1 + sample = { + "rating_va": rating_va, + "rating_kva": stats.get("transformer_rating_kva"), + "has_max_profile": stats.get("has_max_profile"), + "has_min_profile": stats.get("has_min_profile"), + "max_import_util_percent": stats.get("max_import_util_percent"), + "max_export_util_percent": stats.get("max_export_util_percent"), + "min_export_util_percent": stats.get("min_export_util_percent"), + "min_import_util_percent": stats.get("min_import_util_percent"), + "max_demand_date": stats.get("max_demand_date"), + "min_demand_date": stats.get("min_demand_date"), + "max_demand_season": stats.get("max_demand_season"), + "min_demand_season": stats.get("min_demand_season"), + } + print(f"[debug] Stats for {pt.mrid}: {sample}") + print(f"[debug] 
max_data={max_data}") + print(f"[debug] min_data={min_data}") + + +def _extract_maximums(payload: Dict[str, Any]) -> Optional[Dict[str, Any]]: + if payload is None: + return None + if isinstance(payload, list): + payload = _first_dict(payload) + if payload is None: + return None + if not isinstance(payload, dict): + return None + if payload.get("error"): + return None + + results = payload.get("results") or [] + if not results: + return None + + for result in results: + series = result.get("series") if isinstance(result, dict) else None + if not series: + continue + + for entry in _iter_series_entries(series): + energy = _first_dict(entry.get("energy")) if isinstance(entry, dict) else None + if energy is None and isinstance(entry, dict) and ("maximums" in entry or "date" in entry): + energy = entry + if not isinstance(energy, dict): + continue + + maximums = energy.get("maximums") or {} + + return { + "date": energy.get("date"), + "kwIn": maximums.get("kwIn"), + "kwOut": maximums.get("kwOut"), + "kwNet": maximums.get("kwNet"), + "kvaNet": maximums.get("kvaNet"), + "pf": maximums.get("pf"), + } + + return None + + +def _safe_float(value: Any) -> Optional[float]: + if value is None: + return None + try: + return float(value) + except (TypeError, ValueError): + return None + + +def _util_percent(value_kw: Optional[float], rating_va: float) -> Optional[float]: + if value_kw is None or rating_va <= 0: + return None + return round((value_kw / rating_va) * 100, 1) + + +async def main(): + zone_mrids, feeder_mrids, mode, config_path, max_workers, debug_cfg = _parse_args(sys.argv[1:]) + with open(config_path) as f: + c = json.loads(f.read()) + print(f"Start time: {datetime.now()}") + + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", 
False)), + ) + if mode == "zones": + client = NetworkConsumerClient(rpc_channel) + hierarchy = (await client.get_network_hierarchy()).throw_on_error() + substations = hierarchy.value.substations + + print(f"Collecting feeders from zones {', '.join(zone_mrids)}.") + feeder_mrids = [] + for zone_mrid in zone_mrids: + if zone_mrid in substations: + for feeder in substations[zone_mrid].feeders: + feeder_mrids.append(feeder.mrid) + else: + print(f"Running for feeders {', '.join(feeder_mrids)}.") + + print(f"Feeders to be processed: {', '.join(feeder_mrids)}") + + transformers_by_id: Dict[str, PowerTransformer] = {} + + # Process the feeders in batches of 3, using asyncio, for performance + batches = chunk(feeder_mrids, 3) + for feeders in batches: + futures = [] + rpc_channel = connect_with_token( + host=c["host"], + access_token=c["access_token"], + rpc_port=c["rpc_port"], + ca_filename=c.get("ca_filename"), + timeout_seconds=c.get("timeout_seconds", 5), + debug=bool(c.get("debug", False)), + skip_connection_test=bool(c.get("skip_connection_test", False)), + ) + print(f"Processing feeders {', '.join(feeders)}") + for feeder_mrid in feeders: + futures.append(asyncio.ensure_future(fetch_feeder_transformers(feeder_mrid, rpc_channel))) + + for future in futures: + transformers = await future + for pt in transformers: + transformers_by_id[pt.mrid] = pt + + all_transformers = list(transformers_by_id.values()) + print(f"Collected {len(all_transformers)} transformers") + + today = date.today() + start_date = _subtract_months(today, 18) + from_date = start_date.isoformat() + to_date = today.isoformat() + print(f"Load API date range: {from_date} to {to_date}") + + base_url = _build_base_url(c["host"], c.get("rpc_port")) + system_tag = c.get("load_api_system_tag", "EWB") + timeout_seconds = c.get("timeout_seconds", 10) + verify = c.get("ca_filename") or True + season_hemisphere = c.get("season_hemisphere", "southern") + + load_config = LoadApiConfig( + base_url=base_url, + 
access_token=c["access_token"], + system_tag=system_tag, + timeout_seconds=timeout_seconds, + verify=verify, + ) + + debug_state = ( + DebugState(debug_cfg) + if debug_cfg.log_summary or debug_cfg.log_samples or debug_cfg.log_api_samples + else None + ) + + transformer_to_stats: Dict[str, Dict[str, Any]] = {} + semaphore = asyncio.Semaphore(max_workers) + tasks = [ + asyncio.create_task( + _fetch_transformer_utilisation_async( + pt, load_config, from_date, to_date, season_hemisphere, semaphore, debug_state + ) + ) + for pt in all_transformers + ] + completed = 0 + for future in asyncio.as_completed(tasks): + mrid, stats = await future + transformer_to_stats[mrid] = stats + completed += 1 + if completed % 25 == 0 or completed == len(tasks): + print(f"Load API progress: {completed}/{len(tasks)}") + + print(f"Created Study for {len(all_transformers)} transformers") + + styles = json.load(open("style_transformer_utilisation.json", "r")) + max_feature_collection = to_geojson_feature_collection( + all_transformers, + { + PowerTransformer: { + "max_import_util_percent": _stat_getter(transformer_to_stats, "max_import_util_percent"), + "max_import_label": _stat_getter(transformer_to_stats, "max_import_label"), + "max_import_util_kw_percent": _stat_getter(transformer_to_stats, "max_import_util_kw_percent"), + "max_import_kw": _stat_getter(transformer_to_stats, "max_import_kw"), + "max_import_kva": _stat_getter(transformer_to_stats, "max_import_kva"), + "max_demand_date": _stat_getter(transformer_to_stats, "max_demand_date"), + "max_demand_season": _stat_getter(transformer_to_stats, "max_demand_season"), + "transformer_rating_va": _stat_getter(transformer_to_stats, "transformer_rating_va"), + "transformer_rating_kva": _stat_getter(transformer_to_stats, "transformer_rating_kva"), + "has_max_profile": _stat_getter(transformer_to_stats, "has_max_profile"), + "type": lambda x: "pt", + } + }, + ) + min_feature_collection = to_geojson_feature_collection( + all_transformers, + { + 
PowerTransformer: { + "min_export_util_percent": _stat_getter(transformer_to_stats, "min_export_util_percent"), + "min_export_label": _stat_getter(transformer_to_stats, "min_export_label"), + "min_export_util_kw_percent": _stat_getter(transformer_to_stats, "min_export_util_kw_percent"), + "min_export_kw": _stat_getter(transformer_to_stats, "min_export_kw"), + "min_export_kva": _stat_getter(transformer_to_stats, "min_export_kva"), + "min_demand_date": _stat_getter(transformer_to_stats, "min_demand_date"), + "min_demand_season": _stat_getter(transformer_to_stats, "min_demand_season"), + "transformer_rating_va": _stat_getter(transformer_to_stats, "transformer_rating_va"), + "transformer_rating_kva": _stat_getter(transformer_to_stats, "transformer_rating_kva"), + "has_min_profile": _stat_getter(transformer_to_stats, "has_min_profile"), + "type": lambda x: "pt", + } + }, + ) + + if debug_state and debug_state.config.log_summary: + missing_location = 0 + for pt in all_transformers: + points = list(pt.location.points) if pt.location is not None else [] + if not points: + missing_location += 1 + + stats_values = list(transformer_to_stats.values()) + has_max_profile = sum(1 for stats in stats_values if stats.get("has_max_profile")) + has_min_profile = sum(1 for stats in stats_values if stats.get("has_min_profile")) + rating_zero = sum(1 for stats in stats_values if stats.get("transformer_rating_va") in (0, 0.0)) + + max_import_values = [ + stats.get("max_import_util_percent") + for stats in stats_values + if stats.get("has_max_profile") and stats.get("max_import_util_percent") is not None + ] + min_export_values = [ + stats.get("min_export_util_percent") + for stats in stats_values + if stats.get("has_min_profile") and stats.get("min_export_util_percent") is not None + ] + + print( + "[debug] Summary:" + f" total_transformers={len(all_transformers)}" + f" max_features={len(max_feature_collection.features)}" + f" min_features={len(min_feature_collection.features)}" + f" 
missing_location={missing_location}" + f" rating_zero={rating_zero}" + f" has_max_profile={has_max_profile}" + f" has_min_profile={has_min_profile}" + ) + if max_import_values: + print( + "[debug] Max import utilisation (percent):" + f" min={min(max_import_values)}" + f" max={max(max_import_values)}" + ) + if min_export_values: + print( + "[debug] Min export utilisation (percent):" + f" min={min(min_export_values)}" + f" max={max(min_export_values)}" + ) + sample_feature = None + if max_feature_collection.features: + sample_feature = max_feature_collection.features[0] + elif min_feature_collection.features: + sample_feature = min_feature_collection.features[0] + if sample_feature is not None: + sample_props = getattr(sample_feature, "properties", None) + if sample_props is None and isinstance(sample_feature, dict): + sample_props = sample_feature.get("properties") + if sample_props is not None: + print(f"[debug] Sample feature properties: {sample_props}") + + results = [] + if max_feature_collection.features: + results.append( + Result( + name="Max Demand Utilisation (Import)", + geo_json_overlay=GeoJsonOverlay( + data=max_feature_collection, + styles=["max-demand-utilisation", "max-demand-label"], + ), + ) + ) + if min_feature_collection.features: + results.append( + Result( + name="Min Demand Utilisation (Export)", + geo_json_overlay=GeoJsonOverlay( + data=min_feature_collection, + styles=["min-demand-utilisation", "min-demand-label"], + ), + ) + ) + + if not results: + print("No transformer features to display (missing locations). 
Study upload skipped.") + return + + scope_mrids = zone_mrids if mode == "zones" else feeder_mrids + scope_label = ", ".join(scope_mrids) + scope_tag = "-".join(scope_mrids) + + eas_client = EasClient(host=c["host"], port=c["rpc_port"], protocol="https", access_token=c["access_token"]) + print(f"Uploading Study for {mode} {scope_label} ...") + await eas_client.async_upload_study( + Study( + name=f"Transformer utilisation (import/export) ({scope_label})", + description=( + "Max-demand import utilisation (kwIn) and min-demand export utilisation (kwOut) " + "for the last 18 months via the Energy Profiles API." + ), + tags=["transformer_utilisation", "load_api", scope_tag], + results=results, + styles=styles, + ) + ) + await eas_client.aclose() + print("Uploaded Study") + + print(f"Finish time: {datetime.now()}") + + +async def fetch_feeder_transformers( + feeder_mrid: str, + rpc_channel, +) -> List[PowerTransformer]: + print(f"Fetching Feeder {feeder_mrid}") + client = NetworkConsumerClient(rpc_channel) + + result = ( + await client.get_equipment_container( + mrid=feeder_mrid, + expected_class=Feeder, + include_energized_containers=IncludedEnergizedContainers.LV_FEEDERS, + ) + ) + if result.was_failure: + print(f"Failed: {result.thrown}") + return [] + + network = client.service + print(f"Finished fetching Feeder {feeder_mrid}") + + return [pt for pt in network.objects(PowerTransformer)] + + +def fetch_transformer_utilisation( + pt: PowerTransformer, + load_config: LoadApiConfig, + from_date: str, + to_date: str, + season_hemisphere: str, + debug_state: Optional[DebugState] = None, +) -> Dict[str, Any]: + load_client = _get_thread_client(load_config) + rating_va = _transformer_rating_va(pt) + rating_kva = rating_va / 1000.0 if rating_va else 0.0 + + max_profile = None + min_profile = None + + try: + max_profile = load_client.get_max_demand(pt.mrid, from_date, to_date) + except requests.RequestException as ex: + print(f"Max-demand request failed for {pt.mrid}: {ex}") + 
+ try: + min_profile = load_client.get_min_demand(pt.mrid, from_date, to_date) + except requests.RequestException as ex: + print(f"Min-demand request failed for {pt.mrid}: {ex}") + + max_data = _extract_maximums(max_profile) if max_profile else None + min_data = _extract_maximums(min_profile) if min_profile else None + + max_kw_in = _safe_float(max_data.get("kwIn")) if max_data else None + max_kw_out = _safe_float(max_data.get("kwOut")) if max_data else None + max_kva_net = _safe_float(max_data.get("kvaNet")) if max_data else None + min_kw_in = _safe_float(min_data.get("kwIn")) if min_data else None + min_kw_out = _safe_float(min_data.get("kwOut")) if min_data else None + min_kva_net = _safe_float(min_data.get("kvaNet")) if min_data else None + + max_import_util_kw = _util_percent(max_kw_in, rating_kva) + min_export_util_kw = _util_percent(abs(min_kw_out) if min_kw_out is not None else None, rating_kva) + max_import_util = _util_percent(max_kva_net, rating_kva) + min_export_util = _util_percent(abs(min_kva_net) if min_kva_net is not None else None, rating_kva) + max_season = _season_from_date(max_data.get("date") if max_data else None, season_hemisphere) + min_season = _season_from_date(min_data.get("date") if min_data else None, season_hemisphere) + + stats = { + "max_import_util_percent": max_import_util or 0, + "min_export_util_percent": min_export_util or 0, + "max_import_label": _percent_label(max_import_util), + "min_export_label": _percent_label(min_export_util), + "max_import_util_kw_percent": max_import_util_kw or 0, + "min_export_util_kw_percent": min_export_util_kw or 0, + "max_import_kw": max_kw_in or 0, + "min_export_kw": abs(min_kw_out) if min_kw_out is not None else 0, + "max_import_kva": max_kva_net or 0, + "min_export_kva": abs(min_kva_net) if min_kva_net is not None else 0, + "max_demand_date": max_data.get("date") if max_data else None, + "min_demand_date": min_data.get("date") if min_data else None, + "max_demand_season": max_season, + 
"min_demand_season": min_season, + "transformer_rating_va": rating_va, + "transformer_rating_kva": rating_kva, + "has_max_profile": max_data is not None, + "has_min_profile": min_data is not None, + "max_export_util_percent": _util_percent(abs(max_kw_out) if max_kw_out is not None else None, rating_kva) or 0, + "min_import_util_percent": _util_percent(min_kw_in, rating_kva) or 0, + } + + if debug_state: + debug_state.log_api(pt, max_profile, min_profile) + debug_state.log_stats(pt, rating_va, max_data, min_data, stats) + + return stats + + +async def _fetch_transformer_utilisation_async( + pt: PowerTransformer, + load_config: LoadApiConfig, + from_date: str, + to_date: str, + season_hemisphere: str, + semaphore: asyncio.Semaphore, + debug_state: Optional[DebugState] = None, +) -> Tuple[str, Dict[str, Any]]: + async with semaphore: + stats = await asyncio.to_thread( + fetch_transformer_utilisation, + pt, + load_config, + from_date, + to_date, + season_hemisphere, + debug_state, + ) + return pt.mrid, stats + + +def _percent_label(value: Optional[float]) -> str: + return "n/a" if value is None else f"{value}%" + + +def _season_from_date(value: Optional[str], hemisphere: str = "southern") -> Optional[str]: + if not value: + return None + try: + parsed = date.fromisoformat(value) + except ValueError: + return None + + month = parsed.month + hemi = hemisphere.strip().lower() if hemisphere else "southern" + is_northern = hemi.startswith("n") + + if is_northern: + if month in (12, 1, 2): + return "Winter" + if month in (3, 4, 5): + return "Spring" + if month in (6, 7, 8): + return "Summer" + return "Autumn" + + if month in (12, 1, 2): + return "Summer" + if month in (3, 4, 5): + return "Autumn" + if month in (6, 7, 8): + return "Winter" + return "Spring" + + +def _transformer_rating_va(pt: PowerTransformer) -> float: + ratings: List[float] = [] + for end in pt.ends: + if end.rated_s is not None: + ratings.append(end.rated_s) + else: + for rating in end.s_ratings: + if 
rating and rating.rated_s is not None: + ratings.append(rating.rated_s) + return max(ratings) if ratings else 0 + + +def _stat_getter(stats: Dict[str, Dict[str, Any]], key: str, default: Any = None): + def fun(pt: PowerTransformer): + return stats.get(pt.mrid, {}).get(key, default) + + return fun + + +def to_geojson_feature_collection( + psrs: List[PowerSystemResource], + class_to_properties: Dict[Type, Dict[str, Callable[[Any], Any]]], +) -> FeatureCollection: + + features = [] + for psr in psrs: + properties_map = class_to_properties.get(type(psr)) + if properties_map is None: + continue + feature = to_geojson_feature(psr, properties_map) + if feature is not None: + features.append(feature) + + return FeatureCollection(features) + + +def to_geojson_feature( + psr: PowerSystemResource, + property_map: Dict[str, Callable[[PowerSystemResource], Any]], +) -> Union[Feature, None]: + + geometry = to_geojson_geometry(psr.location) + if geometry is None: + return None + + properties = {k: f(psr) for (k, f) in property_map.items()} + return Feature(psr.mrid, geometry, properties) + + +def to_geojson_geometry(location: Location) -> Union[Geometry, None]: + points = list(location.points) if location is not None else [] + if len(points) > 1: + return LineString([(point.x_position, point.y_position) for point in points]) + elif len(points) == 1: + return Point((points[0].x_position, points[0].y_position)) + else: + return None + + +def _parse_args(argv: List[str]) -> Tuple[List[str], List[str], str, str, int, DebugConfig]: + parser = argparse.ArgumentParser( + description="Generate a transformer utilisation study for one or more zones or feeders.", + ) + parser.add_argument( + "--mode", + choices=["zones", "feeders"], + default="zones", + help="Interpret positional values as zones or feeders (default: zones).", + ) + parser.add_argument( + "--zones", + default="CPM", + help="Comma-separated zone codes (default: CPM).", + ) + parser.add_argument( + "--feeders", + default="", + 
help="Comma-separated feeder MRIDs (used when --mode feeders).", + ) + parser.add_argument( + "--config", + default=DEFAULT_CONFIG_PATH, + help=f"Path to config.json (default: {DEFAULT_CONFIG_PATH}).", + ) + parser.add_argument( + "--max-workers", + type=int, + default=None, + help="Maximum concurrent load API requests (default: 8).", + ) + parser.add_argument( + "--log-summary", + action="store_true", + help="Log summary counts (profiles, ratings, locations).", + ) + parser.add_argument( + "--log-samples", + type=int, + default=0, + help="Log computed stats for the first N transformers.", + ) + parser.add_argument( + "--log-api-samples", + type=int, + default=0, + help="Log raw max/min API payloads for the first N transformers.", + ) + parser.add_argument( + "ids", + nargs="*", + help="Zone codes or feeder MRIDs (positional values override --zones/--feeders).", + ) + args = parser.parse_args(argv) + + def _split_values(values: Any) -> List[str]: + if isinstance(values, list): + items = values + elif isinstance(values, str): + items = values.split(",") + else: + items = [] + return [item.strip() for item in items if item and item.strip()] + + max_workers = args.max_workers or 8 + debug_cfg = DebugConfig( + log_summary=bool(args.log_summary), + log_samples=int(args.log_samples or 0), + log_api_samples=int(args.log_api_samples or 0), + ) + + if args.mode == "feeders": + feeder_mrids = _split_values(args.ids) or _split_values(args.feeders) + if not feeder_mrids: + raise ValueError("At least one feeder MRID is required.") + return [], feeder_mrids, args.mode, args.config, max_workers, debug_cfg + + zone_mrids = _split_values(args.ids) or _split_values(args.zones) + + if not zone_mrids: + raise ValueError("At least one zone code is required.") + + return zone_mrids, [], args.mode, args.config, max_workers, debug_cfg + + +if __name__ == "__main__": + asyncio.run(main()) From e9c428a298ab31163c94a0b78d5e7740d5804165 Mon Sep 17 00:00:00 2001 From: adamwhite629 
<102849144+adamwhite629@users.noreply.github.com> Date: Wed, 4 Feb 2026 23:45:36 +1100 Subject: [PATCH 3/4] add-seasonal-transformer-utilisation --- .../style_transformer_utilisation.json | 182 +++++++++++++++++- .../transformer_utilisation_by_demand.py | 26 ++- 2 files changed, 198 insertions(+), 10 deletions(-) diff --git a/src/zepben/examples/studies/style_transformer_utilisation.json b/src/zepben/examples/studies/style_transformer_utilisation.json index 00f7c38..88e5fd0 100644 --- a/src/zepben/examples/studies/style_transformer_utilisation.json +++ b/src/zepben/examples/studies/style_transformer_utilisation.json @@ -5,7 +5,7 @@ "type": "circle", "minzoom": 12, "paint": { - "circle-radius": 16, + "circle-radius": 18, "circle-color": [ "case", ["any", @@ -31,6 +31,90 @@ }, "filter" : ["==", ["get", "type"], "pt"] }, + { + "id": "max-demand-season-shape", + "name": "Max Demand Season Shape", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_max_profile"], false] + ], + "", + ["match", ["get", "max_demand_season"], + "Summer", "▲", + "Autumn", "◆", + "Winter", "■", + "Spring", "●", + "" + ] + ], + "text-font": ["Arial Unicode MS Regular"], + "text-size": [ + "match", ["get", "max_demand_season"], + "Winter", 76.5, + 102 + ], + "text-offset": [0, 0], + "text-allow-overlap": true, + "text-ignore-placement": true + }, + "paint": { + "text-color": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_max_profile"], false] + ], + "rgb(160,160,160)", + ["step", ["get", "max_import_util_percent"], + "rgb(0,180,40)", 30, + "rgb(171,221,164)", 60, + "rgb(253,174,97)", 80, + "rgb(244,109,67)", 90, + "rgb(210,2,29)" + ] + ], + "text-halo-color": "white", + "text-halo-width": 1.5, + "text-halo-blur": 1 + }, + "metadata": { + "zb:legend": { + "label": "Max Demand Season", + "states": [ + { + "label": "▲ Summer", + "properties": { + 
"max_demand_season": "Summer" + } + }, + { + "label": "◆ Autumn", + "properties": { + "max_demand_season": "Autumn" + } + }, + { + "label": "■ Winter", + "properties": { + "max_demand_season": "Winter" + } + }, + { + "label": "● Spring", + "properties": { + "max_demand_season": "Spring" + } + } + ] + } + }, + "filter" : ["==", ["get", "type"], "pt"] + }, { "id": "max-demand-label", "name": "Max Demand Utilisation Label", @@ -47,7 +131,10 @@ ["concat", ["round", ["get", "max_import_util_percent"]], "%"] ], "text-font": ["Arial Unicode MS Regular"], - "text-size": 12 + "text-size": 12, + "text-offset": [0, 0], + "text-allow-overlap": true, + "text-ignore-placement": true }, "paint": { "text-color": "black", @@ -63,7 +150,7 @@ "type": "circle", "minzoom": 12, "paint": { - "circle-radius": 16, + "circle-radius": 18, "circle-color": [ "case", ["any", @@ -89,6 +176,90 @@ }, "filter" : ["==", ["get", "type"], "pt"] }, + { + "id": "min-demand-season-shape", + "name": "Min Demand Season Shape", + "type": "symbol", + "minzoom": 12, + "layout": { + "text-field": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_min_profile"], false] + ], + "", + ["match", ["get", "min_demand_season"], + "Summer", "▲", + "Autumn", "◆", + "Winter", "■", + "Spring", "●", + "" + ] + ], + "text-font": ["Arial Unicode MS Regular"], + "text-size": [ + "match", ["get", "min_demand_season"], + "Winter", 76.5, + 102 + ], + "text-offset": [0, 0], + "text-allow-overlap": true, + "text-ignore-placement": true + }, + "paint": { + "text-color": [ + "case", + ["any", + ["==", ["get", "transformer_rating_va"], 0], + ["==", ["get", "has_min_profile"], false] + ], + "rgb(160,160,160)", + ["step", ["get", "min_export_util_percent"], + "rgb(0,180,40)", 30, + "rgb(171,221,164)", 60, + "rgb(253,174,97)", 80, + "rgb(244,109,67)", 90, + "rgb(210,2,29)" + ] + ], + "text-halo-color": "white", + "text-halo-width": 1.5, + "text-halo-blur": 1 + }, + "metadata": { + "zb:legend": { + 
"label": "Min Demand Season", + "states": [ + { + "label": "▲ Summer", + "properties": { + "min_demand_season": "Summer" + } + }, + { + "label": "◆ Autumn", + "properties": { + "min_demand_season": "Autumn" + } + }, + { + "label": "■ Winter", + "properties": { + "min_demand_season": "Winter" + } + }, + { + "label": "● Spring", + "properties": { + "min_demand_season": "Spring" + } + } + ] + } + }, + "filter" : ["==", ["get", "type"], "pt"] + }, { "id": "min-demand-label", "name": "Min Demand Utilisation Label", @@ -105,7 +276,10 @@ ["concat", ["round", ["get", "min_export_util_percent"]], "%"] ], "text-font": ["Arial Unicode MS Regular"], - "text-size": 12 + "text-size": 12, + "text-offset": [0, 0], + "text-allow-overlap": true, + "text-ignore-placement": true }, "paint": { "text-color": "black", diff --git a/src/zepben/examples/studies/transformer_utilisation_by_demand.py b/src/zepben/examples/studies/transformer_utilisation_by_demand.py index a9123f6..f57738e 100644 --- a/src/zepben/examples/studies/transformer_utilisation_by_demand.py +++ b/src/zepben/examples/studies/transformer_utilisation_by_demand.py @@ -280,7 +280,9 @@ def _util_percent(value_kw: Optional[float], rating_va: float) -> Optional[float async def main(): - zone_mrids, feeder_mrids, mode, config_path, max_workers, debug_cfg = _parse_args(sys.argv[1:]) + zone_mrids, feeder_mrids, mode, config_path, max_workers, debug_cfg, seasonal_shapes_flag = _parse_args( + sys.argv[1:] + ) with open(config_path) as f: c = json.loads(f.read()) print(f"Start time: {datetime.now()}") @@ -348,6 +350,7 @@ async def main(): timeout_seconds = c.get("timeout_seconds", 10) verify = c.get("ca_filename") or True season_hemisphere = c.get("season_hemisphere", "southern") + seasonal_shapes = bool(c.get("seasonal_shapes", False)) or seasonal_shapes_flag load_config = LoadApiConfig( base_url=base_url, @@ -479,13 +482,19 @@ async def main(): print(f"[debug] Sample feature properties: {sample_props}") results = [] + max_styles = 
["max-demand-utilisation", "max-demand-label"] + min_styles = ["min-demand-utilisation", "min-demand-label"] + if seasonal_shapes: + max_styles = ["max-demand-season-shape", "max-demand-label"] + min_styles = ["min-demand-season-shape", "min-demand-label"] + if max_feature_collection.features: results.append( Result( name="Max Demand Utilisation (Import)", geo_json_overlay=GeoJsonOverlay( data=max_feature_collection, - styles=["max-demand-utilisation", "max-demand-label"], + styles=max_styles, ), ) ) @@ -495,7 +504,7 @@ async def main(): name="Min Demand Utilisation (Export)", geo_json_overlay=GeoJsonOverlay( data=min_feature_collection, - styles=["min-demand-utilisation", "min-demand-label"], + styles=min_styles, ), ) ) @@ -739,7 +748,7 @@ def to_geojson_geometry(location: Location) -> Union[Geometry, None]: return None -def _parse_args(argv: List[str]) -> Tuple[List[str], List[str], str, str, int, DebugConfig]: +def _parse_args(argv: List[str]) -> Tuple[List[str], List[str], str, str, int, DebugConfig, bool]: parser = argparse.ArgumentParser( description="Generate a transformer utilisation study for one or more zones or feeders.", ) @@ -787,6 +796,11 @@ def _parse_args(argv: List[str]) -> Tuple[List[str], List[str], str, str, int, D default=0, help="Log raw max/min API payloads for the first N transformers.", ) + parser.add_argument( + "--seasonal-shapes", + action="store_true", + help="Use season shapes (Summer/Autumn/Winter/Spring) for icons.", + ) parser.add_argument( "ids", nargs="*", @@ -814,14 +828,14 @@ def _split_values(values: Any) -> List[str]: feeder_mrids = _split_values(args.ids) or _split_values(args.feeders) if not feeder_mrids: raise ValueError("At least one feeder MRID is required.") - return [], feeder_mrids, args.mode, args.config, max_workers, debug_cfg + return [], feeder_mrids, args.mode, args.config, max_workers, debug_cfg, bool(args.seasonal_shapes) zone_mrids = _split_values(args.ids) or _split_values(args.zones) if not zone_mrids: raise 
ValueError("At least one zone code is required.") - return zone_mrids, [], args.mode, args.config, max_workers, debug_cfg + return zone_mrids, [], args.mode, args.config, max_workers, debug_cfg, bool(args.seasonal_shapes) if __name__ == "__main__": From d43fdba97f7d3ced03de67db5919825ce812513e Mon Sep 17 00:00:00 2001 From: adamwhite629 <102849144+adamwhite629@users.noreply.github.com> Date: Thu, 5 Feb 2026 17:10:06 +1100 Subject: [PATCH 4/4] added-README --- src/zepben/examples/studies/README.md | 54 +++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 src/zepben/examples/studies/README.md diff --git a/src/zepben/examples/studies/README.md b/src/zepben/examples/studies/README.md new file mode 100644 index 0000000..25d5c76 --- /dev/null +++ b/src/zepben/examples/studies/README.md @@ -0,0 +1,54 @@ +# Studies + +This folder contains runnable study scripts that fetch network data, generate GeoJSON overlays, and upload results to EAS. +Most scripts accept a zone code (e.g. `CPM`) and use the shared config at `src/zepben/examples/config.json`. + +## Quick start + +1. Ensure `src/zepben/examples/config.json` has valid `host`, `access_token`, and `rpc_port`. +2. Run a study from this folder, for example: + +```bash +python transformer_utilisation_by_demand.py CPM +``` + +## Common patterns + +- **Zones vs feeders**: Some scripts support explicit feeder MRIDs. For transformer utilisation, use: + +```bash +python transformer_utilisation_by_demand.py --mode feeders CPM3B3 +``` + +- **Config override**: Most scripts accept `--config` to point at a different config file. +- **Styles**: Each study uses a companion `style_*.json` file to control map rendering. +- **Outputs**: Studies upload results to EAS and will log progress in the terminal. + +## Data quality studies + +Data quality scripts live in `data_quality_studies/`. 
See the dedicated README for usage: + +- `src/zepben/examples/studies/data_quality_studies/README.md` + +## Troubleshooting + +- **Timeouts**: Large zones can take several minutes. Use a longer shell timeout or reduce concurrency if available. +- **404s from Load API**: Some assets may not have demand profiles; the scripts continue and mark those as missing. +- **No features uploaded**: If locations are missing, the study skips upload and logs a message. + +## Scripts in this folder + +Representative studies: +- `transformer_utilisation_by_demand.py` +- `pv_percent_by_transformer.py` +- `suspect_end_of_line.py` +- `transformer_downstream_density.py` +- `customer_distance_to_transformer.py` +- `loop_impedance_by_energy_consumer.py` +- `tap_changer_info_by_transformer.py` + +See each script’s header and help output for specifics: + +```bash +python