60 changes: 49 additions & 11 deletions atlite/convert.py
@@ -565,14 +565,21 @@
ds: xr.Dataset,
turbine: TurbineConfig,
interpolation_method: Literal["logarithmic", "power"],
windspeed_bias_correction: xr.DataArray | None,
) -> xr.DataArray:
"""
Convert wind speeds for turbine to wind energy generation.
"""
V, POW, hub_height, P = itemgetter("V", "POW", "hub_height", "P")(turbine)

from_height = None
if windspeed_bias_correction is not None:
ds, from_height = windm.apply_windspeed_bias_correction(
ds, windspeed_bias_correction
)

wnd_hub = windm.extrapolate_wind_speed(
ds, to_height=hub_height, method=interpolation_method
ds, to_height=hub_height, method=interpolation_method, from_height=from_height
)

def apply_power_curve(da):
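
For orientation, here is a hypothetical sketch of what the bias-correction call near the top of this hunk amounts to — it is not atlite's actual `windm.apply_windspeed_bias_correction`: the correction factor scales the wind-speed variable at the factor's reference height (read from `.attrs["height"]`), and that height is returned so `extrapolate_wind_speed` starts from the corrected level. The `wnd{height}m` variable name is an assumption.

```python
# Hypothetical sketch only -- not the real atlite.wind implementation.
import xarray as xr


def apply_windspeed_bias_correction_sketch(
    ds: xr.Dataset, correction: xr.DataArray
) -> tuple[xr.Dataset, float]:
    # The correction factor carries its reference height in .attrs["height"],
    # e.g. 100 for a factor derived from 100 m mean wind speeds.
    height = correction.attrs["height"]
    name = f"wnd{height}m"  # assumed naming of the wind-speed variable at that height
    # Scale the wind speed at the reference height by the (multiplicative) factor.
    ds = ds.assign({name: ds[name] * correction})
    # Return the corrected dataset and the height to extrapolate from.
    return ds, height
```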
@@ -598,6 +605,7 @@
smooth: bool | dict = False,
add_cutout_windspeed: bool = False,
interpolation_method: Literal["logarithmic", "power"] = "logarithmic",
windspeed_bias_correction: bool | xr.DataArray | None = None,
**params,
) -> xr.DataArray:
"""
@@ -609,17 +617,20 @@
Parameters
----------
turbine : str or dict
A turbineconfig dictionary with the keys 'hub_height' for the
hub height and 'V', 'POW' defining the power curve.
Alternatively a str refering to a local or remote turbine configuration
as accepted by atlite.resource.get_windturbineconfig(). Locally stored turbine
configurations can also be modified with this function. E.g. to setup a different hub
height from the one used in the yaml file,one would write
"turbine=get_windturbineconfig(“NREL_ReferenceTurbine_5MW_offshore”)|{“hub_height”:120}"
A turbineconfig dictionary with the keys 'hub_height' for the hub height
and 'V', 'POW' defining the power curve. Alternatively a str referring to
a local or remote turbine configuration as accepted by
atlite.resource.get_windturbineconfig(). Locally stored turbine
configurations can also be modified with this function. E.g. to set up a
different hub height from the one used in the yaml file, one would write
>>> turbine = (
>>> get_windturbineconfig("NREL_ReferenceTurbine_5MW_offshore")
>>> | {"hub_height": 120}
>>> )
smooth : bool or dict
If True smooth power curve with a gaussian kernel as
determined for the Danish wind fleet to Delta_v = 1.27 and
sigma = 2.29. A dict allows to tune these values.
If True, smooth the power curve with a Gaussian kernel as determined for
the Danish wind fleet to Delta_v = 1.27 and sigma = 2.29. A dict allows
tuning these values.
add_cutout_windspeed : bool
If True and in case the power curve does not end with a zero, will add zero power
output at the highest wind speed in the power curve. If False, a warning will be
@@ -628,6 +639,14 @@
interpolation_method : {"logarithmic", "power"}
Law to interpolate wind speed to turbine hub height. Refer to
:py:func:`atlite.wind.extrapolate_wind_speed`.
windspeed_bias_correction : bool or DataArray, optional
Correction factor that is applied to the wind speed at height
`.attrs["height"]`. Such a correction factor can be calculated using
:py:func:`atlite.wind.calculate_windspeed_bias_correction` with a raster
dataset of mean wind speeds.
If True, the scaling factor is taken from 'wnd_bias_correction' in `cutout`
(or a ValueError is raised).
If None, a scaling factor is applied only if it exists in `cutout`.
If False, no bias correction is applied.

Note
----
@@ -645,10 +664,29 @@
if smooth:
turbine = windturbine_smooth(turbine, params=smooth)

if isinstance(windspeed_bias_correction, xr.DataArray):
# Front-load coordinate alignment cost
windspeed_bias_correction = windspeed_bias_correction.reindex_like(cutout.data)
elif windspeed_bias_correction is None:
windspeed_bias_correction = cutout.data.get("wnd_bias_correction")
elif windspeed_bias_correction is True:
try:
windspeed_bias_correction = cutout.data["wnd_bias_correction"]
except KeyError:
raise ValueError(
"Wind speed bias correction is required, but cutout does not contain "
"scaling factor: 'wnd_bias_correction'.\n"
"Regenerate the cutout or provide the scaling factors explicitly, ie.\n"
"cutout.wind(..., windspeed_bias_correction=scaling_factors)"
) from None
else:
windspeed_bias_correction = None

return cutout.convert_and_aggregate(
convert_func=convert_wind,
turbine=turbine,
interpolation_method=interpolation_method,
windspeed_bias_correction=windspeed_bias_correction,
**params,
)
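
A hedged usage sketch of the new keyword (cutout path, turbine name and the way the scaling factors are obtained are placeholders/assumptions; the keyword itself and `atlite.wind.calculate_windspeed_bias_correction` come from this PR):

```python
# Hypothetical usage sketch -- file names and turbine choice are placeholders.
import atlite

cutout = atlite.Cutout("western-europe-2019.nc")  # an already prepared ERA5 cutout

# (a) Use the 'wnd_bias_correction' scaling factor stored in the cutout;
#     with True, a missing factor raises a ValueError instead of being skipped.
cf = cutout.wind(turbine="Vestas_V112_3MW", windspeed_bias_correction=True)

# (b) Pass the scaling factors explicitly as a DataArray, e.g. computed with
#     atlite.wind.calculate_windspeed_bias_correction from a raster of observed
#     mean wind speeds (exact call signature assumed here):
# scaling_factors = atlite.wind.calculate_windspeed_bias_correction(
#     cutout.data, real_average_windspeeds
# )
# cf = cutout.wind(turbine="Vestas_V112_3MW", windspeed_bias_correction=scaling_factors)
```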

6 changes: 6 additions & 0 deletions atlite/cutout.py
@@ -134,6 +134,12 @@ def __init__(self, path, **cutoutparams):
gebco_path: str
Path to find the gebco NetCDF file. Only necessary when including
the gebco module.
windspeed_real_average_path: str, optional
Path to a raster dataset with wind speeds used to bias-correct average
wind speeds. If not given during ERA5 cutout creation, the corresponding
windspeed_bias_correction feature is skipped. It has been tested to work
well with the mean wind speeds at 100 m from the Global Wind Atlas at
https://globalwindatlas.info/.
parallel : bool, default False
Whether to open dataset in parallel mode. Takes effect for all
xr.open_mfdataset usages.
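
A hedged sketch of how the new creation parameter might be used when building an ERA5 cutout (coordinates, time range and file names are placeholders; the parameter name comes from this PR):

```python
# Hypothetical sketch -- bounds, time range and paths are placeholders.
import atlite

cutout = atlite.Cutout(
    path="western-europe-2019.nc",
    module="era5",
    x=slice(-12, 35),
    y=slice(35, 72),
    time="2019",
    # e.g. a Global Wind Atlas raster of mean wind speeds at 100 m
    windspeed_real_average_path="gwa3_mean_windspeed_100m.tif",
)
cutout.prepare()
```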
33 changes: 23 additions & 10 deletions atlite/data.py
@@ -31,14 +31,15 @@
tmpdir=None,
monthly_requests=False,
concurrent_requests=False,
**parameter_updates,
Review comment (Member):
We can use this for internal methods, but it's much more readable and gives a better docs/API reference if we avoid this general kwargs handling.

Reply (@coroa, Member Author, May 6, 2025):
Well, we don't know at cutout creation time or in the prepare docstring which of these parameters exist, since they are dataset-module specific. So I don't think there is much I can do about this here.

):
"""
Load the feature data for a given module.

This gets the data for a set of features from a module. All modules
in `atlite.datasets` are allowed.
"""
parameters = cutout.data.attrs
parameters = cutout.data.attrs | parameter_updates
lock = SerializableLock()
datasets = []
get_data = datamodules[module].get_data
@@ -58,7 +59,7 @@

datasets = compute(*datasets)

ds = xr.merge(datasets, compat="equals")
ds = xr.merge([da for da in datasets if da is not None], compat="equals")
for v in ds:
da = ds[v]
da.attrs["module"] = module
@@ -140,6 +141,7 @@
dask_kwargs=None,
monthly_requests=False,
concurrent_requests=False,
**parameter_updates,
):
"""
Prepare all or a selection of features in a cutout.
@@ -185,6 +187,9 @@
concurrent_requests : bool, optional
If True, the monthly data requests are posted concurrently.
Only has an effect if `monthly_requests` is True. The default is False.
**parameter_updates
Updates to the creation parameters. For a list of available parameters,
check the docstring of :py:meth:`atlite.Cutout.__init__`.

Returns
-------
@@ -224,19 +229,27 @@
data_format=data_format,
monthly_requests=monthly_requests,
concurrent_requests=concurrent_requests,
**parameter_updates,
)
new_features = set(da.attrs["feature"] for da in ds.data_vars.values())
if not new_features:
logger.warning("No new features prepared")
return
attrs = non_bool_dict(
cutout.data.attrs
| ds.attrs
| dict(prepared_features=list(prepared | new_features))
| parameter_updates
)
prepared |= set(missing_features)

cutout.data.attrs.update(dict(prepared_features=list(prepared)))
attrs = non_bool_dict(cutout.data.attrs)
attrs.update(ds.attrs)

# Add optional compression to the newly prepared features
if compression:
for v in missing_vars:
ds[v].encoding.update(compression)
for da in ds.data_vars.values():
da.encoding.update(compression)

ds = cutout.data.merge(ds[missing_vars.values]).assign_attrs(**attrs)
ds = cutout.data.merge(
ds[missing_vars.loc[list(new_features)].values]
).assign_attrs(attrs)

# write data to tmp file, copy it to original data, this is much safer
# than appending variables
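
Since creation parameters can now also be supplied (or updated) at prepare time via `**parameter_updates`, a hedged usage sketch might look like this (the cutout path, raster path and feature selection are placeholders):

```python
# Hypothetical sketch -- supply a creation parameter at prepare time.
import atlite

cutout = atlite.Cutout("western-europe-2019.nc")
cutout.prepare(
    features=["wind"],
    # forwarded to the dataset module via **parameter_updates
    windspeed_real_average_path="gwa3_mean_windspeed_100m.tif",
)
```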