Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion input/chili/intercomp/_base.grid.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
output = "chili_base_grid/"
symlink = ""
ref_config = "input/chili/intercomp/_base.toml"
use_slurm = false
use_slurm = true

max_jobs = 9 # maximum number of concurrent tasks (e.g. 500 on Habrok)
max_days = 1 # maximum number of days to run (e.g. 1)
Expand Down
18 changes: 9 additions & 9 deletions input/chili/intercomp/_base.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,23 @@ version = "2.0"
path = "chili_base"
logging = "INFO"
plot_fmt = "pdf"
write_mod = 2
plot_mod = 30
write_mod = 4
plot_mod = 40
archive_mod = "none"
remove_sf = true

[params.dt]
starspec = 1e9
starinst = 10.0
method = "adaptive"
minimum = 1000.0
minimum_rel = 1e-05
minimum = 100.0
minimum_rel = 5e-3
maximum = 10000000.0
initial = 30.0
initial = 10.0

[params.dt.adaptive]
atol = 0.02
rtol = 0.09
atol = 0.04
rtol = 0.11

[params.stop]
strict = false
Expand Down Expand Up @@ -133,7 +133,7 @@ ls_default = 1
max_steps = 70
perturb_all = true
mlt_criterion = "s"
fastchem_floor = 500.0
fastchem_floor = 700.0
ini_profile = "isothermal"

[atmos_clim.dummy]
Expand All @@ -146,7 +146,7 @@ reservoir = "outgas"

[escape.zephyrus]
Pxuv = 5e-05
efficiency = 0.1
efficiency = 0.3
tidal = true

[interior]
Expand Down
55 changes: 34 additions & 21 deletions src/proteus/atmos_clim/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,14 @@ def ncdf_flag_to_bool(var) -> bool:
raise ValueError(f'Could not parse NetCDF atmos flag variable \n {var}')


def read_ncdf_profile(nc_fpath: str, extra_keys: list = []):
def read_ncdf_profile(nc_fpath: str, extra_keys: list = [], combine_edges: bool = True) -> dict:
"""Read data from atmosphere NetCDF output file.

Automatically reads pressure (p), temperature (t), radius (z) arrays with
cell-centre (N) and cell-edge (N+1) values interleaved into a single combined array of
length (2*N+1).
All variables in SI units, same as NetCDF file content.

Automatically reads pressure (p), temperature (t), radius (z) arrays.
If `combine_edges` is True, cell-centre (N) and cell-edge (N+1) values
are interleaved into a single combined array of length (2*N+1).

Extra keys can be read-in using the extra_keys parameter. These will be stored with
the same dimensions as in the NetCDF file.
Expand All @@ -54,9 +56,10 @@ def read_ncdf_profile(nc_fpath: str, extra_keys: list = []):
----------
nc_fpath : str
Path to NetCDF file.

extra_keys : list
List of extra keys (strings) to read from the file.
combine_edges : bool
Whether to combine cell-centre and cell-edge values into a single array.

Returns
----------
Expand Down Expand Up @@ -96,22 +99,32 @@ def read_ncdf_profile(nc_fpath: str, extra_keys: list = []):

# read pressure, temperature, height data into dictionary values
out = {}
out['p'] = [pl[0]]
out['t'] = [tl[0]]
out['z'] = [zl[0]]
out['r'] = [rl[0]]
for i in range(nlev_c):
out['p'].append(p[i])
out['p'].append(pl[i + 1])

out['t'].append(t[i])
out['t'].append(tl[i + 1])

out['z'].append(z[i])
out['z'].append(zl[i + 1])

out['r'].append(r[i])
out['r'].append(rl[i + 1])
if combine_edges:
out['p'] = [pl[0]]
out['t'] = [tl[0]]
out['z'] = [zl[0]]
out['r'] = [rl[0]]
for i in range(nlev_c):
out['p'].append(p[i])
out['p'].append(pl[i + 1])

out['t'].append(t[i])
out['t'].append(tl[i + 1])

out['z'].append(z[i])
out['z'].append(zl[i + 1])

out['r'].append(r[i])
out['r'].append(rl[i + 1])
else:
out['p'] = p
out['t'] = t
out['z'] = z
out['r'] = r
out['pl'] = pl
out['tmpl'] = tl
out['zl'] = zl
out['rl'] = rl

# flags
for fk in ('transparent', 'solved', 'converged'):
Expand Down
2 changes: 1 addition & 1 deletion src/proteus/interior/timestep.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ def next_step(

# Min step size
dtminimum = config.params.dt.minimum # absolute
dtminimum += config.params.dt.minimum_rel * hf_row['Time'] * 0.01 # allow small steps
dtminimum += config.params.dt.minimum_rel * hf_row['Time'] # allow small steps
dtswitch = max(dtswitch, dtminimum)

log.info('New time-step target is %.2e years' % dtswitch)
Expand Down
42 changes: 42 additions & 0 deletions tests/atmos_clim/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,48 @@ def test_read_ncdf_profile(mock_ds, mock_isfile):
mock_ds.assert_called_with('dummy.nc')


@pytest.mark.unit
@patch('proteus.atmos_clim.common.os.path.isfile')
@patch('netCDF4.Dataset')
def test_read_ncdf_profile_without_combining_edges(mock_ds, mock_isfile):
    """Keep centre and edge arrays separate when ``combine_edges`` is false.

    Exercises the branch added for callers that need the native NetCDF
    layering: N cell-centre levels and N+1 cell-edge levels as two arrays.
    """
    mock_isfile.return_value = True

    fake_dataset = MagicMock()
    mock_ds.return_value = fake_dataset

    # JANUS-style files store height as z/zl; the reader derives radius
    # from these plus the scalar planet_radius.
    fake_dataset.variables = {
        'p': np.array([100.0, 80.0]),
        'pl': np.array([110.0, 90.0, 70.0]),
        'tmp': np.array([500.0, 450.0]),
        'tmpl': np.array([520.0, 470.0, 430.0]),
        'z': np.array([1.0e4, 2.0e4]),
        'zl': np.array([0.0, 1.5e4, 2.5e4]),
        'planet_radius': [6.0e6],
        'solved': np.array([b'n'], dtype='S1'),
    }

    profile = read_ncdf_profile('dummy.nc', combine_edges=False)

    # Centre arrays keep length N; edge arrays keep length N+1.
    expected = {
        'p': [100.0, 80.0],
        'pl': [110.0, 90.0, 70.0],
        't': [500.0, 450.0],
        'tmpl': [520.0, 470.0, 430.0],
        # JANUS path: r = z + planet_radius and rl = zl + planet_radius.
        'r': [6.01e6, 6.02e6],
        'rl': [6.0e6, 6.015e6, 6.025e6],
    }
    for key, values in expected.items():
        np.testing.assert_allclose(profile[key], np.array(values))

    # All three NetCDF flag variables parse to "false" here.
    for flag in ('solved', 'transparent', 'converged'):
        assert profile[flag] == 0.0


@pytest.mark.unit
@patch('proteus.atmos_clim.common.read_ncdf_profile')
def test_read_atmosphere_data(mock_read):
Expand Down
140 changes: 140 additions & 0 deletions tests/tools/test_chili_postproc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
"""
Unit tests for CHILI postprocessing helper script.

These tests cover the scalar postprocessing path in ``tools/chili_postproc.py``
without invoking heavy atmosphere/profile readers or plotting.

See also:
- docs/How-to/test_infrastructure.md
- docs/How-to/test_categorization.md
- docs/How-to/test_building.md
"""

from __future__ import annotations

import builtins
import importlib.util
from pathlib import Path
from types import SimpleNamespace

import numpy as np
import pandas as pd
import pytest


def _load_chili_postproc_module():
    """Load ``tools/chili_postproc.py`` as a module for direct function tests.

    Loading from the file path (rather than importing a package) gives each
    test a fresh module object, so monkey-patched attributes do not leak
    between tests.

    Returns
    -------
    module
        The executed ``chili_postproc`` module.
    """
    repo_root = Path(__file__).resolve().parents[2]
    script_path = repo_root / 'tools' / 'chili_postproc.py'
    spec = importlib.util.spec_from_file_location('chili_postproc_under_test', script_path)
    # Validate the spec *before* using it: the original called
    # module_from_spec(spec) first, which would raise an opaque
    # AttributeError on a None spec before the asserts could fire.
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module


def _make_runtime_helpfile(path: Path, gas_list: list[str]):
"""Create a minimal runtime helpfile with physically valid positive values."""
data = {
'Time': [1.0],
'T_surf': [300.0],
'T_pot': [1500.0],
'F_int': [100.0],
'F_olr': [220.0],
'F_ins': [340.0],
'Phi_global_vol': [0.2],
'O2_bar': [1.0e-3],
'C_kg_solid': [1.0e17],
'C_kg_liquid': [2.0e17],
'C_kg_atm': [3.0e17],
'H_kg_solid': [4.0e17],
'H_kg_liquid': [5.0e17],
'H_kg_atm': [6.0e17],
'O_kg_atm': [7.0e17],
'P_surf': [1.0],
'atm_kg_per_mol': [2.8e-2],
'R_obs': [6.5e6],
'R_int': [6.3e6],
'RF_depth': [0.01],
'Phi_global': [0.15],
}
for gas in gas_list:
data[f'{gas}_bar'] = [1.0e-6]

df = pd.DataFrame(data)
df.to_csv(path, sep=' ', index=False)


@pytest.mark.unit
def test_postproc_once_writes_scalar_csv_and_orders_log_message(tmp_path):
    """Writes scalar CSV and logs scalar-write message before config parsing."""
    # Fresh module object, so the attribute monkey-patching below cannot
    # leak into other tests.
    chili_postproc = _load_chili_postproc_module()

    # Directory name 'run_chili_tr1b' presumably drives the returned case
    # name 'trappist1b' asserted below — TODO confirm against the script.
    simdir = tmp_path / 'run_chili_tr1b'
    simdir.mkdir()

    # The script reads a config file from the run dir; content is irrelevant
    # here because read_config_object is faked below.
    init_path = simdir / 'init_coupler.toml'
    init_path.write_text('placeholder = true\n', encoding='utf-8')

    # Provide a '<gas>_bar' column for every gas the script knows about.
    all_gases = sorted(set(chili_postproc.vol_list))
    _make_runtime_helpfile(simdir / 'runtime_helpfile.csv', all_gases)

    # Stand-in for interior JSON output: serves only the two profile arrays
    # the scalar path reads, and fails loudly on anything unexpected.
    class _FakeInteriorData:
        def get_dict_values(self, keys):
            if keys == ['data', 'temp_b']:
                return np.array([1000.0, 1500.0, 2000.0])
            if keys == ['data', 'visc_b']:
                return np.array([1.0e20, 2.0e20, 3.0e20])
            raise KeyError(keys)

    # The script reports progress via print(); capture messages in order so
    # we can assert that scalars are written before the config is parsed.
    recorded_messages = []
    original_print = builtins.print

    def fake_print(*args, **kwargs):
        msg = ' '.join(str(a) for a in args)
        recorded_messages.append(msg)

    # Records a sentinel into the same ordered message list, then returns
    # only the config attributes the scalar path consumes.
    def fake_read_config(_config_path):
        recorded_messages.append('READ_CONFIG_CALLED')
        return SimpleNamespace(
            orbit=SimpleNamespace(s0_factor=1.0),
            atmos_clim=SimpleNamespace(albedo_pl=0.3, surface_d=1000.0),
        )

    # Patch the module's own references; safe because the module is private
    # to this test (see _load_chili_postproc_module).
    chili_postproc.read_config_object = fake_read_config
    chili_postproc.read_jsons = lambda _simdir, _times: [_FakeInteriorData()]
    chili_postproc.read_ncdf_profile = lambda *_args, **_kwargs: None

    # Always restore the real print, even if postproc_once raises.
    try:
        builtins.print = fake_print
        name = chili_postproc.postproc_once(str(simdir), plot=False)
    finally:
        builtins.print = original_print

    assert name == 'trappist1b'
    # Both milestones happened, and scalars were written before the config
    # was parsed (the ordering this test exists to pin down).
    assert ' write CSV file for scalars' in recorded_messages
    assert 'READ_CONFIG_CALLED' in recorded_messages
    assert recorded_messages.index(' write CSV file for scalars') < recorded_messages.index(
        'READ_CONFIG_CALLED'
    )

    out_csv = simdir / 'chili' / 'evolution-proteus-trappist1b-data.csv'
    assert out_csv.is_file()

    # Spot-check derived columns; viscosity should be the middle value of
    # the faked visc_b profile — presumably a mid-mantle sample, TODO confirm.
    out_df = pd.read_csv(out_csv)
    assert 'flux_ASR(W/m2)' in out_df.columns
    assert 'viscosity(Pa.s)' in out_df.columns
    assert out_df.loc[0, 'viscosity(Pa.s)'] == pytest.approx(2.0e20)


@pytest.mark.unit
def test_postproc_once_raises_when_helpfile_missing(tmp_path):
    """Raises ``FileNotFoundError`` when runtime helpfile is absent."""
    chili_postproc = _load_chili_postproc_module()

    # An empty run directory: no runtime_helpfile.csv inside.
    empty_run_dir = tmp_path / 'case_no_helpfile'
    empty_run_dir.mkdir()

    with pytest.raises(FileNotFoundError, match='runtime_helpfile.csv'):
        chili_postproc.postproc_once(str(empty_run_dir), plot=False)
5 changes: 3 additions & 2 deletions tools/chili_generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import toml

# Options
use_scratch = False # Store output in scratch folder
use_scratch = True # Store output in scratch folder

# -----------------------------------------------

Expand Down Expand Up @@ -46,7 +46,8 @@
cfg[p] = deepcopy(cfg['base'])

# output
cfg[p]['params']['out']['path'] = f'chili_{p}'
cfg[p]['params']['out']['path'] = 'scratch/' if use_scratch else ''
cfg[p]['params']['out']['path'] += f'chili_{p}/'

# star
cfg[p]['star']['mass'] = 1.0 # Msun
Expand Down
Loading
Loading