Skip to content

Commit 3525aeb

Browse files
committed
rerun irm sim with updated nuisance loss logging
1 parent 347e33d commit 3525aeb

File tree

6 files changed

+29
-18
lines changed

6 files changed

+29
-18
lines changed

doc/irm/irm.qmd

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -251,7 +251,7 @@ generate_and_show_styled_table(
251251

252252
## Tuning
253253

254-
The simulations are based on the [make_irm_data](https://docs.doubleml.org/stable/api/generated/doubleml.irm.datasets.make_irm_data.html)-DGP with $1000$ observations. This is only an example as the untuned version just relies on the default configuration.
254+
The simulations are based on the [make_irm_data](https://docs.doubleml.org/stable/api/generated/doubleml.irm.datasets.make_irm_data.html)-DGP with $500$ observations. This is only an example as the untuned version just relies on the default configuration.
255255

256256
::: {.callout-note title="Metadata" collapse="true"}
257257

@@ -275,7 +275,7 @@ df_ate_tune_cov = pd.read_csv("../../results/irm/irm_ate_tune_coverage.csv", ind
275275
assert df_ate_tune_cov["repetition"].nunique() == 1
276276
n_rep_ate_tune_cov = df_ate_tune_cov["repetition"].unique()[0]
277277
278-
display_columns_ate_tune_cov = ["Learner g", "Learner m", "Tuned", "Bias", "CI Length", "Coverage",]
278+
display_columns_ate_tune_cov = ["Learner g", "Learner m", "Tuned", "Bias", "CI Length", "Coverage", "Loss g0", "Loss g1", "Loss m"]
279279
```
280280

281281

@@ -286,7 +286,7 @@ generate_and_show_styled_table(
286286
main_df=df_ate_tune_cov,
287287
filters={"level": 0.95},
288288
display_cols=display_columns_ate_tune_cov,
289-
n_rep=n_rep_ate_cov,
289+
n_rep=n_rep_ate_tune_cov,
290290
level_col="level",
291291
coverage_highlight_cols=["Coverage"]
292292
)
@@ -300,7 +300,7 @@ generate_and_show_styled_table(
300300
main_df=df_ate_tune_cov,
301301
filters={"level": 0.9},
302302
display_cols=display_columns_ate_tune_cov,
303-
n_rep=n_rep_ate_cov,
303+
n_rep=n_rep_ate_tune_cov,
304304
level_col="level",
305305
coverage_highlight_cols=["Coverage"]
306306
)

monte-cover/src/montecover/irm/irm_ate_tune.py

Lines changed: 15 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,11 @@ def ml_g_params(trial):
3737
"learning_rate", 1e-3, 0.1, log=True
3838
),
3939
"min_child_samples": trial.suggest_int(
40-
"min_child_samples", 20, 100, step=5
40+
"min_child_samples", 10, 50, step=5
4141
),
42-
"max_depth": trial.suggest_int("max_depth", 3, 10, step=1),
42+
"max_depth": 3,
43+
"feature_fraction": trial.suggest_float("feature_fraction", 0.6, 1),
44+
"bagging_fraction": trial.suggest_float("bagging_fraction", 0.6, 1),
4345
"lambda_l1": trial.suggest_float("lambda_l1", 1e-8, 10.0, log=True),
4446
"lambda_l2": trial.suggest_float("lambda_l2", 1e-8, 10.0, log=True),
4547
}
@@ -52,9 +54,11 @@ def ml_m_params(trial):
5254
"learning_rate", 1e-3, 0.1, log=True
5355
),
5456
"min_child_samples": trial.suggest_int(
55-
"min_child_samples", 20, 100, step=5
57+
"min_child_samples", 10, 50, step=5
5658
),
57-
"max_depth": trial.suggest_int("max_depth", 3, 10, step=1),
59+
"max_depth": 3,
60+
"feature_fraction": trial.suggest_float("feature_fraction", 0.6, 1),
61+
"bagging_fraction": trial.suggest_float("bagging_fraction", 0.6, 1),
5862
"lambda_l1": trial.suggest_float("lambda_l1", 1e-8, 10.0, log=True),
5963
"lambda_l2": trial.suggest_float("lambda_l2", 1e-8, 10.0, log=True),
6064
}
@@ -118,6 +122,7 @@ def run_single_rep(
118122
"coverage": [],
119123
}
120124
for model in [dml_model, dml_model_tuned]:
125+
nuisance_loss = model.nuisance_loss
121126
for level in self.confidence_parameters["level"]:
122127
level_result = dict()
123128
level_result["coverage"] = self._compute_coverage(
@@ -135,6 +140,9 @@ def run_single_rep(
135140
"Learner m": learner_m_name,
136141
"level": level,
137142
"Tuned": model is dml_model_tuned,
143+
"Loss g0": nuisance_loss["ml_g0"].mean(),
144+
"Loss g1": nuisance_loss["ml_g1"].mean(),
145+
"Loss m": nuisance_loss["ml_m"].mean(),
138146
}
139147
)
140148
for key, res in level_result.items():
@@ -152,6 +160,9 @@ def summarize_results(self):
152160
"Coverage": "mean",
153161
"CI Length": "mean",
154162
"Bias": "mean",
163+
"Loss g0": "mean",
164+
"Loss g1": "mean",
165+
"Loss m": "mean",
155166
"repetition": "count",
156167
}
157168

results/irm/irm_ate_tune_config.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
11
simulation_parameters:
2-
repetitions: 500
2+
repetitions: 200
33
max_runtime: 19800
44
random_seed: 42
55
n_jobs: -2
66
dgp_parameters:
77
theta:
88
- 0.5
99
n_obs:
10-
- 1000
10+
- 500
1111
dim_x:
1212
- 5
1313
learner_definitions:
Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
Learner g,Learner m,level,Tuned,Coverage,CI Length,Bias,repetition
2-
LGBM Regr.,LGBM Clas.,0.9,False,0.952,1.8938962143131024,0.38916017267237296,500
3-
LGBM Regr.,LGBM Clas.,0.9,True,0.82,0.31269268855505145,0.09112340834220442,500
4-
LGBM Regr.,LGBM Clas.,0.95,False,0.988,2.256716530692214,0.38916017267237296,500
5-
LGBM Regr.,LGBM Clas.,0.95,True,0.9,0.3725963196693502,0.09112340834220442,500
1+
Learner g,Learner m,level,Tuned,Coverage,CI Length,Bias,Loss g0,Loss g1,Loss m,repetition
2+
LGBM Regr.,LGBM Clas.,0.9,False,0.92,2.643216056843617,0.6446011747627041,1.1132816524651437,1.1357364135690846,0.667476757878396,200
3+
LGBM Regr.,LGBM Clas.,0.9,True,0.89,0.5145312866755513,0.12860156717199023,0.9989179129452523,1.0613529528468826,0.5188432505665598,200
4+
LGBM Regr.,LGBM Clas.,0.95,False,0.985,3.1495861941059564,0.6446011747627041,1.1132816524651437,1.1357364135690846,0.667476757878396,200
5+
LGBM Regr.,LGBM Clas.,0.95,True,0.935,0.6131018433975747,0.12860156717199023,0.9989179129452523,1.0613529528468826,0.5188432505665598,200
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
DoubleML Version,Script,Date,Total Runtime (minutes),Python Version,Config File
2-
0.12.dev0,IRMATETuningCoverageSimulation,2025-11-24 13:07,97.69014709790548,3.12.9,scripts/irm/irm_ate_tune_config.yml
2+
0.12.dev0,IRMATETuningCoverageSimulation,2025-12-01 12:02,27.278138709068298,3.12.9,scripts/irm/irm_ate_tune_config.yml

scripts/irm/irm_ate_tune_config.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
# Simulation parameters for IRM ATE Coverage with Tuning
22

33
simulation_parameters:
4-
repetitions: 500
4+
repetitions: 200
55
max_runtime: 19800 # 5.5 hours in seconds
66
random_seed: 42
77
n_jobs: -2
88

99
dgp_parameters:
1010
theta: [0.5] # Treatment effect
11-
n_obs: [1000] # Sample size
11+
n_obs: [500] # Sample size
1212
dim_x: [5] # Number of covariates
1313

1414
# Define reusable learner configurations

0 commit comments

Comments
 (0)