Skip to content
Snippets Groups Projects
Commit c37be363 authored by Thomas Vogt's avatar Thomas Vogt
Browse files

Initial commit

parents
Branches main
No related tags found
No related merge requests found
.ipynb_checkpoints
Jupyter notebooks that visualise and explore the results of the "Social Cost of Tropical Cyclone" paper.
Make sure to install the [scripts](https://gitlab.pik-potsdam.de/tc_cost/tc_cost) and run the full pipeline at least for
the main specification (8 lags). Then, the notebooks need to run in the same conda environment as used by the scripts.
\ No newline at end of file
This diff is collapsed.
source diff could not be displayed: it is too large. Options to address this: view the blob.
This diff is collapsed.
%% Cell type:code id:75cea414-1d85-40f4-ac0d-e0f92dbbeb2b tags:
``` python
# F-test statistics for temperature damage function's structural dependence on SSPs and RCPs
#
# Quote: "We find that the hypotheses $\gamma_i^{e,r,b,l} = 0$ and $\omega_i^{e,r,b,l} = 0$ have
# to be rejected at the 5\% level of significance for between 58.5\% and 86.5\% of the samples
# across uncertainty dimensions 5 and 6 ($r$ and $b$), depending on the country $i$."
#
# Quote: "By contrast, the hypotheses $\gamma_i^{s,r,b,l} = 0$ and $\omega_i^{s,r,b,l} = 0$ cannot
# be rejected at the 10\% level of significance ($p \geq 0.69$) for any country $i$ and for any of
# the samples across uncertainty dimensions 5 and 6 ($r$ and $b$)."
#
%matplotlib widget
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
import tc_cost.bootstrap.df as bs_df
import tc_cost.util.constants as u_const
import tc_cost.util.data as u_data
```
%% Cell type:code id:dd064256-21ec-46eb-b21e-6f0dadf3d400 tags:
``` python
# Configuration: select the OLS, temperature-based variant of the main
# specification (timehorizon = 8 lags). `suffix` is used to pick the
# matching result files on disk (e.g. "..._ols_temp.nc").
use_ols = True
use_temp = True
parts = []
if use_ols:
    parts.append("_ols")
if use_temp:
    parts.append("_temp")
suffix = "".join(parts)
timehorizon = 8
```
%% Cell type:code id:f0f6a9d1-1b28-4cd9-9ec4-15e7c1ecc42c tags:
``` python
# For the pooled (global) regression, print per variable the share of
# bootstrap samples in which the RCP-dependence F-test rejects at the
# 10% level (i.e. 1 minus the fraction of samples with p > 0.1).
print("Global regression (not used for SCC!)")
path = u_const.BOOTSTRAP_DIR / "damage_functions" / (
    f"ftest_{timehorizon}-allgcms-GLB{suffix}.nc"
)
ds = xr.open_dataset(path)
sample_dims = ["realisation", "elt", "gcm"]
for v in ds["variable"].values:
    not_rejected = (
        ds["f_pvalue_rcp"].sel(variable=v, groups_rcp="__all__") > 0.1
    ).mean(dim=sample_dims).compute().item()
    result = 1 - not_rejected
    print(f"{v:>7s} {100 * result: 5.2f}%")
```
%% Output
Global regression (not used for SCC!)
__all__ 92.51%
const 68.34%
gmt 69.51%
%% Cell type:code id:1ce30178-037e-42cd-867c-8ddd78db1c82 tags:
``` python
# Load the per-country F-test result files and stack them along a new
# "iso" dimension, then label that dimension with the country codes.
paths = []
for iso in u_const.L_COUNTRIES:
    fname = f"ftest_{timehorizon}-allgcms-{iso}{suffix}.nc"
    paths.append(u_const.BOOTSTRAP_DIR / "damage_functions" / fname)
ds = xr.open_mfdataset(
    paths, combine="nested", concat_dim=["iso"], parallel=True
)
ds["iso"] = u_const.L_COUNTRIES
```
%% Cell type:code id:65b21e6f-b6a9-4846-8677-164e0d4a4af1 tags:
``` python
# For each rejection threshold and each grouping dimension (SSP vs. RCP),
# record per country the fraction of samples in which the F-test does NOT
# reject (p-value > threshold). Results are keyed "<group>-<variable>".
sample_dims = ["realisation", "elt", "gcm"]
result_thresh = {}
for thresh in [0.01, 0.05, 0.10]:
    print(f"# Threshold: {thresh}")
    per_thresh = {}
    for g in ["SSP", "rcp"]:
        print(f"## Group: {g}")
        for v in ds["variable"].values:
            print(f"- Variable: {v}")
            selector = {"variable": v, f"groups_{g}": "__all__"}
            not_rejected = (
                (ds[f"f_pvalue_{g}"].sel(selector) > thresh)
                .mean(dim=sample_dims)
                .compute()
            )
            per_thresh[f"{g}-{v}"] = not_rejected.to_dataframe()[f"f_pvalue_{g}"]
    result_thresh[thresh] = per_thresh
    print()
```
%% Output
# Threshold: 0.01
## Group: SSP
- Variable: __all__
- Variable: const
- Variable: gmt
## Group: rcp
- Variable: __all__
- Variable: const
- Variable: gmt
# Threshold: 0.05
## Group: SSP
- Variable: __all__
- Variable: const
- Variable: gmt
## Group: rcp
- Variable: __all__
- Variable: const
- Variable: gmt
# Threshold: 0.1
## Group: SSP
- Variable: __all__
- Variable: const
- Variable: gmt
## Group: rcp
- Variable: __all__
- Variable: const
- Variable: gmt
%% Cell type:code id:5df37359-c1d9-4fbd-8eea-b2fac18e131b tags:
``` python
# Across-country extremes of the rejection share at the 5% level.
# The stored series hold the NON-rejection fraction, so 1 - ser.min()
# is the country with the HIGHEST rejection share and 1 - ser.max()
# the country with the lowest. (The original `vmin`/`vmax` names were
# inverted relative to what they report; renamed here for clarity.)
df = pd.DataFrame(result_thresh[0.05])
for g in ["SSP", "rcp"]:
    print(g)
    for v in ds["variable"].values:
        ser = df[f"{g}-{v}"]
        reject_high = 1 - ser.min()
        iso_high = ser.index[ser.argmin()]
        reject_low = 1 - ser.max()
        iso_low = ser.index[ser.argmax()]
        print(f"{v:>9s} {iso_high} {100 * reject_high:.2f}%")
        print(f"{v:>9s} {iso_low} {100 * reject_low:.2f}%")
```
%% Output
SSP
__all__ AUS 0.00%
__all__ AUS 0.00%
const AUS 0.00%
const AUS 0.00%
gmt AUS 0.00%
gmt AUS 0.00%
rcp
__all__ KOR 96.69%
__all__ GTM 82.01%
const USA 86.28%
const VCT 58.54%
gmt USA 86.51%
gmt BLZ 60.23%
%% Cell type:code id:f56e7f0a-7fe2-4997-bd50-1b2b97286a29 tags:
``` python
# Smallest SSP-dependence p-value for the constant term across all
# samples, reported with the country where it occurs — supports the
# "cannot be rejected at the 10% level (p >= 0.69)" statement.
g = "SSP"
v = "const"
da = ds[f"f_pvalue_{g}"].sel({"variable": v, f"groups_{g}": "__all__"})
ser = da.min(dim=["realisation", "elt", "gcm"]).compute().to_series()
lowest = ser.min()
country = ser.index[ser.argmin()]
print(f"{country} {lowest:.2f}")
```
%% Output
TON 0.69
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment