# postpro_var_dn.py
# 109 lines (87 loc) · 3.3 KB
# (header reconstructed from web-scrape residue; original page chrome and
#  line-number gutter removed so the file is valid Python)
import numpy as np
from netCDF4 import Dataset
import xarray as xr
import pandas as pd
import glob
import os
import os.path
import sys
import cfgrib
import subprocess
from multiprocessing import Pool
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from matplotlib import colors
# var calculation of daily values for day-time and night-time, keeping the domain size
# It opens files, grid and cdo temp files

# --- Command-line arguments -------------------------------------------------
# varname : variable name embedded in the forecast GRIB file names
# exp     : experiment id (e.g. "fcpwp", "ctrl")
# testn   : test/run subdirectory name
# mon     : month selector ("05".."08"), mapped to a timestamp-prefix range
varname = sys.argv[1]
exp = sys.argv[2]  # fcpwp ctrl # Choose exp
testn = sys.argv[3]
mon = sys.argv[4]

# Inclusive (start, end) range compared against the first 4 digits of each
# file timestamp below.
# NOTE(review): these look like 4-digit timestamp prefixes, not calendar
# years — confirm against the forecast file naming scheme.
_MONTH_RANGES = {
    "05": (1943, 2206),
    "06": (2207, 2926),
    "07": (2927, 3670),
    "08": (3671, 4150),
}
try:
    start, end = _MONTH_RANGES[mon]
except KeyError:
    # The original code only printed a message here and then crashed later
    # with a NameError on `start`; fail fast with a clear error instead.
    sys.exit("No month selected: %r (expected one of %s)" % (mon, sorted(_MONTH_RANGES)))
# --- Input/output locations and run configuration ---------------------------
# Directory for the temporary day/night output files.
save_to_path = os.path.abspath('/hpc/uwork/extjroqu/bacy_plots/temp_var_dn')
# Experiment data directory and auxiliary data root.
dr_data = "/hpc/uwork/extjroqu/BACYTEST_D03_ICONBC_" + exp + "/" + testn + "/"
dr_extra = "/hpc/uhome/extjroqu/bacy_data/"

# ICON grid file (3 km EU domain): cell-centre lon/lat, converted rad -> deg.
gridfile = Dataset(dr_extra + "griddir/icon_grid_9999_R13B07_L.nc")
clon = np.rad2deg(gridfile.variables["clon"])
clat = np.rad2deg(gridfile.variables["clat"])

batch_size = 24    # files per batch: 1 day (24 h * 1 d)
n_processes = 4    # number of CPUs / worker processes
# --- Collect forecast files whose timestamp prefix falls in [start, end] ----
all_files = []
pattern = os.path.join(dr_data, "fc_DOM01_*.grb." + varname + ".grb")
for path in sorted(glob.glob(pattern)):
    stamp = os.path.basename(path).split("_")[2].split(".grb")[0]
    # First 4 digits of the timestamp select the month range.
    # NOTE(review): named `year` in an earlier revision, but compared against
    # 4-digit prefixes like 1943..4150 — confirm what this field encodes.
    prefix = int(stamp[:4])
    if start <= prefix <= end:
        all_files.append(path)

total_files = len(all_files)

# --- Group the files into daily batches -------------------------------------
batches = []
for offset in range(0, total_files, batch_size):
    chunk = all_files[offset:offset + batch_size]
    if len(chunk) < 2:  # avoid broken batches
        continue
    # NOTE(review): the label comes from the batch's SECOND file (index 1,
    # not 0) — confirm this is intentional.
    label = os.path.basename(chunk[1]).split("_")[2].split(".grb")[0]
    batches.append((offset, chunk, label))
def process_batch(args):
    """Compute day-time and night-time means for one daily batch of GRIB files.

    Parameters
    ----------
    args : tuple
        ``(index, batch_files, timestamp_line)`` — the batch's start index in
        the global file list, the list of hourly GRIB file paths, and the
        timestamp label used in the output file names.

    Side effects
    ------------
    Runs external ``cdo`` commands and writes
    ``<exp>_<varname>_<label>_day_part.grb`` and ``..._night_part.grb`` into
    ``save_to_path``. Temporary merge files are removed even when a ``cdo``
    step fails (the original leaked them, because ``check=True`` raised
    before the cleanup loop ran).
    """
    index, batch_files, timestamp_line = args
    day_file = os.path.join(save_to_path, f"{exp}_{varname}_{timestamp_line}_day_part.grb")
    night_file = os.path.join(save_to_path, f"{exp}_{varname}_{timestamp_line}_night_part.grb")
    print(batch_files)

    # Temp files: merged daily stream plus night-time selection pieces.
    merged_tmp = os.path.join(save_to_path, f"tmp_{exp}_{varname}_{timestamp_line}.grb")
    tmp1 = os.path.join(save_to_path, f"tmp1_{timestamp_line}.grb")
    tmp2 = os.path.join(save_to_path, f"tmp2_{timestamp_line}.grb")
    merged_night = os.path.join(save_to_path, f"tmp_night_{timestamp_line}.grb")
    try:
        # CDO running... merge the batch's hourly files into one stream (1 day).
        subprocess.run(["cdo", "-mergetime", *batch_files, merged_tmp], check=True)
        # Day-time mean: hours 09-17.
        subprocess.run([
            "cdo", "-timmean", "-selhour,9/17", merged_tmp, day_file
        ], check=True)
        # Night-time mean: hours 21-23 merged with hours 00-05.
        subprocess.run(["cdo", "-selhour,21/23", merged_tmp, tmp1], check=True)
        subprocess.run(["cdo", "-selhour,0/5", merged_tmp, tmp2], check=True)
        subprocess.run(["cdo", "-mergetime", tmp1, tmp2, merged_night], check=True)
        subprocess.run(["cdo", "-timmean", merged_night, night_file], check=True)
    finally:
        # Deleting temp files — tolerate those never created when a step failed.
        for tmpf in (merged_tmp, tmp1, tmp2, merged_night):
            try:
                os.remove(tmpf)
            except FileNotFoundError:
                pass
    print(f"Processed and saved: {day_file}")
if __name__ == "__main__":
    # Fan the daily batches out over a pool of worker processes; each worker
    # runs the cdo pipeline for one batch independently.
    with Pool(n_processes) as pool:
        pool.map(process_batch, batches)
    print("DONE with "+exp+"_"+varname)