Skip to content

Commit b1a6833

Browse files
committed
Merge the different branches and make them consistent
numerous changes to config files and formatting edits throughout the processor
1 parent fea39e5 commit b1a6833

20 files changed

+468
-236
lines changed

hypernets_processor/calibration/calibrate.py

Lines changed: 23 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
from hypernets_processor.data_io.dataset_util import DatasetUtil
1212
from hypernets_processor.plotting.plotting import Plotting
1313
import numpy as np
14+
import os
1415

1516
'''___Authorship___'''
1617
__author__ = "Pieter De Vis"
@@ -32,15 +33,15 @@ def __init__(self, context, MCsteps=1000, parallel_cores=0):
3233
def calibrate_l1a(self, measurandstring, dataset_l0, dataset_l0_bla):
3334

3435
if measurandstring != "radiance" and measurandstring != "irradiance":
35-
print("the measurandstring needs to be either 'radiance' or 'irradiance")
36+
self.context.logger.error("the measurandstring needs to be either 'radiance' or 'irradiance")
3637
exit()
3738

3839
calibrate_function = self._measurement_function_factory.get_measurement_function(
3940
self.context.get_config_value("measurement_function_calibrate"))
4041
input_vars = calibrate_function.get_argument_names()
4142

43+
dataset_l0 = self.preprocess_l0(dataset_l0)
4244
dataset_l1a = self.l1a_template_from_l0_dataset(measurandstring, dataset_l0)
43-
dataset_l0, dataset_l1a = self.preprocess_l0(dataset_l0, dataset_l1a)
4445

4546
calibration_data = self.prepare_calibration_data(measurandstring)
4647

@@ -72,16 +73,16 @@ def calibrate_l1a(self, measurandstring, dataset_l0, dataset_l0_bla):
7273
def prepare_calibration_data(self,measurandstring):
7374
hypstar=self.context.get_config_value("hypstar_cal_number")
7475
caldate=self.context.get_config_value("cal_date")
75-
directory=self.context.get_config_value("processor_directory")
76-
non_linear_cals = np.genfromtxt(directory+r"/../examples/calibration_files/hypstar_"+
77-
str(hypstar)+"_nonlin_corr_coefs_"+str(caldate)+".dat")
76+
directory=self.context.get_config_value("calibration_directory")
77+
non_linear_cals = np.genfromtxt(os.path.join(directory,"hypstar_"+
78+
str(hypstar)+"_nonlin_corr_coefs_"+str(caldate)+".dat"))
7879

7980
if measurandstring == "radiance":
80-
gains = np.genfromtxt(directory+r"/../examples/calibration_files/hypstar_"+
81-
str(hypstar)+"_radcal_L_"+str(caldate)+".dat")
81+
gains = np.genfromtxt(os.path.join(directory,"hypstar_"+
82+
str(hypstar)+"_radcal_L_"+str(caldate)+".dat"))
8283
else:
83-
gains = np.genfromtxt(directory+r"/../examples/calibration_files/hypstar_"+
84-
str(hypstar)+"_radcal_E_"+str(caldate)+".dat")
84+
gains = np.genfromtxt(os.path.join(directory,"hypstar_"+
85+
str(hypstar)+"_radcal_E_"+str(caldate)+".dat"))
8586

8687
# print(non_linear_cals)
8788
# print(gains[:,0])
@@ -128,7 +129,8 @@ def calc_mean_masked(self, dataset, var, rand_unc=False, corr=False):
128129
else:
129130
out = np.empty((len(series_id), len(dataset['wavelength'])))
130131
for i in range(len(series_id)):
131-
ids = np.where((dataset['series_id'] == series_id[i]) & (dataset['quality_flag'] == 0))
132+
ids = np.where((dataset['series_id'] == series_id[i]) &
133+
np.invert(DatasetUtil.unpack_flags(dataset["quality_flag"])["outliers"]))
132134
out[i] = np.mean(dataset[var].values[:, ids], axis=2)[:, 0]
133135
if rand_unc:
134136
out[i] = out[i] / len(ids[0])
@@ -223,7 +225,7 @@ def find_u_systematic_input(self, variables, dataset, datasetbla, ancillary_data
223225

224226
return inputs
225227

226-
def preprocess_l0(self, datasetl0, datasetl1a):
228+
def preprocess_l0(self, datasetl0):
227229
"""
228230
Identifies and removes faulty measurements (e.g. due to cloud cover).
229231
@@ -232,36 +234,13 @@ def preprocess_l0(self, datasetl0, datasetl1a):
232234
:return:
233235
:rtype:
234236
"""
235-
dim_sizes_dict = {"wavelength": len(datasetl0["wavelength"]), "scan": len(datasetl0["scan"])}
236-
du = DatasetUtil()
237-
238237
mask = self.clip_and_mask(datasetl0)
239-
# datasetl0["quality_flag"].values = mask
240-
# datasetl1a["quality_flag"].values = mask
241-
242-
flagval = 2 ** (self.context.get_config_value("outliers"))
243-
244-
print(np.where(mask > 0))
245-
246-
break
247-
datasetl0["quality_flag"].values = [
248-
flagval + datasetl0["quality_flag"].values[i] if mask[i] == 1 else
249-
datasetl0["quality_flag"].values[i] for i in range(len(mask))]
250-
251238

239+
datasetl0["quality_flag"][np.where(mask==1)] = DatasetUtil.set_flag(datasetl0["quality_flag"][np.where(mask==1)],"outliers") #for i in range(len(mask))]
252240

253-
datasetl0["quality_flag"].values = [
254-
flagval + datasetl0["quality_flag"].values[i] if mask[i] == 1 else
255-
datasetl0["quality_flag"].values[i] for i in range(len(mask))]
256-
257-
258-
datasetl1a["quality_flag"].values = [
259-
flagval + datasetl1a["quality_flag"].values[i] if mask[i] == 1 else
260-
datasetl1a["quality_flag"].values[i] for i in range(len(mask))]
261-
262-
DN_rand = du.create_variable([len(datasetl0["wavelength"]), len(datasetl0["scan"])],
241+
DN_rand = DatasetUtil.create_variable([len(datasetl0["wavelength"]), len(datasetl0["scan"])],
263242
dim_names=["wavelength", "scan"], dtype=np.uint32, fill_value=0)
264-
DN_syst = du.create_variable([len(datasetl0["wavelength"]), len(datasetl0["scan"])],
243+
DN_syst = DatasetUtil.create_variable([len(datasetl0["wavelength"]), len(datasetl0["scan"])],
265244
dim_names=["wavelength", "scan"], dtype=np.uint32, fill_value=0)
266245

267246
datasetl0["u_random_digital_number"] = DN_rand
@@ -273,7 +252,7 @@ def preprocess_l0(self, datasetl0, datasetl1a):
273252
datasetl0["u_random_digital_number"].values = rand
274253
datasetl0["u_systematic_digital_number"] = DN_syst
275254

276-
return datasetl0, datasetl1a
255+
return datasetl0
277256

278257
def clip_and_mask(self, dataset, k_unc=3):
279258
mask = []
@@ -290,7 +269,6 @@ def clip_and_mask(self, dataset, k_unc=3):
290269
maski = np.zeros_like(intsig) # mask the columns that have NaN
291270
maski[np.where(np.abs(intsig - noiseavg) >= k_unc * noisestd)] = 1
292271
mask = np.append(mask, maski)
293-
# print("mask",mask)
294272

295273

296274
# check if 10% of pixels are outliers
@@ -351,7 +329,9 @@ def l1a_template_from_l0_dataset(self, measurandstring, dataset_l0):
351329
propagate_ds=dataset_l0)
352330
elif measurandstring == "irradiance":
353331
dataset_l1a = self.hdsb.create_ds_template(l1a_dim_sizes_dict, "L_L1A_IRR", propagate_ds=dataset_l0)
354-
dataset_l1a["wavelength"]=dataset_l0["wavelength"]
332+
333+
dataset_l1a=dataset_l1a.assign_coords(wavelength=dataset_l0.wavelength)
334+
355335
return dataset_l1a
356336

357337
def l1b_template_from_l1a_dataset(self, measurandstring, dataset_l1a):
@@ -371,7 +351,7 @@ def l1b_template_from_l1a_dataset(self, measurandstring, dataset_l1a):
371351
elif measurandstring == "irradiance":
372352
dataset_l1b = self.hdsb.create_ds_template(l1b_dim_sizes_dict, "L_L1B_IRR", propagate_ds=dataset_l1a)
373353

374-
dataset_l1b["wavelength"].values = dataset_l1a["wavelength"].values
354+
dataset_l1b=dataset_l1b.assign_coords(wavelength=dataset_l1a.wavelength)
375355

376356
series_id = np.unique(dataset_l1a['series_id'])
377357
dataset_l1b["series_id"].values = series_id
@@ -380,7 +360,8 @@ def l1b_template_from_l1a_dataset(self, measurandstring, dataset_l1a):
380360
"solar_azimuth_angle", "solar_zenith_angle"]:
381361
temp_arr = np.empty(len(series_id))
382362
for i in range(len(series_id)):
383-
ids = np.where((dataset_l1a['series_id'] == series_id[i]) & (dataset_l1a['quality_flag'] == 0))
363+
ids = np.where((dataset_l1a['series_id'] == series_id[i]) & np.invert(
364+
DatasetUtil.unpack_flags(dataset_l1a["quality_flag"])["outliers"]))
384365
temp_arr[i] = np.mean(dataset_l1a[variablestring].values[ids])
385366
dataset_l1b[variablestring].values = temp_arr
386367

hypernets_processor/cli/sequence_processor_cli.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ def cli():
103103
if key[:5] == "write":
104104
job_config["Output"][key] = "True"
105105

106-
job_config["Log"]["log_path"] = "test.log"#os.path.abspath(parsed_args.log) if parsed_args.log is not None else ""
106+
job_config["Log"]["log_path"] = os.path.abspath(parsed_args.log) if parsed_args.log is not None else ""
107107
job_config["Log"]["verbose"] = str(parsed_args.verbose) if parsed_args.verbose is not None else ""
108108
job_config["Log"]["quiet"] = str(parsed_args.quiet) if parsed_args.verbose is not None else ""
109109

hypernets_processor/context.py

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,8 @@
2222
class Context:
2323
"""
2424
Class to determine and store processor state
25-
2625
:type processor_config: configparser.RawConfigParser
2726
:param processor_config: processor config data object
28-
2927
:type job_config: configparser.RawConfigParser
3028
:param job_config: job config data object
3129
"""
@@ -41,16 +39,12 @@ def __init__(self, processor_config=None, job_config=None, logger=None):
4139

4240
# Unpack processor_config to set relevant attributes
4341
if processor_config is not None:
44-
config_proc = configparser.ConfigParser()
45-
config_proc.read(processor_config)
46-
self.unpack_config(config_proc)
42+
self.unpack_config(processor_config)
4743

4844
# Unpack processor_config to set relevant attributes
4945
if job_config is not None:
50-
config_job = configparser.ConfigParser()
51-
config_job.read(job_config)
5246
self.unpack_config(
53-
config_job, protected_values=PROCESSOR_CONFIG_PROTECTED_VALUES
47+
job_config, protected_values=PROCESSOR_CONFIG_PROTECTED_VALUES
5448
)
5549

5650
# Connect to databases
@@ -70,25 +64,23 @@ def __init__(self, processor_config=None, job_config=None, logger=None):
7064
def unpack_config(self, config, protected_values=None):
7165
"""
7266
Unpacks config data, sets relevant entries to values instance attribute
73-
7467
:type config: configparser.RawConfigParser
7568
:param config: config data
7669
"""
7770

7871
protected_values = [] if protected_values is None else protected_values
7972
for section in config.sections():
8073
for name in config[section].keys():
74+
8175
if name not in protected_values:
8276
value = get_config_value(config, section, name)
8377
self.set_config_value(name, value)
8478

8579
def set_config_value(self, name, value):
8680
"""
8781
Sets config data to values instance attribute
88-
8982
:type name: str
9083
:param name: config data name
91-
9284
:param value: config data value
9385
"""
9486

@@ -97,10 +89,8 @@ def set_config_value(self, name, value):
9789
def get_config_value(self, name):
9890
"""
9991
Get config value
100-
10192
:type name: str
10293
:param name: config data name
103-
10494
:return: config value
10595
"""
10696

@@ -109,7 +99,6 @@ def get_config_value(self, name):
10999
def get_config_names(self):
110100
"""
111101
Get available config value names
112-
113102
:return: config value names
114103
:rtype: list
115104
"""
@@ -118,4 +107,4 @@ def get_config_names(self):
118107

119108

120109
if __name__ == "__main__":
121-
pass
110+
pass

hypernets_processor/data_io/dataset_util.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from hypernets_processor.version import __version__
66
import string
7-
from xarray import Variable, DataArray
7+
from xarray import Variable, DataArray, Dataset
88
import numpy as np
99

1010

@@ -35,7 +35,7 @@ def create_default_array(dim_sizes, dtype, dim_names=None, fill_value=None):
3535
3636
:type dtype: type
3737
:param dtype: numpy data type
38-
dekrie
38+
3939
:type dim_names: list
4040
:param dim_names: (optional) dimension names as strings, i.e. ["dim1_name", "dim2_name", "dim3_size"]
4141

hypernets_processor/data_io/hypernets_reader.py

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ def read_series(self, seq_dir, series, lat, lon, metadata, flag, fileformat):
183183
# Header definition with length, description and decoding format
184184

185185
header = self.read_header(f, HEADER_DEF)
186-
self.context.logger.debugger(header)
186+
self.context.logger.debug(header)
187187

188188
pixCount = header['Pixel Count']
189189

@@ -284,12 +284,13 @@ def read_series(self, seq_dir, series, lat, lon, metadata, flag, fileformat):
284284

285285
# print(datetime.fromtimestamp(int(ts+timereboot)))
286286
# print(datetime.fromtimestamp(int(ts+timereboot))-date_time_obj)
287-
288-
ds.attrs["site_latitude"] = lat
289-
ds.attrs["site_longitude"] = lon
290-
ds["solar_zenith_angle"][scan_number] = get_altitude(float(lat), float(lon), acquisitionTime)
291-
ds["solar_azimuth_angle"][scan_number] = get_azimuth(float(lat), float(lon), acquisitionTime)
292-
287+
if lat is not None:
288+
ds.attrs["site_latitude"] = lat
289+
ds.attrs["site_longitude"] = lon
290+
ds["solar_zenith_angle"][scan_number] = get_altitude(float(lat), float(lon), acquisitionTime)
291+
ds["solar_azimuth_angle"][scan_number] = get_azimuth(float(lat), float(lon), acquisitionTime)
292+
else:
293+
self.context.logger.error("Latitude is not found, using default values instead for lat, lon, sza and saa.")
293294
ds['quality_flag'][scan_number] = flag
294295
ds['integration_time'][scan_number] = header['integration_time']
295296
ds['temperature'][scan_number] = header['temperature']
@@ -358,7 +359,7 @@ def read_metadata(self, seq_dir):
358359
# ACTION_NONE : 0x03 (03)
359360

360361
metadata = ConfigParser()
361-
362+
print("seq",os.path.join(seq_dir, "metadata.txt"))
362363
if os.path.exists(os.path.join(seq_dir, "metadata.txt")):
363364
metadata.read(os.path.join(seq_dir, "metadata.txt"))
364365
# ------------------------------
@@ -455,7 +456,7 @@ def read_sequence(self, seq_dir, setfile=None):
455456
seq_dir)
456457

457458
if seriesIrr:
458-
l0_irr = self.read_series(seq_dir, seriesIrr, lat, lon, metadata, flag, "l0_irr")
459+
l0_irr = self.read_series(seq_dir, seriesIrr, lat, lon, metadata, flag, "L0_IRR")
459460
if self.context.get_config_value("write_l0"):
460461
self.writer.write(l0_irr, overwrite=True)
461462
# can't use this when non concatanted spectra
@@ -467,7 +468,7 @@ def read_sequence(self, seq_dir, setfile=None):
467468
self.context.logger.error("No irradiance data for this sequence")
468469

469470
if seriesRad:
470-
l0_rad = self.read_series(seq_dir, seriesRad, lat, lon, metadata, flag, "l0_rad")
471+
l0_rad = self.read_series(seq_dir, seriesRad, lat, lon, metadata, flag, "L0_RAD")
471472
if self.context.get_config_value("write_l0"):
472473
self.writer.write(l0_rad, overwrite=True)
473474
# if all([os.path.isfile(os.path.join(seq_dir,"RADIOMETER/",f)) for f in seriesRad]):
@@ -478,7 +479,7 @@ def read_sequence(self, seq_dir, setfile=None):
478479
self.context.logger.error("No radiance data for this sequence")
479480

480481
if seriesBlack:
481-
l0_bla = self.read_series(seq_dir, seriesBlack, lat, lon, metadata, flag, "l0_bla")
482+
l0_bla = self.read_series(seq_dir, seriesBlack, lat, lon, metadata, flag, "L0_BLA")
482483
if self.context.get_config_value("write_l0"):
483484
self.writer.write(l0_bla, overwrite=True)
484485
# if all([os.path.isfile(os.path.join(seq_dir, "RADIOMETER/", f)) for f in seriesBlack]):

0 commit comments

Comments
 (0)