From 28f44c7ee01f6d1abe48ebf233076cb39f1993ea Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 09:57:57 +0100 Subject: [PATCH 01/55] Add common functionality for FCI data readers --- satpy/readers/fci_base.py | 50 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 satpy/readers/fci_base.py diff --git a/satpy/readers/fci_base.py b/satpy/readers/fci_base.py new file mode 100644 index 0000000000..c4a3714291 --- /dev/null +++ b/satpy/readers/fci_base.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Common functionality for FCI data readers.""" +from __future__ import annotations + + +def calculate_area_extent(area_dict): + """Calculate the area extent seen by MTG FCI instrument. + + Since the center of the FCI L2 grid is located at the interface between the pixels, there are equally many + pixels (e.g. 5568/2 = 2784 for 2km grid) in each direction from the center points. Hence, the area extent + can be easily computed by simply adding and subtracting half the width and height from teh centre point (=0). + + Args: + area_dict: A dictionary containing the required parameters + ncols: number of pixels in east-west direction + nlines: number of pixels in south-north direction + column_step: Pixel resulution in meters in east-west direction + line_step: Pixel resulution in meters in south-north direction + Returns: + tuple: An area extent for the scene defined by the lower left and + upper right corners + + """ + ncols = area_dict["ncols"] + nlines = area_dict["nlines"] + column_step = area_dict["column_step"] + line_step = area_dict["line_step"] + + ll_c = (0 - ncols / 2.) * column_step + ll_l = (0 + nlines / 2.) * line_step + ur_c = (0 + ncols / 2.) * column_step + ur_l = (0 - nlines / 2.) * line_step + + return (ll_c, ll_l, ur_c, ur_l) From d3ccedc745abbb0b523e04d16a84283d58deb827 Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 13:07:19 +0100 Subject: [PATCH 02/55] Add reader for both SEVIRI and FCI L2 products in GRIB2 format --- satpy/etc/readers/eum_l2_grib.yaml | 387 +++++++++++++++++++++++++++++ satpy/readers/eum_l2_grib.py | 315 +++++++++++++++++++++++ 2 files changed, 702 insertions(+) create mode 100644 satpy/etc/readers/eum_l2_grib.yaml create mode 100644 satpy/readers/eum_l2_grib.py diff --git a/satpy/etc/readers/eum_l2_grib.yaml b/satpy/etc/readers/eum_l2_grib.yaml new file mode 100644 index 0000000000..80edd3b2e5 --- /dev/null +++ b/satpy/etc/readers/eum_l2_grib.yaml @@ -0,0 +1,387 @@ +reader: + name: eum_l2_grib + short_name: EUM L2 GRIB + long_name: MSG (Meteosat 8 to 11) SEVIRI Level products 2 and FCI L2 products in GRIB2 format + description: Reader for EUMETSAT MSG SEVIRI L2 files and FCI L2 files in GRIB format. 
+ status: Alpha + supports_fsspec: false + sensors: [seviri,fci] + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + + +file_types: + + # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES + grib_seviri_aes: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Cloud Mask product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM + grib_seviri_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Cloud Top Height product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH + grib_seviri_cth: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM + grib_seviri_crm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR + grib_seviri_fir: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - 
'{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB + grib_seviri_mpe: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA + grib_seviri_oca: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + grib_fci_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' + +datasets: + + # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product + aerosol_optical_thickness_vis06: + name: aerosol_optical_thickness_vis06 + long_name: Aerosol optical Thickness at 0.6um + standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 20 + units: "1" + + aerosol_optical_thickness_vis08: + name: aerosol_optical_thickness_vis08 + long_name: Aerosol optical Thickness at 0.8um + standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 21 + units: "1" + + aerosol_optical_thickness_vis16: + name: aerosol_optical_thickness_vis16 + long_name: Aerosol optical Thickness at 1.6um + standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 22 + units: "1" + + angstroem_coefficient: + name: angstroem_coefficient + long_name: Angstroem Coefficient + standard_name: aerosol_angstrom_exponent + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 23 + units: "1" + + aes_quality: + name: aes_quality + long_name: AES Product Quality Flag + standard_name: quality_flag + resolution: 
9001.209497451 + file_type: grib_seviri_aes + parameter_number: 192 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] + + + # EUMETSAT MSG SEVIRI L2 Cloud Mask product + cloud_mask: + name: cloud_mask + long_name: Cloud Classification + standard_name: cloud_classification + resolution: + 3000.403165817: {file_type: grib_seviri_clm} + 2000: {file_type: grib_fci_clm} + parameter_number: 7 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] + + + # EUMETSAT MSG SEVIRI L2 Cloud Top Height product + cloud_top_height: + name: cloud_top_height + long_name: Cloud Top Height + standard_name: height_at_cloud_top + resolution: 9001.209497451 + file_type: grib_seviri_cth + parameter_number: 2 + units: m + + cloud_top_quality: + name: cloud_top_quality + long_name: CTH Product Quality Flag + standard_name: height_at_cloud_top quality_flag + resolution: 9001.209497451 + file_type: grib_seviri_cth + parameter_number: 3 + units: "1" + flag_values: [0, 1] + flag_meanings: ['good quality retrieval','poor quality retrieval' ] + + + # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product + vis_refl_06: + name: vis_refl_06 + long_name: TOA Bidirectional Reflectance at 0.6um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [0.56, 0.635, 0.71] + file_type: grib_seviri_crm + parameter_number: 9 + units: "%" + + vis_refl_08: + name: vis_refl_08 + long_name: TOA Bidirectional Reflectance at 0.8um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [0.74, 0.81, 0.88] + file_type: grib_seviri_crm + parameter_number: 10 + units: "%" + + vis_refl_16: + name: vis_refl_16 + long_name: TOA Bidirectional Reflectance at 1.6um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [1.5, 1.64, 1.78] + file_type: grib_seviri_crm + parameter_number: 11 + units: "%" + + nir_refl_39: + name: nir_refl_39 + long_name: TOA Bidirectional Reflectance at 3.9um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [3.48, 3.92, 4.36] + file_type: grib_seviri_crm + parameter_number: 12 + units: "%" + + num_accumulations: + name: num_accumulations + long_name: CRM Product Number of Accumulations + standard_name: number_of_accumulations + resolution: 3000.403165817 + file_type: grib_seviri_crm + parameter_number: 6 + units: "1" + + solar_zenith_angle: + name: solar_zenith_angle + long_name: Solar Zenith Angle (7 days average) + standard_name: solar_zenith_angle + resolution: 3000.403165817 + file_type: grib_seviri_crm + parameter_number: 7 + units: degrees + + relative_azimuth_angle: + name: relative_azimuth_angle + long_name: Relative Azimuth Angle (7 days average) + standard_name: relative_sensor_azimuth_angle + resolution: 3000.403165817 + file_type: grib_seviri_crm + parameter_number: 8 + units: degrees + + + # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product + active_fires: + name: active_fires + long_name: Active Fire Classification + standard_name: active_fire_classification + resolution: 3000.403165817 + file_type: grib_seviri_fir + parameter_number: 9 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['no fire','possible fire', 'probable fire', 'missing' ] + + fire_probability: + name: fire_probability + long_name: Fire Probability + standard_name: 
fire_probability + resolution: 3000.403165817 + file_type: grib_seviri_fir + parameter_number: 192 + units: "%" + + + # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product + instantaneous_rain_rate: + name: instantaneous_rain_rate + long_name: MPE Product Instantaneous Rain Rate + standard_name: rainfall_rate + resolution: 3000.403165817 + file_type: grib_seviri_mpe + parameter_number: 1 + units: "kg m-2 s-1" + + + # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product + pixel_scene_type: + name: pixel_scene_type + long_name: Cloud Type + standard_name: scene_classification + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 8 + units: "1" + flag_values: [24,111,112] + flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] + + measurement_cost: + name: measurement_cost + long_name: OCA Cost Function - Measurement part + standard_name: cost_function + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 30 + units: "1" + + upper_layer_cloud_optical_depth: + name: upper_layer_cloud_optical_depth + long_name: Upper Cloud Layer Optical Depth + standard_name: atmosphere_optical_thickness_due_to_cloud + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 31 + units: "1" + + upper_layer_cloud_top_pressure: + name: upper_layer_cloud_top_pressure + long_name: Upper Cloud Top Pressure + standard_name: air_pressure_at_cloud_top + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 32 + units: Pa + + upper_layer_cloud_effective_radius: + name: upper_layer_cloud_effective_radius + long_name: Upper Cloud Particle Effective Radius + standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 33 + units: m + + error_in_upper_layer_cloud_optical_depth: + name: error_in_upper_layer_cloud_optical_depth + long_name: Upper Cloud Optical Depth Error Estimate + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 34 + units: "1" + + error_in_upper_layer_cloud_top_pressure: + name: error_in_upper_layer_cloud_top_pressure + long_name: Upper Cloud Top Pressure Error Estimate + standard_name: air_pressure_at_cloud_top standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 35 + units: Pa + + error_in_upper_layer_cloud_effective_radius: + name: error_in_upper_layer_cloud_effective_radius + long_name: Upper Cloud Particle Effective Radius Error Estimate + standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 36 + units: m + + lower_layer_cloud_optical_depth: + name: lower_layer_cloud_optical_depth + long_name: Lower Cloud Optical Depth + standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 37 + units: "1" + + lower_layer_cloud_top_pressure: + name: lower_layer_cloud_top_pressure + long_name: Lower Cloud Top Pressure + standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 38 + units: Pa + + error_in_lower_layer_cloud_optical_depth: + name: error_in_lower_layer_cloud_optical_depth + long_name: Lower Cloud Optical Depth Error Estimate + standard_name: 
atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 39 + units: "1" + + error_in_lower_layer_cloud_top_pressure: + name: error_in_lower_layer_cloud_top_pressure + long_name: Lower Cloud Top Pressure Error Estimate + standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 40 + units: Pa diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py new file mode 100644 index 0000000000..47cf9a0ba9 --- /dev/null +++ b/satpy/readers/eum_l2_grib.py @@ -0,0 +1,315 @@ +# Copyright (c) 2019-2023 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . + +"""Reader for both SEVIRI and FCI L2 products in GRIB2 format. + +References: + FM 92 GRIB Edition 2 + https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf + EUMETSAT Product Navigator + https://navigator.eumetsat.int/ +""" + +import logging +from datetime import timedelta + +import dask.array as da +import numpy as np +import xarray as xr + +from satpy.readers._geos_area import get_area_definition, get_geos_area_naming +from satpy.readers.eum_base import get_service_mode +from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent +from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent +from satpy.utils import get_legacy_chunk_size + +CHUNK_SIZE = get_legacy_chunk_size() + +try: + import eccodes as ec +except ImportError: + raise ImportError( + "Missing eccodes-python and/or eccodes C-library installation. 
Use conda to install eccodes")
+
+logger = logging.getLogger(__name__)
+
+
+class EUML2GribFileHandler(BaseFileHandler):
+    """Reader class for EUM L2 products in GRIB format."""
+
+    calculate_area_extent = None
+
+    def __init__(self, filename, filename_info, filetype_info):
+        """Read the global attributes and prepare for dataset reading."""
+        super().__init__(filename, filename_info, filetype_info)
+        # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files)
+        ec.codes_grib_multi_support_on()
+
+        if "seviri" in self.filetype_info["file_type"]:
+            self.sensor = "seviri"
+            self.PLATFORM_NAME = PLATFORM_DICT[self.filename_info["spacecraft"]]
+        elif "fci" in self.filetype_info["file_type"]:
+            self.sensor = "fci"
+            self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}"
+        pass
+
+    @property
+    def start_time(self):
+        """Return the sensing start time."""
+        return self.filename_info["start_time"]
+
+    @property
+    def end_time(self):
+        """Return the sensing end time."""
+        if self.sensor == "seviri":
+            return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION)
+        elif self.sensor == "fci":
+            return self.filename_info["end_time"]
+
+    def get_area_def(self, dataset_id):
+        """Return the area definition for a dataset."""
+        # Compute the dictionary with the area extent
+
+        self._area_dict["column_step"] = dataset_id["resolution"]
+        self._area_dict["line_step"] = dataset_id["resolution"]
+
+        if self.sensor == "seviri":
+            area_extent = seviri_calculate_area_extent(self._area_dict)
+
+        elif self.sensor == "fci":
+            area_extent = fci_calculate_area_extent(self._area_dict)
+
+        # Call the get_area_definition function to obtain the area
+        area_def = get_area_definition(self._pdict, area_extent)
+
+        return area_def
+
+    def get_dataset(self, dataset_id, dataset_info):
+        """Get dataset using the parameter_number key in dataset_info.
+
+        In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and the projection information
+        (pdict and area_dict) were computed while initializing the file handler, and the code would break out of
+        the while loop below as soon as the correct parameter_number was found. This has been revised because,
+        when the file was only partly read (i.e. without looping over all messages), the reader could sometimes
+        report incorrect information about the number of messages in the file and about the dataset dimensions
+        within a given message.
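+
+        Datasets declared in the accompanying YAML configuration are normally accessed through a Scene rather
+        than by calling this method directly. A minimal usage sketch, assuming the reader is registered as
+        ``eum_l2_grib`` and using a hypothetical Cloud Mask file name::
+
+            from satpy import Scene
+
+            scn = Scene(filenames=["MSG4-SEVI-MSGCLMK-0100-0100-20191020174500.000000000Z-NA.grb"],
+                        reader="eum_l2_grib")
+            scn.load(["cloud_mask"])
+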
+ """ + logger.debug("Reading in file to get dataset with parameter number %d.", + dataset_info["parameter_number"]) + + xarr = None + message_found = False + with open(self.filename, "rb") as fh: + + # Iterate over all messages and fetch data when the correct parameter number is found + while True: + gid = ec.codes_grib_new_from_file(fh) + + if gid is None: + if not message_found: + # Could not obtain a valid message ID from the grib file + logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", + dataset_info["parameter_number"]) + break + + # Check if the parameter number in the GRIB message corresponds to the required key + parameter_number = self._get_from_msg(gid, "parameterNumber") + + if parameter_number == dataset_info["parameter_number"]: + + self._res = dataset_id["resolution"] + self._read_attributes(gid) + + # Read the missing value + missing_value = self._get_from_msg(gid, "missingValue") + + # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value + xarr = self._get_xarray_from_msg(gid) + + xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) + + ec.codes_release(gid) + + # Combine all metadata into the dataset attributes and break out of the loop + xarr.attrs.update(dataset_info) + xarr.attrs.update(self._get_attributes()) + + message_found = True + + else: + # The parameter number is not the correct one, release gid and skip to next message + ec.codes_release(gid) + + return xarr + + def _read_attributes(self, gid): + """Read the parameter attributes from the message and create the projection and area dictionaries.""" + # Read SSP and date/time + self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") + + # Read number of points on the x and y axes + self._nrows = self._get_from_msg(gid, "Ny") + self._ncols = self._get_from_msg(gid, "Nx") + + # Creates the projection and area dictionaries + self._pdict, self._area_dict = self._get_proj_area(gid) + + def _get_proj_area(self, gid): + """Compute the dictionary with the projection and area definition from a GRIB message. + + Args: + gid: The ID of the GRIB message. + + Returns: + tuple: A tuple of two dictionaries for the projection and the area definition. 
+                pdict:
+                    a: Earth major axis [m]
+                    b: Earth minor axis [m]
+                    h: Height over surface [m]
+                    ssp_lon: longitude of subsatellite point [deg]
+                    nlines: number of lines
+                    ncols: number of columns
+                    a_name: name of the area
+                    a_desc: description of the area
+                    p_id: id of the projection
+                area_dict:
+                    center_point: coordinate of the center point
+                    north: coordinate of the north limit
+                    east: coordinate of the east limit
+                    west: coordinate of the west limit
+                    south: coordinate of the south limit
+        """
+        # Get name of area definition
+        area_naming_input_dict = {"platform_name": "msg",
+                                  "instrument_name": self.sensor,
+                                  "resolution": self._res,
+                                  }
+
+        area_naming = get_geos_area_naming({**area_naming_input_dict,
+                                            **get_service_mode(self.sensor, self._ssp_lon)})
+
+        # Read all projection and area parameters from the message
+        earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0  # [m]
+        earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0  # [m]
+
+        if self.sensor == "seviri":
+            earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters)
+            earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters)
+
+        nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth")
+        xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths")
+        h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0)  # [m]
+
+        # Create the dictionary with the projection data
+        pdict = {
+            "a": earth_major_axis_in_meters,
+            "b": earth_minor_axis_in_meters,
+            "h": h_in_meters,
+            "ssp_lon": self._ssp_lon,
+            "nlines": self._ncols,
+            "ncols": self._nrows,
+            "a_name": area_naming["area_id"],
+            "a_desc": area_naming["description"],
+            "p_id": "",
+        }
+
+        if self.sensor == "seviri":
+            # Compute the dictionary with the area extent
+            area_dict = {
+                "center_point": xp_in_grid_lengths,
+                "north": self._nrows,
+                "east": 1,
+                "west": self._ncols,
+                "south": 1,
+            }
+
+        elif self.sensor == "fci":
+            area_dict = {
+                "nlines": self._ncols,
+                "ncols": self._nrows,
+            }
+
+        return pdict, area_dict
+
+    @staticmethod
+    def _scale_earth_axis(data):
+        """Scale Earth axis data to make sure the value matches the expected unit [m].
+
+        The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This
+        method provides a flexible temporary workaround by making sure that all Earth axis values are scaled such
+        that they are on the order of millions of meters, as expected by the reader. As soon as the scaling issue has
+        been resolved by EUMETSAT, this workaround can be removed.
+
+        """
+        scale_factor = 10 ** np.ceil(np.log10(1e6/data))
+        return data * scale_factor
+
+    def _get_xarray_from_msg(self, gid):
+        """Read the values from the GRIB message and return a DataArray object.
+
+        Args:
+            gid: The ID of the GRIB message.
+
+        Returns:
+            DataArray: The array containing the retrieved values.
+        """
+        # Data from the GRIB message are read into an xarray DataArray backed by a dask array
+        xarr = xr.DataArray(da.from_array(ec.codes_get_values(
+            gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x"))
+
+        return xarr
+
+    def _get_attributes(self):
+        """Create a dictionary of attributes to be added to the dataset.
+
+        Returns:
+            dict: A dictionary of parameter attributes.
+ ssp_lon: longitude of subsatellite point + sensor: name of sensor + platform_name: name of the platform + """ + orbital_parameters = { + "projection_longitude": self._ssp_lon + } + + attributes = { + "orbital_parameters": orbital_parameters, + "sensor": self.sensor + } + + + attributes["platform_name"] = self.PLATFORM_NAME + + return attributes + + @staticmethod + def _get_from_msg(gid, key): + """Get a value from the GRIB message based on the key, return None if missing. + + Args: + gid: The ID of the GRIB message. + key: The key of the required attribute. + + Returns: + The retrieved attribute or None if the key is missing. + """ + try: + attr = ec.codes_get(gid, key) + except ec.KeyValueNotFoundError: + logger.warning("Key %s not found in GRIB message", key) + attr = None + return attr From de65b636e5ccd21cca449b93afee3b477c75b1b8 Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 13:11:24 +0100 Subject: [PATCH 03/55] Add EUM L2 GRIB-reader test package --- satpy/tests/reader_tests/test_eum_l2_grib.py | 319 +++++++++++++++++++ 1 file changed, 319 insertions(+) create mode 100644 satpy/tests/reader_tests/test_eum_l2_grib.py diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py new file mode 100644 index 0000000000..3e4dee87a8 --- /dev/null +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -0,0 +1,319 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . 
+ +"""EUM L2 GRIB-reader test package.""" + +import datetime +import sys +import unittest +from unittest import mock + +import numpy as np + +from satpy.tests.utils import make_dataid + +# Dictionary to be used as fake GRIB message +FAKE_SEVIRI_MESSAGE = { + "longitudeOfSubSatellitePointInDegrees": 9.5, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 1000, + "Ny": 1200, + "earthMajorAxis": 6400., + "earthMinorAxis": 6300., + "NrInRadiusOfEarth": 6., + "XpInGridLengths": 500, + "parameterNumber": 30, + "missingValue": 9999, +} + +FAKE_FCI_MESSAGE = { + "longitudeOfSubSatellitePointInDegrees": 0.0, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 5568, + "Ny": 5568, + "earthMajorAxis": 6378140., + "earthMinorAxis": 6356755., + "NrInRadiusOfEarth": 6.6107, + "XpInGridLengths": 2784.0, + "parameterNumber": 30, + "missingValue": 9999, +} + +# List to be used as fake GID source +FAKE_GID = [0, 1, 2, 3, None] + + +class Test_EUML2GribFileHandler(unittest.TestCase): + """Test the EUML2GribFileHandler reader.""" + + @mock.patch("satpy.readers.eum_l2_grib.ec") + def setUp(self, ec_): + """Set up the test by creating a mocked eccodes library.""" + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_ = ec_ + + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.eum_l2_grib.xr") + @mock.patch("satpy.readers.eum_l2_grib.da") + def test_seviri_data_reading(self, da_, xr_): + """Test the reading of data from the product.""" + from satpy.readers.eum_l2_grib import REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + CHUNK_SIZE = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): + self.ec_.codes_get_values.return_value = np.ones(1000*1200) + self.ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] + self.reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type" : "seviri" + } + ) + + dataset_id = make_dataid(name="dummmy", resolution=3000) + + # Checks that the codes_grib_multi_support_on function has been called + self.ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: 
next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Checks the basic data reading + assert REPEAT_CYCLE_DURATION == 15 + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = self.reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 9.5 + }, + "sensor": "seviri", + "platform_name": "Meteosat-11" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + self.reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == CHUNK_SIZE + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = self.reader._get_proj_area(0) + + expected_pdict = { + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", + } + assert pdict == expected_pdict + expected_area_dict = { + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", + mock.Mock(name="seviri_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) 
+ self.reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" + + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.eum_l2_grib.xr") + @mock.patch("satpy.readers.eum_l2_grib.da") + def test_fci_data_reading(self, da_, xr_): + """Test the reading of fci data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + CHUNK_SIZE = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): + self.ec_.codes_get_values.return_value = np.ones(5568*5568) + self.ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] + self.reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft_id": "1", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type" : "fci" + } + ) + + dataset_id = make_dataid(name="dummmy", resolution=2000) + + # Checks that the codes_grib_multi_support_on function has been called + self.ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Checks the correct execution of the _get_global_attributes and 
_get_metadata_from_msg functions + attributes = self.reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 0.0 + }, + "sensor": "fci", + "platform_name": "MTG-i1" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + self.reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((5568, 5568))) + assert args[1] == CHUNK_SIZE + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = self.reader._get_proj_area(0) + + expected_pdict = { + "a": 6378140000.0, + "b": 6356755000.0, + "h": 35785830098.0, + "ssp_lon": 0.0, + "nlines": 5568, + "ncols": 5568, + "a_name": "msg_fci_fdss_2km", + "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", + "p_id": "" + } + assert pdict == expected_pdict + expected_area_dict = { + "nlines": 5568, + "ncols": 5568 + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", + mock.Mock(name="fci_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=2000.) + self.reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"nlines": 5568, "ncols": 5568, + "column_step": 2000., "line_step": 2000.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" From 0474fd083636a2d335b52ead6dd1ef3e8330bdfc Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 13:39:30 +0100 Subject: [PATCH 04/55] Add my name to AUTHORS.md --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 9078e441b4..1d98572541 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -87,3 +87,4 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) +- [David Navia (dnaviap)](https://github.com/dnaviap) From 767aeabf656ab0ffbfa6c68a66bde7372fc37a2e Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:33:31 +0100 Subject: [PATCH 05/55] Delete eum_l2_grib.yaml and update seviri_l2_grib.yaml to avoid changing the user interface to the reader --- satpy/etc/readers/eum_l2_grib.yaml | 387 -------------------------- satpy/etc/readers/seviri_l2_grib.yaml | 14 +- 2 files changed, 7 insertions(+), 394 deletions(-) delete mode 100644 satpy/etc/readers/eum_l2_grib.yaml diff --git a/satpy/etc/readers/eum_l2_grib.yaml b/satpy/etc/readers/eum_l2_grib.yaml deleted file mode 100644 index 80edd3b2e5..0000000000 --- a/satpy/etc/readers/eum_l2_grib.yaml +++ /dev/null @@ -1,387 +0,0 @@ -reader: - name: eum_l2_grib - short_name: EUM 
L2 GRIB - long_name: MSG (Meteosat 8 to 11) SEVIRI Level products 2 and FCI L2 products in GRIB2 format - description: Reader for EUMETSAT MSG SEVIRI L2 files and FCI L2 files in GRIB format. - status: Alpha - supports_fsspec: false - sensors: [seviri,fci] - reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader - - -file_types: - - # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES - grib_seviri_aes: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Cloud Mask product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM - grib_seviri_clm: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Cloud Top Height product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH - grib_seviri_cth: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM - grib_seviri_crm: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR - grib_seviri_fir: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - 
'{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB - grib_seviri_mpe: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA - grib_seviri_oca: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - grib_fci_clm: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' - -datasets: - - # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product - aerosol_optical_thickness_vis06: - name: aerosol_optical_thickness_vis06 - long_name: Aerosol optical Thickness at 0.6um - standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 20 - units: "1" - - aerosol_optical_thickness_vis08: - name: aerosol_optical_thickness_vis08 - long_name: Aerosol optical Thickness at 0.8um - standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 21 - units: "1" - - aerosol_optical_thickness_vis16: - name: aerosol_optical_thickness_vis16 - long_name: Aerosol optical Thickness at 1.6um - standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 22 - units: "1" - - angstroem_coefficient: - name: angstroem_coefficient - long_name: Angstroem Coefficient - standard_name: aerosol_angstrom_exponent - resolution: 9001.209497451 - file_type: grib_seviri_aes - 
parameter_number: 23 - units: "1" - - aes_quality: - name: aes_quality - long_name: AES Product Quality Flag - standard_name: quality_flag - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 192 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] - - - # EUMETSAT MSG SEVIRI L2 Cloud Mask product - cloud_mask: - name: cloud_mask - long_name: Cloud Classification - standard_name: cloud_classification - resolution: - 3000.403165817: {file_type: grib_seviri_clm} - 2000: {file_type: grib_fci_clm} - parameter_number: 7 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] - - - # EUMETSAT MSG SEVIRI L2 Cloud Top Height product - cloud_top_height: - name: cloud_top_height - long_name: Cloud Top Height - standard_name: height_at_cloud_top - resolution: 9001.209497451 - file_type: grib_seviri_cth - parameter_number: 2 - units: m - - cloud_top_quality: - name: cloud_top_quality - long_name: CTH Product Quality Flag - standard_name: height_at_cloud_top quality_flag - resolution: 9001.209497451 - file_type: grib_seviri_cth - parameter_number: 3 - units: "1" - flag_values: [0, 1] - flag_meanings: ['good quality retrieval','poor quality retrieval' ] - - - # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product - vis_refl_06: - name: vis_refl_06 - long_name: TOA Bidirectional Reflectance at 0.6um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [0.56, 0.635, 0.71] - file_type: grib_seviri_crm - parameter_number: 9 - units: "%" - - vis_refl_08: - name: vis_refl_08 - long_name: TOA Bidirectional Reflectance at 0.8um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [0.74, 0.81, 0.88] - file_type: grib_seviri_crm - parameter_number: 10 - units: "%" - - vis_refl_16: - name: vis_refl_16 - long_name: TOA Bidirectional Reflectance at 1.6um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [1.5, 1.64, 1.78] - file_type: grib_seviri_crm - parameter_number: 11 - units: "%" - - nir_refl_39: - name: nir_refl_39 - long_name: TOA Bidirectional Reflectance at 3.9um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [3.48, 3.92, 4.36] - file_type: grib_seviri_crm - parameter_number: 12 - units: "%" - - num_accumulations: - name: num_accumulations - long_name: CRM Product Number of Accumulations - standard_name: number_of_accumulations - resolution: 3000.403165817 - file_type: grib_seviri_crm - parameter_number: 6 - units: "1" - - solar_zenith_angle: - name: solar_zenith_angle - long_name: Solar Zenith Angle (7 days average) - standard_name: solar_zenith_angle - resolution: 3000.403165817 - file_type: grib_seviri_crm - parameter_number: 7 - units: degrees - - relative_azimuth_angle: - name: relative_azimuth_angle - long_name: Relative Azimuth Angle (7 days average) - standard_name: relative_sensor_azimuth_angle - resolution: 3000.403165817 - file_type: grib_seviri_crm - parameter_number: 8 - units: degrees - - - # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product - active_fires: - name: active_fires - long_name: Active Fire Classification - standard_name: active_fire_classification - resolution: 3000.403165817 - file_type: grib_seviri_fir - parameter_number: 9 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: 
['no fire','possible fire', 'probable fire', 'missing' ] - - fire_probability: - name: fire_probability - long_name: Fire Probability - standard_name: fire_probability - resolution: 3000.403165817 - file_type: grib_seviri_fir - parameter_number: 192 - units: "%" - - - # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product - instantaneous_rain_rate: - name: instantaneous_rain_rate - long_name: MPE Product Instantaneous Rain Rate - standard_name: rainfall_rate - resolution: 3000.403165817 - file_type: grib_seviri_mpe - parameter_number: 1 - units: "kg m-2 s-1" - - - # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product - pixel_scene_type: - name: pixel_scene_type - long_name: Cloud Type - standard_name: scene_classification - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 8 - units: "1" - flag_values: [24,111,112] - flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] - - measurement_cost: - name: measurement_cost - long_name: OCA Cost Function - Measurement part - standard_name: cost_function - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 30 - units: "1" - - upper_layer_cloud_optical_depth: - name: upper_layer_cloud_optical_depth - long_name: Upper Cloud Layer Optical Depth - standard_name: atmosphere_optical_thickness_due_to_cloud - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 31 - units: "1" - - upper_layer_cloud_top_pressure: - name: upper_layer_cloud_top_pressure - long_name: Upper Cloud Top Pressure - standard_name: air_pressure_at_cloud_top - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 32 - units: Pa - - upper_layer_cloud_effective_radius: - name: upper_layer_cloud_effective_radius - long_name: Upper Cloud Particle Effective Radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 33 - units: m - - error_in_upper_layer_cloud_optical_depth: - name: error_in_upper_layer_cloud_optical_depth - long_name: Upper Cloud Optical Depth Error Estimate - standard_name: atmosphere_optical_thickness_due_to_cloud standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 34 - units: "1" - - error_in_upper_layer_cloud_top_pressure: - name: error_in_upper_layer_cloud_top_pressure - long_name: Upper Cloud Top Pressure Error Estimate - standard_name: air_pressure_at_cloud_top standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 35 - units: Pa - - error_in_upper_layer_cloud_effective_radius: - name: error_in_upper_layer_cloud_effective_radius - long_name: Upper Cloud Particle Effective Radius Error Estimate - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 36 - units: m - - lower_layer_cloud_optical_depth: - name: lower_layer_cloud_optical_depth - long_name: Lower Cloud Optical Depth - standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 37 - units: "1" - - lower_layer_cloud_top_pressure: - name: lower_layer_cloud_top_pressure - long_name: Lower Cloud Top Pressure - standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 38 - units: Pa - - 
error_in_lower_layer_cloud_optical_depth: - name: error_in_lower_layer_cloud_optical_depth - long_name: Lower Cloud Optical Depth Error Estimate - standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 39 - units: "1" - - error_in_lower_layer_cloud_top_pressure: - name: error_in_lower_layer_cloud_top_pressure - long_name: Lower Cloud Top Pressure Error Estimate - standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 40 - units: Pa diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml index cbe6c81f09..e9a8cc5231 100644 --- a/satpy/etc/readers/seviri_l2_grib.yaml +++ b/satpy/etc/readers/seviri_l2_grib.yaml @@ -14,7 +14,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES grib_seviri_aes: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -24,7 +24,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Cloud Mask product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM grib_seviri_clm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -34,7 +34,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Cloud Top Height product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH grib_seviri_cth: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -44,7 +44,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM grib_seviri_crm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -54,7 +54,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR grib_seviri_fir: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 
'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -64,7 +64,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB grib_seviri_mpe: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -74,7 +74,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA grib_seviri_oca: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' From a53ddead94ec05d0117eba1c3e971fda11d17980 Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:37:12 +0100 Subject: [PATCH 06/55] Add fci_l2_grib.yaml reader --- satpy/etc/readers/fci_l2_grib.yaml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 satpy/etc/readers/fci_l2_grib.yaml diff --git a/satpy/etc/readers/fci_l2_grib.yaml b/satpy/etc/readers/fci_l2_grib.yaml new file mode 100644 index 0000000000..cc16c77081 --- /dev/null +++ b/satpy/etc/readers/fci_l2_grib.yaml @@ -0,0 +1,28 @@ +reader: + name: fci_l2_grib + short_name: FCI L2 GRIB2 + long_name: MTG FCI L2 data in GRIB2 format + description: Reader for EUMETSAT MTG FCI L2 files in GRIB2 format. 
+ status: Nominal + supports_fsspec: false + sensors: [fci] + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + +file_types: + grib_fci_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' + + +datasets: + cloud_mask: + name: cloud_mask + long_name: Cloud Classification + standard_name: cloud_classification + resolution: 2000 + file_type: grib_fci_clm + parameter_number: 7 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'undefined' ] From f94c4f721445de5e873e1077b5d44a6b852e1aa6 Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:39:40 +0100 Subject: [PATCH 07/55] Delete seviri_l2_grib.py since eum_l2_grib.py is compatible with FCI and SEVIRI data --- satpy/readers/seviri_l2_grib.py | 282 -------------------------------- 1 file changed, 282 deletions(-) delete mode 100644 satpy/readers/seviri_l2_grib.py diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py deleted file mode 100644 index b69c60e7ac..0000000000 --- a/satpy/readers/seviri_l2_grib.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright (c) 2019-2023 Satpy developers -# -# satpy is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# satpy is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with satpy. If not, see . - -"""Reader for the SEVIRI L2 products in GRIB2 format. - -References: - FM 92 GRIB Edition 2 - https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf - EUMETSAT Product Navigator - https://navigator.eumetsat.int/ -""" - -import logging -from datetime import timedelta - -import dask.array as da -import numpy as np -import xarray as xr - -from satpy.readers._geos_area import get_area_definition, get_geos_area_naming -from satpy.readers.eum_base import get_service_mode -from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, calculate_area_extent -from satpy.utils import get_legacy_chunk_size - -try: - import eccodes as ec -except ImportError: - raise ImportError( - "Missing eccodes-python and/or eccodes C-library installation. 
Use conda to install eccodes") - -CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger(__name__) - - -class SeviriL2GribFileHandler(BaseFileHandler): - """Reader class for SEVIRI L2 products in GRIB format.""" - - def __init__(self, filename, filename_info, filetype_info): - """Read the global attributes and prepare for dataset reading.""" - super().__init__(filename, filename_info, filetype_info) - # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) - ec.codes_grib_multi_support_on() - - @property - def start_time(self): - """Return the sensing start time.""" - return self.filename_info["start_time"] - - @property - def end_time(self): - """Return the sensing end time.""" - return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) - - def get_area_def(self, dataset_id): - """Return the area definition for a dataset.""" - self._area_dict["column_step"] = dataset_id["resolution"] - self._area_dict["line_step"] = dataset_id["resolution"] - - area_extent = calculate_area_extent(self._area_dict) - - # Call the get_area_definition function to obtain the area - area_def = get_area_definition(self._pdict, area_extent) - - return area_def - - def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the parameter_number key in dataset_info. - - In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information - (pdict and area_dict) were computed while initializing the file handler. Also the code would break out from - the While-loop below as soon as the correct parameter_number was found. This has now been revised becasue the - reader would sometimes give corrupt information about the number of messages in the file and the dataset - dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier - instance. 
- """ - logger.debug("Reading in file to get dataset with parameter number %d.", - dataset_info["parameter_number"]) - - xarr = None - message_found = False - with open(self.filename, "rb") as fh: - - # Iterate over all messages and fetch data when the correct parameter number is found - while True: - gid = ec.codes_grib_new_from_file(fh) - - if gid is None: - if not message_found: - # Could not obtain a valid message ID from the grib file - logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", - dataset_info["parameter_number"]) - break - - # Check if the parameter number in the GRIB message corresponds to the required key - parameter_number = self._get_from_msg(gid, "parameterNumber") - - if parameter_number == dataset_info["parameter_number"]: - - self._res = dataset_id["resolution"] - self._read_attributes(gid) - - # Read the missing value - missing_value = self._get_from_msg(gid, "missingValue") - - # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value - xarr = self._get_xarray_from_msg(gid) - - xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) - - ec.codes_release(gid) - - # Combine all metadata into the dataset attributes and break out of the loop - xarr.attrs.update(dataset_info) - xarr.attrs.update(self._get_attributes()) - - message_found = True - - else: - # The parameter number is not the correct one, release gid and skip to next message - ec.codes_release(gid) - - return xarr - - def _read_attributes(self, gid): - """Read the parameter attributes from the message and create the projection and area dictionaries.""" - # Read SSP and date/time - self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") - - # Read number of points on the x and y axes - self._nrows = self._get_from_msg(gid, "Ny") - self._ncols = self._get_from_msg(gid, "Nx") - - # Creates the projection and area dictionaries - self._pdict, self._area_dict = self._get_proj_area(gid) - - def _get_proj_area(self, gid): - """Compute the dictionary with the projection and area definition from a GRIB message. - - Args: - gid: The ID of the GRIB message. - - Returns: - tuple: A tuple of two dictionaries for the projection and the area definition. 
- pdict: - a: Earth major axis [m] - b: Earth minor axis [m] - h: Height over surface [m] - ssp_lon: longitude of subsatellite point [deg] - nlines: number of lines - ncols: number of columns - a_name: name of the area - a_desc: description of the area - p_id: id of the projection - area_dict: - center_point: coordinate of the center point - north: coodinate of the north limit - east: coodinate of the east limit - west: coodinate of the west limit - south: coodinate of the south limit - """ - # Get name of area definition - area_naming_input_dict = {"platform_name": "msg", - "instrument_name": "seviri", - "resolution": self._res, - } - - area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode("seviri", self._ssp_lon)}) - - # Read all projection and area parameters from the message - earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] - earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] - - earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) - earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) - - nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") - xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") - h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] - - # Create the dictionary with the projection data - pdict = { - "a": earth_major_axis_in_meters, - "b": earth_minor_axis_in_meters, - "h": h_in_meters, - "ssp_lon": self._ssp_lon, - "nlines": self._ncols, - "ncols": self._nrows, - "a_name": area_naming["area_id"], - "a_desc": area_naming["description"], - "p_id": "", - } - - # Compute the dictionary with the area extension - area_dict = { - "center_point": xp_in_grid_lengths, - "north": self._nrows, - "east": 1, - "west": self._ncols, - "south": 1, - } - - return pdict, area_dict - - @staticmethod - def _scale_earth_axis(data): - """Scale Earth axis data to make sure the value matched the expected unit [m]. - - The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This - method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such - that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has - been resolved by EUMETSAT this workaround can be removed. - - """ - scale_factor = 10 ** np.ceil(np.log10(1e6/data)) - return data * scale_factor - - def _get_xarray_from_msg(self, gid): - """Read the values from the GRIB message and return a DataArray object. - - Args: - gid: The ID of the GRIB message. - - Returns: - DataArray: The array containing the retrieved values. - """ - # Data from GRIB message are read into an Xarray... - xarr = xr.DataArray(da.from_array(ec.codes_get_values( - gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) - - return xarr - - def _get_attributes(self): - """Create a dictionary of attributes to be added to the dataset. - - Returns: - dict: A dictionary of parameter attributes. 
- ssp_lon: longitude of subsatellite point - sensor: name of sensor - platform_name: name of the platform - """ - orbital_parameters = { - "projection_longitude": self._ssp_lon - } - - attributes = { - "orbital_parameters": orbital_parameters, - "sensor": "seviri", - "platform_name": PLATFORM_DICT[self.filename_info["spacecraft"]] - } - return attributes - - @staticmethod - def _get_from_msg(gid, key): - """Get a value from the GRIB message based on the key, return None if missing. - - Args: - gid: The ID of the GRIB message. - key: The key of the required attribute. - - Returns: - The retrieved attribute or None if the key is missing. - """ - try: - attr = ec.codes_get(gid, key) - except ec.KeyValueNotFoundError: - logger.warning("Key %s not found in GRIB message", key) - attr = None - return attr From 66946ad5abccdcbee94654d366c31d0402f48fb3 Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:46:12 +0100 Subject: [PATCH 08/55] Delete obsolete test_seviri_l2_grib.py --- .../tests/reader_tests/test_seviri_l2_grib.py | 182 ------------------ 1 file changed, 182 deletions(-) delete mode 100644 satpy/tests/reader_tests/test_seviri_l2_grib.py diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py deleted file mode 100644 index d3b40d6caa..0000000000 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2019 Satpy developers -# -# satpy is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# satpy is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with satpy. If not, see . 
- -"""SEVIRI L2 GRIB-reader test package.""" - -import datetime -import sys -import unittest -from unittest import mock - -import numpy as np - -from satpy.tests.utils import make_dataid - -# Dictionary to be used as fake GRIB message -FAKE_MESSAGE = { - "longitudeOfSubSatellitePointInDegrees": 9.5, - "dataDate": 20191020, - "dataTime": 1745, - "Nx": 1000, - "Ny": 1200, - "earthMajorAxis": 6400., - "earthMinorAxis": 6300., - "NrInRadiusOfEarth": 6., - "XpInGridLengths": 500, - "parameterNumber": 30, - "missingValue": 9999, -} - -# List to be used as fake GID source -FAKE_GID = [0, 1, 2, 3, None] - - -class Test_SeviriL2GribFileHandler(unittest.TestCase): - """Test the SeviriL2GribFileHandler reader.""" - - @mock.patch("satpy.readers.seviri_l2_grib.ec") - def setUp(self, ec_): - """Set up the test by creating a mocked eccodes library.""" - fake_gid_generator = (i for i in FAKE_GID) - ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - ec_.codes_get.side_effect = lambda gid, key: FAKE_MESSAGE[key] - ec_.codes_get_values.return_value = np.ones(1000*1200) - self.ec_ = ec_ - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.seviri_l2_grib.xr") - @mock.patch("satpy.readers.seviri_l2_grib.da") - def test_data_reading(self, da_, xr_): - """Test the reading of data from the product.""" - from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler - from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.seviri_l2_grib.ec", self.ec_): - self.reader = SeviriL2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft": "MET11", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) - }, - filetype_info={} - ) - - dataset_id = make_dataid(name="dummmy", resolution=3000) - - # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert 
invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Checks the basic data reading - assert REPEAT_CYCLE_DURATION == 15 - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 9.5 - }, - "sensor": "seviri", - "platform_name": "Meteosat-11" - } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == CHUNK_SIZE - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6400000., - "b": 6300000., - "h": 32000000., - "ssp_lon": 9.5, - "nlines": 1000, - "ncols": 1200, - "a_name": "msg_seviri_rss_3km", - "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", - "p_id": "", - } - assert pdict == expected_pdict - expected_area_dict = { - "center_point": 500, - "north": 1200, - "east": 1, - "west": 1000, - "south": 1, - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", - mock.Mock(name="calculate_area_extent")) as cae: - with mock.patch("satpy.readers.seviri_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", resolution=400.) 
- self.reader.get_area_def(dataset_id) - # Asserts that calculate_area_extent has been called with the correct arguments - expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, - "column_step": 400., "line_step": 400.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of calculate_area_extent - assert args[1]._extract_mock_name() == "calculate_area_extent()" From 5bbb4219ace06aef45b0ca4268b01198a159b2dc Mon Sep 17 00:00:00 2001 From: David Navia Date: Wed, 8 May 2024 15:38:24 +0200 Subject: [PATCH 09/55] Refactor duplicate code in tests --- satpy/tests/reader_tests/test_eum_l2_grib.py | 101 +++++++------------ 1 file changed, 37 insertions(+), 64 deletions(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 3e4dee87a8..a7846be706 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -70,6 +70,41 @@ def setUp(self, ec_): ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) self.ec_ = ec_ + def common_checks(self, mock_file, dataset_id): + """Commmon checks for fci and seviri data.""" + # Checks that the codes_grib_multi_support_on function has been called + self.ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") @mock.patch("satpy.readers.eum_l2_grib.xr") @mock.patch("satpy.readers.eum_l2_grib.da") @@ -97,38 +132,7 @@ def test_seviri_data_reading(self, da_, xr_): dataset_id = make_dataid(name="dummmy", resolution=3000) - # Checks that the 
codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + self.common_checks(mock_file, dataset_id) # Checks the basic data reading assert REPEAT_CYCLE_DURATION == 15 @@ -224,38 +228,7 @@ def test_fci_data_reading(self, da_, xr_): dataset_id = make_dataid(name="dummmy", resolution=2000) - # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns 
None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + self.common_checks(mock_file, dataset_id) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() From 486b3a6e77fbebb0b17e1c4915b5d1abceab7e87 Mon Sep 17 00:00:00 2001 From: David Navia Date: Wed, 8 May 2024 15:50:13 +0200 Subject: [PATCH 10/55] Correct for RSS data --- satpy/readers/eum_l2_grib.py | 16 ++++++---------- satpy/readers/seviri_base.py | 2 ++ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py index 47cf9a0ba9..c3cc7e61c4 100644 --- a/satpy/readers/eum_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -23,7 +23,6 @@ """ import logging -from datetime import timedelta import dask.array as da import numpy as np @@ -33,7 +32,7 @@ from satpy.readers.eum_base import get_service_mode from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, REPEAT_CYCLE_DURATION_RSS from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size @@ -75,10 +74,8 @@ def start_time(self): @property def end_time(self): """Return the sensing end time.""" - if self.sensor == "seviri": - return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) - elif self.sensor == "fci": - return self.filename_info["end_time"] + delta = REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else REPEAT_CYCLE_DURATION + return self.start_time + delta def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" @@ -249,10 +246,9 @@ def _get_proj_area(self, gid): def _scale_earth_axis(data): """Scale Earth axis data to make sure the value matched the expected unit [m]. - The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This - method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such - that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has - been resolved by EUMETSAT this workaround can be removed. + The earthMinorAxis value stored in the MPEF aerosol over sea product prior to December 12, 2022 has the wrong + unit and this method provides a flexible work-around by making sure that all earth axis values are scaled such + that they are on the order of millions of meters as expected by the reader. 
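        For illustration (arbitrary input values): an axis delivered in kilometres, 6356.752, gives
        ceil(log10(1e6 / 6356.752)) = 3 and is therefore multiplied by 1e3, while the same axis wrongly
        stored as 0.06356752 gives a factor of 1e8; both cases end up at the expected 6356752 m.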
""" scale_factor = 10 ** np.ceil(np.log10(1e6/data)) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 5b19e56833..d2ed5c3847 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -212,6 +212,8 @@ REPEAT_CYCLE_DURATION = 15 +REPEAT_CYCLE_DURATION_RSS = 5 + C1 = 1.19104273e-5 C2 = 1.43877523 From c6322faec97301b7f2578f215c1b3044b4cef4d0 Mon Sep 17 00:00:00 2001 From: David Navia Date: Wed, 8 May 2024 16:02:35 +0200 Subject: [PATCH 11/55] Modify fci_base doc-string --- satpy/readers/fci_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/fci_base.py b/satpy/readers/fci_base.py index c4a3714291..c1f6fc2110 100644 --- a/satpy/readers/fci_base.py +++ b/satpy/readers/fci_base.py @@ -22,7 +22,7 @@ def calculate_area_extent(area_dict): """Calculate the area extent seen by MTG FCI instrument. - Since the center of the FCI L2 grid is located at the interface between the pixels, there are equally many + Since the center of the FCI grids is located at the interface between the pixels, there are equally many pixels (e.g. 5568/2 = 2784 for 2km grid) in each direction from the center points. Hence, the area extent can be easily computed by simply adding and subtracting half the width and height from teh centre point (=0). From ed5213bd2e0b8e37a5e1f45b2b3d06d1948ff7f4 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 13 Jun 2024 14:05:34 +0000 Subject: [PATCH 12/55] Fix end_time computation, optimize SEVIRI imports and fix code style issues. --- satpy/readers/eum_l2_grib.py | 23 ++++++++++---------- satpy/tests/reader_tests/test_eum_l2_grib.py | 20 ++++++++--------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py index c3cc7e61c4..543aa71c30 100644 --- a/satpy/readers/eum_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -22,6 +22,7 @@ https://navigator.eumetsat.int/ """ +import datetime as dt import logging import dask.array as da @@ -32,7 +33,9 @@ from satpy.readers.eum_base import get_service_mode from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, REPEAT_CYCLE_DURATION_RSS +from satpy.readers.seviri_base import PLATFORM_DICT as SEVIRI_PLATFORM_DICT +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION as SEVIRI_REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION_RSS as SEVIRI_REPEAT_CYCLE_DURATION_RSS from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size @@ -60,7 +63,7 @@ def __init__(self, filename, filename_info, filetype_info): if "seviri" in self.filetype_info["file_type"]: self.sensor = "seviri" - self.PLATFORM_NAME = PLATFORM_DICT[self.filename_info["spacecraft"]] + self.PLATFORM_NAME = SEVIRI_PLATFORM_DICT[self.filename_info["spacecraft"]] elif "fci" in self.filetype_info["file_type"]: self.sensor = "fci" self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}" @@ -74,8 +77,11 @@ def start_time(self): @property def end_time(self): """Return the sensing end time.""" - delta = REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else REPEAT_CYCLE_DURATION - return self.start_time + delta + if self.sensor == "seviri": + delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION + return 
self.start_time + dt.timedelta(minutes=delta) + elif self.sensor == "fci": + return self.filename_info["end_time"] def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" @@ -282,13 +288,8 @@ def _get_attributes(self): "projection_longitude": self._ssp_lon } - attributes = { - "orbital_parameters": orbital_parameters, - "sensor": self.sensor - } - - - attributes["platform_name"] = self.PLATFORM_NAME + attributes = {"orbital_parameters": orbital_parameters, "sensor": self.sensor, + "platform_name": self.PLATFORM_NAME} return attributes diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index a7846be706..8745fc33d2 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -60,7 +60,7 @@ FAKE_GID = [0, 1, 2, 3, None] -class Test_EUML2GribFileHandler(unittest.TestCase): +class TestEUML2GribFileHandler(unittest.TestCase): """Test the EUML2GribFileHandler reader.""" @mock.patch("satpy.readers.eum_l2_grib.ec") @@ -72,7 +72,7 @@ def setUp(self, ec_): def common_checks(self, mock_file, dataset_id): """Commmon checks for fci and seviri data.""" - # Checks that the codes_grib_multi_support_on function has been called + # Checks that the codes_grib_multi_support_on function has been called self.ec_.codes_grib_multi_support_on.assert_called() # Restarts the id generator and clears the call history @@ -110,9 +110,9 @@ def common_checks(self, mock_file, dataset_id): @mock.patch("satpy.readers.eum_l2_grib.da") def test_seviri_data_reading(self, da_, xr_): """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from satpy.readers.eum_l2_grib import SEVIRI_REPEAT_CYCLE_DURATION, EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() + chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): @@ -126,7 +126,7 @@ def test_seviri_data_reading(self, da_, xr_): hour=19, minute=45, second=0) }, filetype_info={ - "file_type" : "seviri" + "file_type": "seviri" } ) @@ -135,7 +135,7 @@ def test_seviri_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) # Checks the basic data reading - assert REPEAT_CYCLE_DURATION == 15 + assert SEVIRI_REPEAT_CYCLE_DURATION == 15 # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() @@ -154,7 +154,7 @@ def test_seviri_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == CHUNK_SIZE + assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] @@ -208,7 +208,7 @@ def test_fci_data_reading(self, da_, xr_): """Test the reading of fci data from the product.""" from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() + chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): @@ -222,7 +222,7 @@ def test_fci_data_reading(self, da_, xr_): hour=19, minute=45, second=0) }, filetype_info={ - "file_type" : "fci" 
+ "file_type": "fci" } ) @@ -247,7 +247,7 @@ def test_fci_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((5568, 5568))) - assert args[1] == CHUNK_SIZE + assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] From 164bcba5b8cb51081823e436a9861a53d6463259 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 13 Jun 2024 16:08:36 +0000 Subject: [PATCH 13/55] Add tests for end_time. --- satpy/tests/reader_tests/test_eum_l2_grib.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 8745fc33d2..593eb2f5af 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -110,7 +110,7 @@ def common_checks(self, mock_file, dataset_id): @mock.patch("satpy.readers.eum_l2_grib.da") def test_seviri_data_reading(self, da_, xr_): """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import SEVIRI_REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size chunk_size = get_legacy_chunk_size() @@ -134,8 +134,9 @@ def test_seviri_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) - # Checks the basic data reading - assert SEVIRI_REPEAT_CYCLE_DURATION == 15 + # Check end_time + assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() @@ -219,7 +220,9 @@ def test_fci_data_reading(self, da_, xr_): filename_info={ "spacecraft_id": "1", "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) + hour=19, minute=40, second=0), + "end_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) }, filetype_info={ "file_type": "fci" @@ -230,6 +233,10 @@ def test_fci_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) + # Check end_time + assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { From c3efc55d6bbb2377d89cd81bc691e4f8910dfe11 Mon Sep 17 00:00:00 2001 From: David Navia Date: Mon, 24 Jun 2024 17:38:31 +0200 Subject: [PATCH 14/55] Add fci base test --- satpy/tests/reader_tests/test_fci_base.py | 46 +++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 satpy/tests/reader_tests/test_fci_base.py diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py new file mode 100644 index 0000000000..41ac956b67 --- /dev/null +++ b/satpy/tests/reader_tests/test_fci_base.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. 
+# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""FCI base reader tests package.""" + +# import datetime as dt +import unittest + +from satpy.readers.fci_base import calculate_area_extent +from satpy.tests.utils import make_dataid + + +class TestCalculateAreaExtent(unittest.TestCase): + """Test TestCalculateAreaExtent.""" + + def test_fun(self): + """Test function for TestCalculateAreaExtent.""" + dataset_id = make_dataid(name="dummmy", resolution=2000.) + + area_dict = { + "nlines": 5568, + "ncols": 5568, + "line_step": dataset_id["resolution"], + "column_step": dataset_id["resolution"], + } + + area_extent = calculate_area_extent(area_dict) + + expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) + + assert area_extent == expected From d0f379b4fff28084f62aa51e40a056c5fca69d79 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 3 Sep 2024 14:56:21 +0200 Subject: [PATCH 15/55] Remove rogue unit assignment in MSI SAFE reader --- satpy/readers/msi_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index b041436a74..8bab073c86 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -83,7 +83,6 @@ def get_dataset(self, key, info): if proj is None: return proj.attrs = info.copy() - proj.attrs["units"] = "%" proj.attrs["platform_name"] = self.platform_name return proj From 39ed676eb1310d8a04648ad66a4e0ac42965f5b4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 3 Sep 2024 15:04:10 +0200 Subject: [PATCH 16/55] Add check for S2/MSI processing level to catch method needed for radiance calculation. --- satpy/readers/msi_safe.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 8bab073c86..11a0312059 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -92,7 +92,9 @@ def _read_from_file(self, key): if key["calibration"] == "reflectance": return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": - return self._mda.calibrate_to_radiances(proj, self._channel) + # The calibration procedure differs for L1B and L1C/L2A data! 
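            # In short:
            #   * L1B: radiance follows directly from the counts,
            #       L = (counts + band_offset) / physical_gain
            #   * L1C/L2A: counts are first converted to reflectance, and the radiance
            #     is then derived from the band solar irradiance, the solar zenith
            #     angle and the Sun-Earth distance.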
+ if self.process_level == "L1B": + return self._mda.calibrate_to_radiances(proj, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) if key["calibration"] in ["aerosol_thickness", "water_vapor"]: @@ -218,7 +220,7 @@ def saturated(self): """Get the saturated value from the metadata.""" return self.special_values["SATURATED"] - def calibrate_to_radiances(self, data, band_name): + def calibrate_to_radiances_l1b(self, data, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" physical_gain = self.physical_gain(band_name) data = self._sanitize_data(data) From acf2088c50ef3cdd2f78cca1aa2e688aa5bb50c1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 3 Sep 2024 17:19:41 +0200 Subject: [PATCH 17/55] Debugging MSI SAFE radiances --- satpy/readers/msi_safe.py | 46 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 45 insertions(+), 1 deletion(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 11a0312059..ea3f44cbf8 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -94,7 +94,14 @@ def _read_from_file(self, key): if key["calibration"] == "radiance": # The calibration procedure differs for L1B and L1C/L2A data! if self.process_level == "L1B": - return self._mda.calibrate_to_radiances(proj, self._channel) + # For L1B the radiances can be directly computed from the digital counts. + return self._mda.calibrate_to_radiances_l1b(proj, self._channel) + else: + # For higher level data, radiances must be computed from the reflectance. + # sza = self._tile_mda.get_dataset() + tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) + return self._mda.calibrate_to_radiances(tmp_refl, self._channel) + if key["calibration"] == "counts": return self._mda._sanitize_data(proj) if key["calibration"] in ["aerosol_thickness", "water_vapor"]: @@ -203,6 +210,30 @@ def band_offsets(self): band_offsets = {} return band_offsets + def solar_irradiance(self, band_name): + """Get the solar irradiance for a given *band_name*.""" + band_index = self._band_index(band_name) + return self.solar_irradiances[band_index] + + @cached_property + def solar_irradiances(self): + """Get the TOA solar irradiance values from the metadata.""" + irrads = self.root.find(".//Solar_Irradiance_List") + if irrads is not None: + solar_irrad = {int(irr.attrib["bandId"]): float(irr.text) for irr in irrads} + else: + solar_irrad = {} + return solar_irrad + + @cached_property + def sun_earth_dist(self): + """Get the sun-earth distance from the metadata.""" + sed = self.root.find(".//U") + if sed is not None: + return float(sed.text) + else: + return -1 + @cached_property def special_values(self): """Get the special values from the metadata.""" @@ -226,6 +257,19 @@ def calibrate_to_radiances_l1b(self, data, band_name): data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / physical_gain + def calibrate_to_radiances(self, data, band_name): + """Calibrate *data* to radiance using the radiometric information for the metadata.""" + sed = self.sun_earth_dist + if sed < 0.5 or sed > 1.5: + raise ValueError(f"Sun-Earth distance is incorrect in the metadata: {sed}") + solar_irrad_band = self.solar_irradiance(band_name) + + solar_zenith = 32.029 + + solar_zenith = np.deg2rad(solar_zenith) + + return (data / 100.) 
* solar_irrad_band * np.cos(solar_zenith) / (np.pi * sed * sed) + def physical_gain(self, band_name): """Get the physical gain for a given *band_name*.""" band_index = self._band_index(band_name) From 976b642710435750611a96e576899aeff097a3d9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 5 Sep 2024 13:01:32 +0200 Subject: [PATCH 18/55] Bugfix for Sentinel-2 L1C radiance calculation, initial tests. --- satpy/readers/msi_safe.py | 33 ++++++++++++++++----- satpy/tests/reader_tests/test_msi_safe.py | 35 +++++++++++++---------- 2 files changed, 46 insertions(+), 22 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index ea3f44cbf8..33dca3a3f0 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -59,13 +59,15 @@ class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" - def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True): + def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, + mask_saturated=True, solar_ang_method="mean"): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] + self.solar_ang_method = solar_ang_method self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] @@ -98,9 +100,17 @@ def _read_from_file(self, key): return self._mda.calibrate_to_radiances_l1b(proj, self._channel) else: # For higher level data, radiances must be computed from the reflectance. - # sza = self._tile_mda.get_dataset() + # By default, we use the mean solar angles so that the user does not need to resample, + # but the user can also choose to use the solar angles from the tile metadata. + # This is on a coarse grid so for most bands must be resampled before use. + if self.solar_ang_method == "mean": + zen, azi = self._tile_mda.mean_sun_angles + else: + from satpy import DataQuery + dq = DataQuery(name="solar_zenith_angle") + zen = self._tile_mda.get_dataset(dq, {}) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) - return self._mda.calibrate_to_radiances(tmp_refl, self._channel) + return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) @@ -257,15 +267,13 @@ def calibrate_to_radiances_l1b(self, data, band_name): data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / physical_gain - def calibrate_to_radiances(self, data, band_name): + def calibrate_to_radiances(self, data, solar_zenith, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" sed = self.sun_earth_dist if sed < 0.5 or sed > 1.5: raise ValueError(f"Sun-Earth distance is incorrect in the metadata: {sed}") solar_irrad_band = self.solar_irradiance(band_name) - solar_zenith = 32.029 - solar_zenith = np.deg2rad(solar_zenith) return (data / 100.) 
* solar_irrad_band * np.cos(solar_zenith) / (np.pi * sed * sed) @@ -313,7 +321,18 @@ def get_area_def(self, dsid): cols, rows, area_extent) - return area + return (area) + + @cached_property + def mean_sun_angles(self): + """Get the mean sun angles from the metadata.""" + angs = self.root.find(".//Mean_Sun_Angle") + if angs is not None: + zen = float(angs.find("ZENITH_ANGLE").text) + azi = float(angs.find("AZIMUTH_ANGLE").text) + return zen, azi + else: + return -999, -999 @cached_property def projection(self): diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 84828f4ecf..d51c520865 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1442,11 +1442,12 @@ def jp2_builder(process_level, band_name, mask_saturated=True): process_level=process_level.replace("old", "")) xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), - filename_info, mock.MagicMock()) + filename_info, mock.MagicMock()) tile_xml_fh.start_time.return_value = tilemd_dt jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh + def make_alt_dataid(**items): """Make a DataID with modified keys.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -1578,26 +1579,26 @@ def setup_method(self): [ ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, np.inf]]], - [[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]], + [[[0.0, 5.60879825, 11.2175965, 16.8263948,], + [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], [-19.96, -10, 635.34, 635.35]]], - [[[np.nan, -35.465976, -35.448234, -35.430493], - [-35.412751, -17.741859, 1127.211275, 1127.229017]]], + [[[0.0, 1.09348075, 2.1869615, 3.28044225], + [4.373923, 1093.48075, 71660.1675, 71661.2609]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], [0.04, 10, 655.34, np.inf]]], - [[[np.nan, 0.251836101, 0.503672202, 0.755508303], - [1.00734440, 251.836101, 16503.8271, np.inf]]], + [[[0.0, 5.60879825, 11.2175965, 16.8263948,], + [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, 645.35]]], - [[[np.nan, -238.571863, -238.333052, -238.094241], - [-237.855431, 0, 15411.407995, 15411.646806]]], + [[[0.0, 5.25188783, 10.5037757, 15.7556635,], + [21.0075513, 5251.88783, 344177.217, 344182.469]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ]) @@ -1606,10 +1607,11 @@ def test_xml_calibration(self, process_level, mask_saturated, band_name, expecte xml_fh = xml_builder(process_level, mask_saturated)[0] res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) + res2 = xml_fh.calibrate_to_radiances(self.fake_data, 25.6, band_name) res3 = xml_fh._sanitize_data(self.fake_data) results = (res1, res2, res3) + np.testing.assert_allclose(results, expected) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), @@ -1655,11 +1657,14 @@ def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration jp2_fh = jp2_builder("L2A", 
dataset_name, mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) - if res is not None: - np.testing.assert_allclose(res, expected) - else: - assert res is expected + with mock.patch("satpy.readers.msi_safe.SAFEMSITileMDXML.mean_sun_angles", + new_callable=mock.PropertyMock) as mocker: + mocker.return_value = (25, 8) + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), [ From c1348f6bc527f5615a25c467e349004c572fcdb3 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 15:42:49 +0200 Subject: [PATCH 19/55] Update S2/MSI reader to use only the gridded SZA for radiance calculation. --- satpy/readers/msi_safe.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 33dca3a3f0..a7c1828ba4 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -60,14 +60,13 @@ class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, - mask_saturated=True, solar_ang_method="mean"): + mask_saturated=True): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] - self.solar_ang_method = solar_ang_method self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] @@ -103,12 +102,8 @@ def _read_from_file(self, key): # By default, we use the mean solar angles so that the user does not need to resample, # but the user can also choose to use the solar angles from the tile metadata. # This is on a coarse grid so for most bands must be resampled before use. - if self.solar_ang_method == "mean": - zen, azi = self._tile_mda.mean_sun_angles - else: - from satpy import DataQuery - dq = DataQuery(name="solar_zenith_angle") - zen = self._tile_mda.get_dataset(dq, {}) + dq = dict(name="solar_zenith_angle", resolution=key["resolution"]) + zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) @@ -321,7 +316,7 @@ def get_area_def(self, dsid): cols, rows, area_extent) - return (area) + return area @cached_property def mean_sun_angles(self): From 4309738ae3e7b930c3375dab4f16760e00a6d269 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 16:36:35 +0200 Subject: [PATCH 20/55] Prepare S2/MSI reader for availability of L1B files, but raise error if user passes L1B data. 
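Before the L1B guard introduced below, it is worth spelling out the L1C/L2A relation that the preceding commits implement: radiance is obtained from the reflectance via the band solar irradiance, the solar zenith angle and the Sun-Earth distance. A self-contained sketch mirroring calibrate_to_radiances above (the numeric inputs are made up for illustration):

import numpy as np

def reflectance_to_radiance(refl_percent, solar_irradiance, sun_zenith_deg, sun_earth_dist_au):
    """Convert TOA reflectance in percent to radiance, as in calibrate_to_radiances above."""
    sun_zenith = np.deg2rad(sun_zenith_deg)
    return (refl_percent / 100.0) * solar_irradiance * np.cos(sun_zenith) / (np.pi * sun_earth_dist_au ** 2)

# Made-up values: 50 % reflectance, 1900 W m-2 um-1 irradiance, 22.5 deg solar zenith, 1 AU.
print(reflectance_to_radiance(50.0, 1900.0, 22.5, 1.0))  # ~279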
--- satpy/readers/msi_safe.py | 31 +++++++++++-------------------- 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index a7c1828ba4..1599bcd8fe 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -67,6 +67,8 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] + if self.process_level not in ["L1C", "L2A"]: + raise ValueError(f"Unsupported process level: {self.process_level}") self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] @@ -94,10 +96,7 @@ def _read_from_file(self, key): return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": # The calibration procedure differs for L1B and L1C/L2A data! - if self.process_level == "L1B": - # For L1B the radiances can be directly computed from the digital counts. - return self._mda.calibrate_to_radiances_l1b(proj, self._channel) - else: + if self.process_level in ["L1C", "L2A"]: # For higher level data, radiances must be computed from the reflectance. # By default, we use the mean solar angles so that the user does not need to resample, # but the user can also choose to use the solar angles from the tile metadata. @@ -106,6 +105,10 @@ def _read_from_file(self, key): zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) + #else: + # For L1B the radiances can be directly computed from the digital counts. + #return self._mda.calibrate_to_radiances_l1b(proj, self._channel) + if key["calibration"] == "counts": return self._mda._sanitize_data(proj) @@ -162,7 +165,7 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \ + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level[:2] == "L1" else \ int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 @@ -170,7 +173,7 @@ def calibrate_to_reflectances(self, data, band_name): def calibrate_to_atmospheric(self, data, band_name): """Calibrate L2A AOT/WVP product.""" atmospheric_bands = ["AOT", "WVP"] - if self.process_level == "L1C": + if self.process_level == "L1C" or self.process_level == "L1B": return elif self.process_level == "L2A" and band_name not in atmospheric_bands: return @@ -207,7 +210,7 @@ def band_indices(self): @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level == "L1C" else \ + offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level[:2] == "L1" else \ self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} @@ -236,8 +239,7 @@ def sun_earth_dist(self): sed = self.root.find(".//U") if sed is not None: return float(sed.text) - else: - return -1 + return -1 @cached_property def special_values(self): @@ -318,17 +320,6 @@ def get_area_def(self, 
dsid): area_extent) return area - @cached_property - def mean_sun_angles(self): - """Get the mean sun angles from the metadata.""" - angs = self.root.find(".//Mean_Sun_Angle") - if angs is not None: - zen = float(angs.find("ZENITH_ANGLE").text) - azi = float(angs.find("AZIMUTH_ANGLE").text) - return zen, azi - else: - return -999, -999 - @cached_property def projection(self): """Get the geographic projection.""" From 485ef66f3691b53daee7ab091081399f14cd6569 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 16:38:13 +0200 Subject: [PATCH 21/55] Update S2/MSI tests for radiance calculations. --- satpy/tests/reader_tests/test_msi_safe.py | 49 ++++++++++++++--------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index d51c520865..4970767ea8 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1435,15 +1435,21 @@ def xml_builder(process_level, mask_saturated=True, band_name=None): return xml_fh, xml_tile_fh -def jp2_builder(process_level, band_name, mask_saturated=True): +def jp2_builder(process_level, band_name, mask_saturated=True, test_l1b=False): """Build fake SAFE jp2 image file.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) + if test_l1b: + filename_info["process_level"] = "L1B" + xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock()) tile_xml_fh.start_time.return_value = tilemd_dt + tile_xml_fh.get_dataset.return_value = xr.DataArray([[22.5, 23.8], + [22.5, 24.8]], + dims=["x", "y"]) jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh @@ -1642,29 +1648,28 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) + self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), + @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ - (False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), - (True, "B02", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), - (True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), - (False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), - (True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), - (True, "SNOW", "water_vapor", None), + ("L2A", False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + ("L1C", True, "B02", "radiance", [[np.nan, -59.439197], [3877.121602, np.inf]]), + ("L2A", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + ("L2A", False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + ("L2A", True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + ("L2A", True, "SNOW", "water_vapor", None), ]) - def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): + def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): 
"""Test that saturated is masked with inf when requested and that calibration is performed.""" - jp2_fh = jp2_builder("L2A", dataset_name, mask_saturated) + jp2_fh = jp2_builder(process_level, dataset_name, mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - with mock.patch("satpy.readers.msi_safe.SAFEMSITileMDXML.mean_sun_angles", - new_callable=mock.PropertyMock) as mocker: - mocker.return_value = (25, 8) - res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) - if res is not None: - np.testing.assert_allclose(res, expected) - else: - assert res is expected + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration, resolution="20"), + info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), [ @@ -1682,7 +1687,13 @@ def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): assert res1 is None assert res2 is None - def test_start_time(self): + def test_start_end_time(self): """Test that the correct start time is returned.""" jp2_fh = jp2_builder("L1C", "B01") assert tilemd_dt == jp2_fh.start_time + assert tilemd_dt == jp2_fh.end_time + + def test_l1b_error(self): + """We can't process L1B data yet, so check an error is raised.""" + with pytest.raises(ValueError, match="Unsupported process level: L1B"): + jp2_builder("L1C", "B01", test_l1b=True) From 38a05d1d5d38ebec88d299d0145f73dacf006c17 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 16:40:28 +0200 Subject: [PATCH 22/55] Add docs note to S2/MSI specifying that L1B data is not currently supported. --- satpy/readers/msi_safe.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 1599bcd8fe..3c75169744 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""SAFE MSI L1C reader. +"""SAFE MSI L1C/L2A reader. The MSI data has a special value for saturated pixels. By default, these pixels are set to np.inf, but for some applications it might be desirable @@ -32,6 +32,10 @@ https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 +NOTE: At present, L1B data is not supported. If the user needs radiance data instead of counts or reflectances, these +are retrieved by first calculating the reflectance and then working back to the radiance. L1B radiance data support +will be added once the data is published onto the Copernicus data ecosystem. + """ import logging From b86f4b63926f9fbd166bf696c9c9f0aed4c762f2 Mon Sep 17 00:00:00 2001 From: verduijn Date: Wed, 25 Sep 2024 10:18:57 +0200 Subject: [PATCH 23/55] Fix wrong return type in 'check_satpy' comment --- satpy/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/utils.py b/satpy/utils.py index 77645a476a..b7a5c28376 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -498,8 +498,8 @@ def check_satpy(readers=None, writers=None, extras=None): writers (list or None): Limit writers checked to those specified extras (list or None): Limit extras checked to those specified - Returns: bool - True if all specified features were successfully loaded. 
+ Returns: + None """ from satpy.readers import configs_for_reader From fd5749d8992e0f1c25f50436189f3128bec2f08a Mon Sep 17 00:00:00 2001 From: verduijn Date: Fri, 27 Sep 2024 21:07:46 +0200 Subject: [PATCH 24/55] Add show_versions to utils and use in check_satpy --- satpy/tests/test_utils.py | 18 +++++---- satpy/utils.py | 80 +++++++++++++++++++++++++++++---------- 2 files changed, 72 insertions(+), 26 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 61255c8006..455971babd 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -278,13 +278,17 @@ def test_specific_check_satpy(self): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy with mock.patch("satpy.utils.print") as print_mock: - check_satpy(readers=["viirs_sdr"], extras=("cartopy", "__fake")) - checked_fake = False - for call in print_mock.mock_calls: - if len(call[1]) > 0 and "__fake" in call[1][0]: - assert "ok" not in call[1][1] - checked_fake = True - assert checked_fake, "Did not find __fake module mentioned in checks" + check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) + checked_fake = any("__fake: not installed" in c[1] for c in print_mock.mock_calls if len(c[1])) + assert checked_fake, "Did not find __fake package mentioned in checks" + +class TestShowVersions: + """Test the 'show_versions' function.""" + + def test_basic_show_versions(self): + """Test 'check_satpy' basic functionality.""" + from satpy.utils import show_versions + show_versions() def test_debug_on(caplog): diff --git a/satpy/utils.py b/satpy/utils.py index b7a5c28376..64c3be54b8 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -20,9 +20,11 @@ import contextlib import datetime +import importlib.metadata import logging import os import pathlib +import platform import warnings from contextlib import contextmanager from copy import deepcopy @@ -476,27 +478,72 @@ def _check_yaml_configs(configs, key): pass return diagnostic +def _check_package_version(package_name: str) -> Optional[str]: + """Check the version of `package_name`. -def _check_import(module_names): - """Import the specified modules and provide status.""" - diagnostics = {} - for module_name in module_names: - try: - __import__(module_name) - res = "ok" - except ImportError as err: - res = str(err) - diagnostics[module_name] = res - return diagnostics + Args: + package_name (str): the distribution package name. + + Returns: + the version number if available else `None`. + """ + try: + return importlib.metadata.version(package_name) + except importlib.metadata.PackageNotFoundError: + return None +def show_versions(packages=None): + """Shows version for system, python and common packages (if installed). -def check_satpy(readers=None, writers=None, extras=None): + Args: + packages (list or None): Limit packages to those specified. + + Returns: + None. 
+ + """ + packages = ( + ( + "cartopy", + "geoviews", + "numpy", + "dask", + "xarray", + "gdal", + "rasterio", + "pyproj", + "netcdf4", + "h5py", + "pyhdf", + "h5netcdf", + "fsspec", + ) + if packages is None + else packages + ) + + print("Versions") # noqa: T201 + print("======") # noqa: T201 + print(f"platform: {platform.platform()}") # noqa: T201 + print(f"python: {platform.python_version()}") # noqa: T201 + print() # noqa: T201 + + for package_name in sorted(packages): + package_version = _check_package_version(package_name) + print( # noqa: T201 + f"{package_name}: {package_version if package_version else 'not installed'}" + ) + + print() # noqa: T201 + + +def check_satpy(readers=None, writers=None, packages=None): """Check the satpy readers and writers for correct installation. Args: readers (list or None): Limit readers checked to those specified writers (list or None): Limit writers checked to those specified - extras (list or None): Limit extras checked to those specified + packages (list or None): Limit packages checked to those specified Returns: None @@ -517,12 +564,7 @@ def check_satpy(readers=None, writers=None, extras=None): print(writer + ": ", res) # noqa: T201 print() # noqa: T201 - print("Extras") # noqa: T201 - print("======") # noqa: T201 - module_names = extras if extras is not None else ("cartopy", "geoviews") - for module_name, res in sorted(_check_import(module_names).items()): - print(module_name + ": ", res) # noqa: T201 - print() # noqa: T201 + show_versions(packages=packages) def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: From d8bae6048c042cdf1563c896f20351b0a42c09e4 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 30 Sep 2024 14:23:11 +0000 Subject: [PATCH 25/55] Improve SEVIRI metadata documentation --- satpy/readers/seviri_base.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index ace63e3f12..1a98dda098 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -153,9 +153,21 @@ scn['IR_108']['y'] = mi scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000')) -* Raw metadata from the file header can be included by setting the reader - argument ``include_raw_metadata=True`` (HRIT and Native format only). Note - that this comes with a performance penalty of up to 10% if raw metadata from +* HRIT and Native readers can add raw metadata from the file header, such + as calibration coefficients, to dataset attributes. Use the reader keyword + argument ``include_raw_metadata``. Here's an example for extracting + calibration coefficients from Native files. + + .. code-block:: python + + scene = satpy.Scene(filenames, + reader='seviri_l1b_native', + reader_kwargs={'include_raw_metadata': True}) + scene.load(["IR_108"]) + mda = scene["IR_108"].attrs["raw_metadata"] + coefs = mda["15_DATA_HEADER"]["RadiometricProcessing"]["Level15ImageCalibration"] + + Note that this comes with a performance penalty of up to 10% if raw metadata from multiple segments or scans need to be combined. By default, arrays with more than 100 elements are excluded to limit the performance penalty. This threshold can be adjusted using the ``mda_max_array_size`` reader keyword @@ -164,8 +176,8 @@ .. 
code-block:: python scene = satpy.Scene(filenames, - reader='seviri_l1b_hrit/native', - reader_kwargs={'include_raw_metadata': True, + reader='seviri_l1b_native', + reader_kwargs={'include_raw_metadata': True, 'mda_max_array_size': 1000}) References: From 3d15b3feb232b7317bf69ff20024f32814f6209d Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 30 Sep 2024 14:36:34 +0000 Subject: [PATCH 26/55] Fall back to conda --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c9f5aa1f73..8bd9f63eaf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,7 +41,7 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - mamba-version: "*" + # mamba-version: "*" channels: conda-forge - name: Set cache environment variables From 6a4f2afab9d965c1bdc9825c83e1cd98e464748d Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 30 Sep 2024 14:46:00 +0000 Subject: [PATCH 27/55] Pin mamba version --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8bd9f63eaf..2cc2948a16 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,7 +41,7 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - # mamba-version: "*" + mamba-version: "1.5.10" channels: conda-forge - name: Set cache environment variables From 0ad7c32a9a1597300f9deb2903e83a33fd2c2f5e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Sep 2024 20:35:30 -0500 Subject: [PATCH 28/55] Fix deprecated "compositor" usage in modifier definitions --- satpy/etc/composites/sgli.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index 451c60d8e6..d5d46114a4 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -4,7 +4,7 @@ sensor_name: visir/sgli modifiers: rayleigh_corrected: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: @@ -17,7 +17,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_clean: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: @@ -30,7 +30,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_tropical: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: @@ -43,7 +43,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_desert: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: @@ -56,7 +56,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_land: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: From 3aa390b0ce60605cfba8a3da4c123f410329434f Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 10:17:12 +0000 Subject: [PATCH 29/55] Bump pypa/gh-action-pypi-publish from 1.10.0 to 1.10.2 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.0 to 1.10.2. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.0...v1.10.2) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 130b3a43b8..12042f4f36 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.0 + uses: pypa/gh-action-pypi-publish@v1.10.2 with: user: __token__ password: ${{ secrets.pypi_password }} From 9fb59bd5ce652238e3e4c0986ef5f7c651e292b1 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 2 Oct 2024 06:29:58 +0000 Subject: [PATCH 30/55] Use plain miniconda in CI --- .github/workflows/ci.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2cc2948a16..f74cc830ad 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,7 +41,6 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - mamba-version: "1.5.10" channels: conda-forge - name: Set cache environment variables From 78324b1f3e51ffcacde5b506182d108ce2b726ca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 2 Oct 2024 10:25:51 +0200 Subject: [PATCH 31/55] Update MSI SAFE reader with more tests and some code streamlining for error messages. --- satpy/readers/msi_safe.py | 19 ++++----- satpy/tests/reader_tests/test_msi_safe.py | 49 ++++++++++++++++++++++- 2 files changed, 58 insertions(+), 10 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 3c75169744..fa747e14d8 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -109,9 +109,9 @@ def _read_from_file(self, key): zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) - #else: + else: # For L1B the radiances can be directly computed from the digital counts. 
- #return self._mda.calibrate_to_radiances_l1b(proj, self._channel) + return self._mda.calibrate_to_radiances_l1b(proj, self._channel) if key["calibration"] == "counts": @@ -231,19 +231,22 @@ def solar_irradiance(self, band_name): def solar_irradiances(self): """Get the TOA solar irradiance values from the metadata.""" irrads = self.root.find(".//Solar_Irradiance_List") + if irrads is not None: solar_irrad = {int(irr.attrib["bandId"]): float(irr.text) for irr in irrads} - else: - solar_irrad = {} - return solar_irrad + if len(solar_irrad) > 0: + return solar_irrad + raise ValueError("No solar irradiance values were found in the metadata.") + + @cached_property def sun_earth_dist(self): """Get the sun-earth distance from the metadata.""" sed = self.root.find(".//U") - if sed is not None: + if sed.text is not None: return float(sed.text) - return -1 + raise ValueError("Sun-Earth distance in metadata is missing.") @cached_property def special_values(self): @@ -271,8 +274,6 @@ def calibrate_to_radiances_l1b(self, data, band_name): def calibrate_to_radiances(self, data, solar_zenith, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" sed = self.sun_earth_dist - if sed < 0.5 or sed > 1.5: - raise ValueError(f"Sun-Earth distance is incorrect in the metadata: {sed}") solar_irrad_band = self.solar_irradiance(band_name) solar_zenith = np.deg2rad(solar_zenith) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 4970767ea8..1f2e603ee2 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1648,7 +1648,8 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) + self.fake_data_l1b = xr.Dataset({"band_data": xr.DataArray([[[1000, 1205.5], [3000.4, 2542.]]], + dims=["band", "x", "y"])}) @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ @@ -1697,3 +1698,49 @@ def test_l1b_error(self): """We can't process L1B data yet, so check an error is raised.""" with pytest.raises(ValueError, match="Unsupported process level: L1B"): jp2_builder("L1C", "B01", test_l1b=True) + + + @pytest.mark.parametrize(("st_str", "en_str", "err_str"), + [ + ("", + "", + "Sun-Earth distance in metadata is missing."), + ("", + "", + "No solar irradiance values were found in the metadata."), + ]) + def test_missing_esd(self, st_str, en_str, err_str): + """Test that missing Earth-Sun distance in the metadata is handled correctly.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + + tmp_xml = str(mtd_l1c_xml) + p1 = tmp_xml.find(st_str) + p2 = tmp_xml.find(en_str) + tmp_xml = tmp_xml[:p1+len(st_str)] + tmp_xml[p2:] + + filename_info = dict(observation_time=fname_dt, dtile_number=None, + band_name="B01", fmission_id="S2A", process_level="L1C") + + xml_fh = SAFEMSIMDXML(StringIO(tmp_xml), filename_info, mock.MagicMock()) + + if st_str == "": + with pytest.raises(ValueError, match=err_str): + xml_fh.sun_earth_dist + else: + with pytest.raises(ValueError, match=err_str): + xml_fh.solar_irradiances + + + def test_l1b_calib(self): + """Test that Level-1B calibration can be performed.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + + filename_info = dict(observation_time=fname_dt, 
dtile_number=None, + band_name="B01", fmission_id="S2A", process_level="L1C") + + xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock()) + + res = xml_fh.calibrate_to_radiances_l1b(self.fake_data_l1b, "B01") + np.testing.assert_allclose(res.band_data.data.ravel(), + np.array((0.0, 51.752319, 503.77294, 388.33127)), + rtol=1e-4) From fb1521acb78abc0aa6ec3b6604f443052fde93b9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 2 Oct 2024 11:40:37 +0200 Subject: [PATCH 32/55] Update satpy/readers/msi_safe.py --- satpy/readers/msi_safe.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index fa747e14d8..ec6a39f084 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -238,8 +238,6 @@ def solar_irradiances(self): return solar_irrad raise ValueError("No solar irradiance values were found in the metadata.") - - @cached_property def sun_earth_dist(self): """Get the sun-earth distance from the metadata.""" From 8cbe7b89e1c899cf7d82ca99083e35db48b47237 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 23:53:12 +0000 Subject: [PATCH 33/55] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.6.3 → v0.6.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.6.3...v0.6.9) - [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0) - [github.com/PyCQA/bandit: 1.7.9 → 1.7.10](https://github.com/PyCQA/bandit/compare/1.7.9...1.7.10) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a6c4b2b3d2..82163f8b60 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,18 +3,18 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.6.3' + rev: 'v0.6.9' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.9' # Update me! + rev: '1.7.10' # Update me! 
hooks: - id: bandit args: [--ini, .bandit] From 3a93ea97616846541d8211d35f7489157d08a52f Mon Sep 17 00:00:00 2001 From: verduijn Date: Tue, 8 Oct 2024 08:54:49 +0200 Subject: [PATCH 34/55] Apply code formatting suggestions from code review Co-authored-by: Panu Lahtinen --- satpy/tests/test_utils.py | 1 + satpy/utils.py | 1 + 2 files changed, 2 insertions(+) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 455971babd..74b5a3ecfd 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -282,6 +282,7 @@ def test_specific_check_satpy(self): checked_fake = any("__fake: not installed" in c[1] for c in print_mock.mock_calls if len(c[1])) assert checked_fake, "Did not find __fake package mentioned in checks" + class TestShowVersions: """Test the 'show_versions' function.""" diff --git a/satpy/utils.py b/satpy/utils.py index 64c3be54b8..f4d456d4f6 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -492,6 +492,7 @@ def _check_package_version(package_name: str) -> Optional[str]: except importlib.metadata.PackageNotFoundError: return None + def show_versions(packages=None): """Shows version for system, python and common packages (if installed). From f53c6f191443a904922646176f1e4f159455712c Mon Sep 17 00:00:00 2001 From: verduijn Date: Tue, 8 Oct 2024 11:10:08 +0200 Subject: [PATCH 35/55] Add tests for `show_versions` with installed and missing packages --- satpy/tests/test_utils.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 74b5a3ecfd..a10e285886 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -291,6 +291,37 @@ def test_basic_show_versions(self): from satpy.utils import show_versions show_versions() + def test_show_specific_version(self): + """Test 'show_version' works with installed package.""" + from satpy.utils import show_versions + with mock.patch("satpy.utils.print") as print_mock: + show_versions(packages=["pytest"]) + + # no regex or `.__version__` based checks to prevent edge case failures + pytest_mentioned = any( + "pytest:" in c[1][0] for c in print_mock.mock_calls if len(c[1]) + ) + pytest_installed = all( + "pytest: not installed" not in c[1][0] + for c in print_mock.mock_calls + if len(c[1]) + ) + check_pytest = pytest_mentioned and pytest_installed + assert check_pytest, "pytest with package version not in print output" + + def test_show_missing_specific_version(self): + """Test 'show_version' works with missing package.""" + from satpy.utils import show_versions + + with mock.patch("satpy.utils.print") as print_mock: + show_versions(packages=["__fake"]) + checked_fake = any( + "__fake: not installed" in c[1] + for c in print_mock.mock_calls + if len(c[1]) + ) + assert checked_fake, "Did not find '__fake: not installed' in print output" + def test_debug_on(caplog): """Test that debug_on is working as expected.""" From 692c9b1506bfc5d0e1f238067a12fbebf0d9d8c9 Mon Sep 17 00:00:00 2001 From: verduijn Date: Tue, 8 Oct 2024 15:49:36 +0200 Subject: [PATCH 36/55] Use capys fixture instead of patching print in tests --- satpy/tests/test_utils.py | 42 ++++++++++++--------------------------- 1 file changed, 13 insertions(+), 29 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index a10e285886..c52006f1be 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -291,36 +291,25 @@ def test_basic_show_versions(self): from satpy.utils import show_versions show_versions() - def 
test_show_specific_version(self): + def test_show_specific_version(self, capsys): """Test 'show_version' works with installed package.""" from satpy.utils import show_versions - with mock.patch("satpy.utils.print") as print_mock: - show_versions(packages=["pytest"]) + show_versions(packages=["pytest"]) + out, _ = capsys.readouterr() - # no regex or `.__version__` based checks to prevent edge case failures - pytest_mentioned = any( - "pytest:" in c[1][0] for c in print_mock.mock_calls if len(c[1]) - ) - pytest_installed = all( - "pytest: not installed" not in c[1][0] - for c in print_mock.mock_calls - if len(c[1]) - ) - check_pytest = pytest_mentioned and pytest_installed - assert check_pytest, "pytest with package version not in print output" + pytest_mentioned = "pytest:" in out + pytest_installed = "pytest: not installed" not in out + check_pytest = pytest_mentioned and pytest_installed + assert check_pytest, "pytest with package version not in print output" - def test_show_missing_specific_version(self): + def test_show_missing_specific_version(self, capsys): """Test 'show_version' works with missing package.""" from satpy.utils import show_versions + show_versions(packages=["__fake"]) + out, _ = capsys.readouterr() - with mock.patch("satpy.utils.print") as print_mock: - show_versions(packages=["__fake"]) - checked_fake = any( - "__fake: not installed" in c[1] - for c in print_mock.mock_calls - if len(c[1]) - ) - assert checked_fake, "Did not find '__fake: not installed' in print output" + check_fake = "__fake: not installed" in out + assert check_fake, "Did not find '__fake: not installed' in print output" def test_debug_on(caplog): @@ -330,12 +319,7 @@ def test_debug_on(caplog): def depwarn(): logger = logging.getLogger("satpy.silly") logger.debug("But now it's just got SILLY.") - warnings.warn( - "Stop that! It's SILLY.", - DeprecationWarning, - stacklevel=2 - ) - + warnings.warn("Stop that! It's SILLY.", DeprecationWarning, stacklevel=2) warnings.filterwarnings("ignore", category=DeprecationWarning) debug_on(False) filts_before = warnings.filters.copy() From f66621091d478d009368ef066066e4878873c5df Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 09:01:06 +0300 Subject: [PATCH 37/55] Refactor generic image reader tests --- .../tests/reader_tests/test_generic_image.py | 554 ++++++++++-------- 1 file changed, 295 insertions(+), 259 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 40d7611eb4..21e2b5b09e 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -16,273 +16,309 @@ # satpy. If not, see . 
"""Unittests for generic image reader.""" -import os -import unittest +import datetime as dt import dask.array as da import numpy as np import pytest import xarray as xr +from pyresample.geometry import AreaDefinition +from rasterio.errors import NotGeoreferencedWarning +from satpy import Scene +from satpy.readers.generic_image import GenericImageFileHandler from satpy.tests.utils import RANDOM_GEN, make_dataid +DATA_DATE = dt.datetime(2018, 1, 1) -class TestGenericImage(unittest.TestCase): - """Test generic image reader.""" - - def setUp(self): - """Create temporary images to test on.""" - import datetime as dt - import tempfile - - from pyresample.geometry import AreaDefinition - - from satpy.scene import Scene - - self.date = dt.datetime(2018, 1, 1) - - # Create area definition - pcs_id = "ETRS89 / LAEA Europe" - proj4_dict = "EPSG:3035" - self.x_size = 100 - self.y_size = 100 - area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) - self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, - proj4_dict, self.x_size, self.y_size, - area_extent) - - # Create datasets for L, LA, RGB and RGBA mode images - r__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - g__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - b__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - a__ = 255 * np.ones((self.y_size, self.x_size), dtype=np.uint8) - a__[:10, :10] = 0 - a__ = da.from_array(a__, chunks=(50, 50)) - - r_nan__ = RANDOM_GEN.uniform(0., 1., size=(self.y_size, self.x_size)) - r_nan__[:10, :10] = np.nan - r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) - - ds_l = xr.DataArray(da.stack([r__]), dims=("bands", "y", "x"), - attrs={"name": "test_l", - "start_time": self.date}) - ds_l["bands"] = ["L"] - ds_la = xr.DataArray(da.stack([r__, a__]), dims=("bands", "y", "x"), - attrs={"name": "test_la", - "start_time": self.date}) - ds_la["bands"] = ["L", "A"] - ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), +X_SIZE = 100 +Y_SIZE = 100 +AREA_DEFINITION = AreaDefinition("geotiff_area", "ETRS89 / LAEA Europe", "ETRS89 / LAEA Europe", + "EPSG:3035", X_SIZE, Y_SIZE, + (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222)) + + +@pytest.fixture +def random_image_channel(): + """Create random data.""" + return da.random.randint(0, 256, size=(Y_SIZE, X_SIZE), chunks=(50, 50)).astype(np.uint8) + + +random_image_channel_l = random_image_channel +random_image_channel_r = random_image_channel +random_image_channel_g = random_image_channel +random_image_channel_b = random_image_channel + + +@pytest.fixture +def alpha_channel(): + """Create alpha channel with fully transparent and opaque areas.""" + a__ = 255 * np.ones((Y_SIZE, X_SIZE), dtype=np.uint8) + a__[:10, :10] = 0 + return da.from_array(a__, chunks=(50, 50)) + + +@pytest.fixture +def random_image_channel_with_nans(): + """Create random data and replace a portion of it with NaN values.""" + arr = RANDOM_GEN.uniform(0., 1., size=(Y_SIZE, X_SIZE)) + arr[:10, :10] = np.nan + return da.from_array(arr, chunks=(50, 50)) + + +@pytest.fixture +def test_image_l(tmp_path, random_image_channel_l): + """Create a test image with mode L.""" + dset = xr.DataArray(da.stack([random_image_channel_l]), dims=("bands", "y", "x"), + attrs={"name": "test_l", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l.png" + _save_image(dset, fname, "simple_image") + + return fname + + 
+@pytest.fixture +def test_image_l_nan(tmp_path, random_image_channel_with_nans): + """Create a test image with mode L where data has NaN values.""" + dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l_nan_nofillvalue.tif" + _save_image(dset, fname, "geotiff") + + return fname + + +@pytest.fixture +def test_image_l_nan_fill_value(tmp_path, random_image_channel_with_nans): + """Create a test image with mode L where data has NaN values and fill value is set.""" + dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l_nan_fillvalue.tif" + _save_image(dset, fname, "geotiff", fill_value=0) + + return fname + + +@pytest.fixture +def test_image_la(tmp_path, random_image_channel_l, alpha_channel): + """Create a test image with mode LA.""" + dset = xr.DataArray(da.stack([random_image_channel_l, alpha_channel]), + dims=("bands", "y", "x"), + attrs={"name": "test_la", "start_time": DATA_DATE}) + dset["bands"] = ["L", "A"] + fname = tmp_path / "20180101_0000_test_la.png" + _save_image(dset, fname, "simple_image") + + return fname + + +@pytest.fixture +def test_image_rgb(tmp_path, random_image_channel_r, random_image_channel_g, random_image_channel_b): + """Create a test image with mode RGB.""" + dset = xr.DataArray(da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b]), dims=("bands", "y", "x"), attrs={"name": "test_rgb", - "start_time": self.date}) - ds_rgb["bands"] = ["R", "G", "B"] - ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), - dims=("bands", "y", "x"), - attrs={"name": "test_rgba", - "start_time": self.date}) - ds_rgba["bands"] = ["R", "G", "B", "A"] - - ds_l_nan = xr.DataArray(da.stack([r_nan__]), - dims=("bands", "y", "x"), - attrs={"name": "test_l_nan", - "start_time": self.date}) - ds_l_nan["bands"] = ["L"] - - # Temp dir for the saved images - self.base_dir = tempfile.mkdtemp() - - # Put the datasets to Scene for easy saving - scn = Scene() - scn["l"] = ds_l - scn["l"].attrs["area"] = self.area_def - scn["la"] = ds_la - scn["la"].attrs["area"] = self.area_def - scn["rgb"] = ds_rgb - scn["rgb"].attrs["area"] = self.area_def - scn["rgba"] = ds_rgba - scn["rgba"].attrs["area"] = self.area_def - scn["l_nan"] = ds_l_nan - scn["l_nan"].attrs["area"] = self.area_def - - # Save the images. 
Two images in PNG and two in GeoTIFF - scn.save_dataset("l", os.path.join(self.base_dir, "test_l.png"), writer="simple_image") - scn.save_dataset("la", os.path.join(self.base_dir, "20180101_0000_test_la.png"), writer="simple_image") - scn.save_dataset("rgb", os.path.join(self.base_dir, "20180101_0000_test_rgb.tif"), writer="geotiff") - scn.save_dataset("rgba", os.path.join(self.base_dir, "test_rgba.tif"), writer="geotiff") - scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_fillvalue.tif"), - writer="geotiff", fill_value=0) - scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif"), - writer="geotiff") - - self.scn = scn - - def tearDown(self): - """Remove the temporary directory created for a test.""" - try: - import shutil - shutil.rmtree(self.base_dir, ignore_errors=True) - except OSError: - pass - - def test_png_scene(self): - """Test reading PNG images via satpy.Scene().""" - from rasterio.errors import NotGeoreferencedWarning - - from satpy import Scene - - fname = os.path.join(self.base_dir, "test_l.png") - with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None - assert "area" not in scn["image"].attrs - - fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") - with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - data = da.compute(scn["image"].data) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time == self.date - assert scn.end_time == self.date - assert "area" not in scn["image"].attrs - assert np.sum(np.isnan(data)) == 100 - - def test_geotiff_scene(self): - """Test reading TIFF images via satpy.Scene().""" - from satpy import Scene - - fname = os.path.join(self.base_dir, "20180101_0000_test_rgb.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (3, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time == self.date - assert scn.end_time == self.date - assert scn["image"].area == self.area_def - - fname = os.path.join(self.base_dir, "test_rgba.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (3, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None - assert scn["image"].area == self.area_def - - def test_geotiff_scene_nan(self): - """Test reading TIFF images originally containing NaN values via satpy.Scene().""" - from satpy import Scene - - fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 - - fname = os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) - - def test_GenericImageFileHandler(self): - """Test direct use of the 
reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_rgba.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image") - assert reader.file_content - assert reader.finfo["filename"] == fname - assert reader.finfo["start_time"] == self.date - assert reader.finfo["end_time"] == self.date - assert reader.area == self.area_def - assert reader.get_area_def(None) == self.area_def - assert reader.start_time == self.date - assert reader.end_time == self.date - - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - assert "spatial_ref" in dataset.coords - assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) - - def test_GenericImageFileHandler_masking_only_integer(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - class FakeGenericImageFileHandler(GenericImageFileHandler): - - def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): - """Get fake file content from 'get_test_content'.""" - super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) - self.file_content = file_content - self.dataset_name = None - self.file_content.update(kwargs) - - data = self.scn["rgba"] - - # do nothing if not integer - float_data = data / 255. - reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) - assert reader.get_dataset(make_dataid(name="image"), {}) is float_data - - # masking if integer - data = data.astype(np.uint32) - assert data.bands.size == 4 - reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) - ret_data = reader.get_dataset(make_dataid(name="image"), {}) - assert ret_data.bands.size == 3 - - def test_GenericImageFileHandler_datasetid(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_rgba.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image-custom") - assert reader.file_content - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - - def test_GenericImageFileHandler_nodata(self): - """Test nodata handling with direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image-custom") - assert reader.file_content - info = {"nodata_handling": "nan_mask"} - dataset = reader.get_dataset(foo, info) - assert isinstance(dataset, xr.DataArray) - assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) - assert np.isnan(dataset.attrs["_FillValue"]) - - info = {"nodata_handling": "fill_value"} - dataset = reader.get_dataset(foo, info) - assert isinstance(dataset, xr.DataArray) - assert np.sum(dataset.data[0][:10, :10].compute()) == 0 - assert dataset.attrs["_FillValue"] == 0 - - # default same as 'nodata_handling': 'fill_value' - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - assert np.sum(dataset.data[0][:10, :10].compute()) == 0 - assert dataset.attrs["_FillValue"] == 0 + "start_time": DATA_DATE}) + 
dset["bands"] = ["R", "G", "B"] + fname = tmp_path / "20180101_0000_test_rgb.tif" + _save_image(dset, fname, "geotiff") + + return fname + + +@pytest.fixture +def rgba_dset(random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel): + """Create an RGB dataset.""" + dset = xr.DataArray( + da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel]), + dims=("bands", "y", "x"), + attrs={"name": "test_rgba", + "start_time": DATA_DATE}) + dset["bands"] = ["R", "G", "B", "A"] + return dset + + +@pytest.fixture +def test_image_rgba(tmp_path, rgba_dset): + """Create a test image with mode RGBA.""" + fname = tmp_path / "test_rgba.tif" + _save_image(rgba_dset, fname, "geotiff") + + return fname + + +def _save_image(dset, fname, writer, fill_value=None): + scn = Scene() + scn["data"] = dset + scn["data"].attrs["area"] = AREA_DEFINITION + scn.save_dataset("data", str(fname), writer=writer, fill_value=fill_value) + + +def test_png_scene_l_mode(test_image_l): + """Test reading a PNG image with L mode via satpy.Scene().""" + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[test_image_l]) + scn.load(["image"]) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time is None + assert scn.end_time is None + assert "area" not in scn["image"].attrs + + +def test_png_scene_la_mode(test_image_la): + """Test reading a PNG image with LA mode via satpy.Scene().""" + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[test_image_la]) + scn.load(["image"]) + data = da.compute(scn["image"].data) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time == DATA_DATE + assert scn.end_time == DATA_DATE + assert "area" not in scn["image"].attrs + assert np.sum(np.isnan(data)) == 100 + + +def test_geotiff_scene_rgb(test_image_rgb): + """Test reading geotiff image in RGB mode via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_rgb]) + scn.load(["image"]) + assert scn["image"].shape == (3, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time == DATA_DATE + assert scn.end_time == DATA_DATE + assert scn["image"].area == AREA_DEFINITION + + +def test_geotiff_scene_rgba(test_image_rgba): + """Test reading geotiff image in RGBA mode via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_rgba]) + scn.load(["image"]) + assert scn["image"].shape == (3, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time is None + assert scn.end_time is None + assert scn["image"].area == AREA_DEFINITION + + +def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): + """Test reading geotiff image with fill value set via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_l_nan_fill_value]) + scn.load(["image"]) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 + + +def test_geotiff_scene_nan(test_image_l_nan): + """Test reading geotiff image with NaN values in it via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_l_nan]) + scn.load(["image"]) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) + + +def 
test_GenericImageFileHandler(test_image_rgba): + """Test direct use of the reader.""" + from satpy.readers.generic_image import GenericImageFileHandler + + fname_info = {"start_time": DATA_DATE} + ftype_info = {} + reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info) + + data_id = make_dataid(name="image") + assert reader.file_content + assert reader.finfo["filename"] == test_image_rgba + assert reader.finfo["start_time"] == DATA_DATE + assert reader.finfo["end_time"] == DATA_DATE + assert reader.area == AREA_DEFINITION + assert reader.get_area_def(None) == AREA_DEFINITION + assert reader.start_time == DATA_DATE + assert reader.end_time == DATA_DATE + + dataset = reader.get_dataset(data_id, {}) + assert isinstance(dataset, xr.DataArray) + assert "spatial_ref" in dataset.coords + assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) + + +class FakeGenericImageFileHandler(GenericImageFileHandler): + """Fake file handler.""" + + def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): + """Get fake file content from 'get_test_content'.""" + super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) + self.file_content = file_content + self.dataset_name = None + self.file_content.update(kwargs) + + +def test_GenericImageFileHandler_no_masking_for_float(rgba_dset): + """Test direct use of the reader for float_data.""" + # do nothing if not integer + float_data = rgba_dset / 255. + reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) + assert reader.get_dataset(make_dataid(name="image"), {}) is float_data + + +def test_GenericImageFileHandler_masking_for_integer(rgba_dset): + """Test direct use of the reader for float_data.""" + # masking if integer + data = rgba_dset.astype(np.uint32) + assert data.bands.size == 4 + reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) + ret_data = reader.get_dataset(make_dataid(name="image"), {}) + assert ret_data.bands.size == 3 + + +def test_GenericImageFileHandler_datasetid(test_image_rgba): + """Test direct use of the reader.""" + fname_info = {"start_time": DATA_DATE} + ftype_info = {} + reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info) + + data_id = make_dataid(name="image-custom") + assert reader.file_content + dataset = reader.get_dataset(data_id, {}) + assert isinstance(dataset, xr.DataArray) + + +@pytest.fixture +def reader_l_nan_fill_value(test_image_l_nan_fill_value): + """Create GenericImageFileHandler.""" + fname_info = {"start_time": DATA_DATE} + ftype_info = {} + return GenericImageFileHandler(test_image_l_nan_fill_value, fname_info, ftype_info) + + +def test_GenericImageFileHandler_nodata_nan_mask(reader_l_nan_fill_value): + """Test nodata handling with direct use of the reader with nodata handling: nan_mask.""" + data_id = make_dataid(name="image-custom") + assert reader_l_nan_fill_value.file_content + info = {"nodata_handling": "nan_mask"} + dataset = reader_l_nan_fill_value.get_dataset(data_id, info) + assert isinstance(dataset, xr.DataArray) + assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) + assert np.isnan(dataset.attrs["_FillValue"]) + + +def test_GenericImageFileHandler_nodata_fill_value(reader_l_nan_fill_value): + """Test nodata handling with direct use of the reader with nodata handling: fill_value.""" + info = {"nodata_handling": "fill_value"} + data_id = make_dataid(name="image-custom") + dataset = reader_l_nan_fill_value.get_dataset(data_id, info) + assert 
isinstance(dataset, xr.DataArray) + assert np.sum(dataset.data[0][:10, :10].compute()) == 0 + assert dataset.attrs["_FillValue"] == 0 + + +def test_GenericImageFileHandler_nodata_nan_mask_default(reader_l_nan_fill_value): + """Test nodata handling with direct use of the reader with default nodata handling.""" + data_id = make_dataid(name="image-custom") + dataset = reader_l_nan_fill_value.get_dataset(data_id, {}) + assert isinstance(dataset, xr.DataArray) + assert np.sum(dataset.data[0][:10, :10].compute()) == 0 + assert dataset.attrs["_FillValue"] == 0 From ac433d69d411c9030056d5bb3721458e7401ac4a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 11:05:27 +0300 Subject: [PATCH 38/55] Fix generic image reader to return float32 when float is needed --- satpy/readers/generic_image.py | 2 +- satpy/tests/reader_tests/test_generic_image.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py index 5032b7bbb8..c0e334302f 100644 --- a/satpy/readers/generic_image.py +++ b/satpy/readers/generic_image.py @@ -147,7 +147,7 @@ def _mask_image_data(data, info): if not np.issubdtype(data.dtype, np.integer): raise ValueError("Only integer datatypes can be used as a mask.") mask = data.data[-1, :, :] == np.iinfo(data.dtype).min - data = data.astype(np.float64) + data = data.astype(np.float32) masked_data = da.stack([da.where(mask, np.nan, data.data[i, :, :]) for i in range(data.shape[0])]) data.data = masked_data diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 21e2b5b09e..1ab3e073ec 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -167,6 +167,7 @@ def test_png_scene_l_mode(test_image_l): assert scn.start_time is None assert scn.end_time is None assert "area" not in scn["image"].attrs + assert scn["image"].dtype == np.float32 def test_png_scene_la_mode(test_image_la): @@ -181,6 +182,7 @@ def test_png_scene_la_mode(test_image_la): assert scn.end_time == DATA_DATE assert "area" not in scn["image"].attrs assert np.sum(np.isnan(data)) == 100 + assert scn["image"].dtype == np.float32 def test_geotiff_scene_rgb(test_image_rgb): @@ -192,6 +194,7 @@ def test_geotiff_scene_rgb(test_image_rgb): assert scn.start_time == DATA_DATE assert scn.end_time == DATA_DATE assert scn["image"].area == AREA_DEFINITION + assert scn["image"].dtype == np.float32 def test_geotiff_scene_rgba(test_image_rgba): @@ -203,6 +206,7 @@ def test_geotiff_scene_rgba(test_image_rgba): assert scn.start_time is None assert scn.end_time is None assert scn["image"].area == AREA_DEFINITION + assert scn["image"].dtype == np.float32 def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): @@ -211,6 +215,7 @@ def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): scn.load(["image"]) assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 + assert scn["image"].dtype == np.uint8 def test_geotiff_scene_nan(test_image_l_nan): @@ -219,6 +224,7 @@ def test_geotiff_scene_nan(test_image_l_nan): scn.load(["image"]) assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) + assert scn["image"].dtype == np.float32 def test_GenericImageFileHandler(test_image_rgba): From d129942fc97918a79c746a235f9d374e4fe80aae Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 11:11:21 +0300 Subject: 
[PATCH 39/55] Fix add_bands() to not promote the data type when adding alpha band --- satpy/composites/__init__.py | 1 + satpy/tests/test_composites.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0ac99c98f7..b032f23a32 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -985,6 +985,7 @@ def add_bands(data, bands): alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), + dtype=new_data[0].dtype, chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha["bands"] = "A" diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 2af010e9ac..1b60161a52 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1302,7 +1302,7 @@ def test_add_bands_l_rgb(self): from satpy.composites import add_bands # L + RGB -> RGB - data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) @@ -1311,13 +1311,14 @@ def test_add_bands_l_rgb(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}, attrs={"mode": "L"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), coords={"bands": ["R", "G", "B", "A"]}) @@ -1326,13 +1327,14 @@ def test_add_bands_l_rgba(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA - data = xr.DataArray(da.ones((2, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((2, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) @@ -1341,13 +1343,14 @@ def test_add_bands_la_rgb(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA - data = xr.DataArray(da.ones((3, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((3, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"mode": "RGB"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), @@ -1357,6 +1360,7 @@ def test_add_bands_rgb_rbga(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_p_l(self): """Test adding bands.""" From 
c6db95d92a61d8a2b2f09d378989cf626e21fc8e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 15:27:45 +0300 Subject: [PATCH 40/55] Put common asserts to a helper function --- .../tests/reader_tests/test_generic_image.py | 45 ++++++++----------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 1ab3e073ec..0d5d647420 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -162,12 +162,20 @@ def test_png_scene_l_mode(test_image_l): with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): scn = Scene(reader="generic_image", filenames=[test_image_l]) scn.load(["image"]) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None + _assert_image_common(scn, 1, None, None, np.float32) assert "area" not in scn["image"].attrs - assert scn["image"].dtype == np.float32 + + +def _assert_image_common(scn, channels, start_time, end_time, dtype): + assert scn["image"].shape == (channels, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + try: + assert scn.start_time is start_time + assert scn.end_time is end_time + except AssertionError: + assert scn.start_time == start_time + assert scn.end_time == end_time + assert scn["image"].dtype == dtype def test_png_scene_la_mode(test_image_la): @@ -176,55 +184,40 @@ def test_png_scene_la_mode(test_image_la): scn = Scene(reader="generic_image", filenames=[test_image_la]) scn.load(["image"]) data = da.compute(scn["image"].data) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time == DATA_DATE - assert scn.end_time == DATA_DATE - assert "area" not in scn["image"].attrs assert np.sum(np.isnan(data)) == 100 - assert scn["image"].dtype == np.float32 + assert "area" not in scn["image"].attrs + _assert_image_common(scn, 1, DATA_DATE, DATA_DATE, np.float32) def test_geotiff_scene_rgb(test_image_rgb): """Test reading geotiff image in RGB mode via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_rgb]) scn.load(["image"]) - assert scn["image"].shape == (3, Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time == DATA_DATE - assert scn.end_time == DATA_DATE assert scn["image"].area == AREA_DEFINITION - assert scn["image"].dtype == np.float32 + _assert_image_common(scn, 3, DATA_DATE, DATA_DATE, np.float32) def test_geotiff_scene_rgba(test_image_rgba): """Test reading geotiff image in RGBA mode via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_rgba]) scn.load(["image"]) - assert scn["image"].shape == (3, Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None + _assert_image_common(scn, 3, None, None, np.float32) assert scn["image"].area == AREA_DEFINITION - assert scn["image"].dtype == np.float32 def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): """Test reading geotiff image with fill value set via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_l_nan_fill_value]) scn.load(["image"]) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 - assert scn["image"].dtype == np.uint8 - + _assert_image_common(scn, 1, None, None, np.uint8) def 
test_geotiff_scene_nan(test_image_l_nan): """Test reading geotiff image with NaN values in it via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_l_nan]) scn.load(["image"]) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) - assert scn["image"].dtype == np.float32 + _assert_image_common(scn, 1, None, None, np.float32) def test_GenericImageFileHandler(test_image_rgba): From 044a0ec9404b0783fc61445d7767cdbfa79cc88a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 12:20:10 +0200 Subject: [PATCH 41/55] Import DataTree from xarray --- satpy/readers/insat3d_img_l1b_h5.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 41ddee5df6..dede8aefcd 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -7,11 +7,7 @@ import dask.array as da import numpy as np import xarray as xr - -from satpy.utils import import_error_helper - -with import_error_helper("xarray-datatree"): - from datatree import DataTree +from xarray.core.datatree import DataTree from satpy.readers.file_handlers import BaseFileHandler From 879893eb2cda176d48a1ec1f728a6f3690686348 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 12:44:06 +0200 Subject: [PATCH 42/55] Use spline interpolation for faster processing This requires https://github.com/pytroll/python-geotiepoints/pull/85 to be merged and released. --- satpy/readers/sar_c_safe.py | 13 ++- satpy/readers/sgli_l1b.py | 4 +- satpy/tests/reader_tests/test_sar_c_safe.py | 121 ++++---------------- 3 files changed, 33 insertions(+), 105 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 986440759a..0d42491cbd 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -51,7 +51,7 @@ import xarray as xr from dask import array as da from geotiepoints.geointerpolator import lonlat2xyz, xyz2lonlat -from geotiepoints.interpolator import MultipleGridInterpolator +from geotiepoints.interpolator import MultipleSplineInterpolator from xarray import DataArray from satpy.dataset.data_dict import DatasetDict @@ -636,10 +636,15 @@ def _get_lonlatalts_uncached(self): fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) - interpolator = MultipleGridInterpolator((ypoints, xpoints), x, y, z, gcp_alts) - hx, hy, hz, altitudes = interpolator.interpolate(fine_points, method="cubic", chunks=self.chunks) - longitudes, latitudes = xyz2lonlat(hx, hy, hz) + kx = 2 + ky = 2 + + interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=kx, ky=ky) + hx, hy, hz, altitudes = interpolator.interpolate(fine_points, chunks=self.chunks) + + + longitudes, latitudes = xyz2lonlat(hx, hy, hz) altitudes = xr.DataArray(altitudes, dims=["y", "x"]) longitudes = xr.DataArray(longitudes, dims=["y", "x"]) latitudes = xr.DataArray(latitudes, dims=["y", "x"]) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 079f93d2f3..f22f77b03a 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -175,7 +175,7 @@ def get_lon_lats(self, key): def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): """Interpolate spherical coordinates.""" - from geotiepoints.geointerpolator import GeoGridInterpolator + from geotiepoints.geointerpolator import GeoSplineInterpolator full_shape = 
(self.h5file["Image_data"].attrs["Number_of_lines"], self.h5file["Image_data"].attrs["Number_of_pixels"]) @@ -183,7 +183,7 @@ def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interva tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) - interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") + interpolator = GeoSplineInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, kx=2, ky=2) new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") return new_azi, new_pol diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 9e24c00c4e..26ff603b10 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -174,102 +174,26 @@ def measurement_filehandler(measurement_file, noise_filehandler, calibration_fil -expected_longitudes = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, - 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, - 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, - -6.75000000e-01, -8.95833333e-01, -9.77777778e-01, - -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, - 1.00000000e+00], - [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, - 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, - 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, - 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, - 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, - 7.41666667e-01], - [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, - 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, - 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, - 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, - 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, - 7.60912698e-01], - [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, - 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, - 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, - 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, - 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, - 1.00000000e+00], - [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, - 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, - 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, - 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, - 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, - 1.40119048e+00], - [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, - 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, - 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, - 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, - 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, - 1.90674603e+00], - [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, - 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, - 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, - 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, - 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, - 2.45892857e+00], - [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, - 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, - 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, - 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, - 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, - 3.00000000e+00], - [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, - 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, - 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, - 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, - 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, - 3.47222222e+00], - [-1.27500000e+00, 8.64613095e-01, 
2.33016227e+00, - 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, - 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, - 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, - 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, - 3.81785714e+00], - [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, - 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, - 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, - 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, - 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, - 3.97916667e+00], - [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, - 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, - 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, - 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, - 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, - 3.89841270e+00], - [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, - 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, - 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, - 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, - 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, - 3.51785714e+00], - [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, - 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, - 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, - 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, - 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, - 2.77976190e+00], - [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, - 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, - 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, - 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, - 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, - 1.62638889e+00], - [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, - 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, - 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, - 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, - 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, - 0.00000000e+00]]) +expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, + 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], + [1.0883956 , 1.25662247, 1.34380634, 1.34995884, 1.2750712 , + 1.11911385, 0.9390845 , 0.79202785, 0.67796547, 0.59691204], + [1.75505196, 1.74123364, 1.71731849, 1.68330292, 1.63918145, + 1.58494674, 1.52376394, 1.45880655, 1.39007883, 1.31758574], + [2., 1.99615628, 1.99615609, 2., 2.00768917, + 2.0192253 , 2.02115051, 2. 
, 1.95576762, 1.88845002], + [1.82332931, 2.02143515, 2.18032829, 2.30002491, 2.38053511, + 2.4218612 , 2.43113105, 2.41546985, 2.37487052, 2.3093278 ], + [1.22479001, 1.81701462, 2.26984318, 2.58335874, 2.75765719, + 2.79279164, 2.75366973, 2.70519769, 2.64737395, 2.58019762], + [0.51375081, 1.53781389, 2.3082042 , 2.82500549, 3.0885147 , + 3.09893859, 2.98922885, 2.89232293, 2.8082302 , 2.7369586 ], + [0., 1.33889733, 2.33891557, 3., 3.32266837, + 3.30731797, 3.1383157 , 3., 2.8923933 , 2.81551297], + [-0.31638932, 1.22031759, 2.36197571, 3.10836734, 3.46019271, + 3.41800603, 3.20098223, 3.02826595, 2.89989242, 2.81588745], + [-0.43541441, 1.18211505, 2.37738272, 3.1501186 , 3.50112948, + 3.43104055, 3.17724665, 2.97712796, 2.83072911, 2.73808164]]) class Calibration(Enum): @@ -304,8 +228,7 @@ def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) - expected = expected_longitudes - np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) + np.testing.assert_allclose(xarr.values, expected_longitudes) annotation_xml = b""" @@ -860,7 +783,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann query = DataID(reader._id_keys, **query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] - np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) + np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db) From 98762e4375368757f2d4d496fe3038439d49e942 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 12:47:29 +0200 Subject: [PATCH 43/55] Fix style --- satpy/tests/reader_tests/test_sar_c_safe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 26ff603b10..3ec3aa9577 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -174,8 +174,8 @@ def measurement_filehandler(measurement_file, noise_filehandler, calibration_fil -expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, - 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], +expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, + 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], [1.0883956 , 1.25662247, 1.34380634, 1.34995884, 1.2750712 , 1.11911385, 0.9390845 , 0.79202785, 0.67796547, 0.59691204], [1.75505196, 1.74123364, 1.71731849, 1.68330292, 1.63918145, From 7eb17ab75caf1914f117b09f893aecf500fc9360 Mon Sep 17 00:00:00 2001 From: verduijn Date: Sat, 12 Oct 2024 09:01:29 +0200 Subject: [PATCH 44/55] Replace patched `print` with capsys fixture --- satpy/tests/test_utils.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index c52006f1be..ac3a2e921f 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -274,13 +274,15 @@ def test_basic_check_satpy(self): from satpy.utils import check_satpy check_satpy() - def test_specific_check_satpy(self): + def test_specific_check_satpy(self, capsys): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy - with 
mock.patch("satpy.utils.print") as print_mock: - check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) - checked_fake = any("__fake: not installed" in c[1] for c in print_mock.mock_calls if len(c[1])) - assert checked_fake, "Did not find __fake package mentioned in checks" + check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) + out, _ = capsys.readouterr() + checked_fake = "__fake: not installed" in out + checked_viirs_sdr = "Readers\n=======\nviirs_sdr" in out + assert checked_fake, "Did not find __fake package mentioned in checks" + assert checked_viirs_sdr, "Did not find viirs_sdr in readers mentioned in checks" class TestShowVersions: From 63de5e07965b7ae3d421722baa52e07d83b9cbad Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 14 Oct 2024 11:20:56 +0200 Subject: [PATCH 45/55] Run sar tests only with geotiepoints >= 1.7.5 --- satpy/tests/reader_tests/test_sar_c_safe.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 3ec3aa9577..f7191a951a 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -32,6 +32,7 @@ from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation rasterio = pytest.importorskip("rasterio") +geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" From ee7edefbe57be93c56db5ed1d32a4a9e170b8359 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 14 Oct 2024 11:45:30 +0200 Subject: [PATCH 46/55] Do importskip before importing satpy --- satpy/tests/reader_tests/test_sar_c_safe.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index f7191a951a..531305798b 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -26,13 +26,14 @@ import pytest import yaml -from satpy._config import PACKAGE_CONFIG_PATH -from satpy.dataset import DataQuery -from satpy.dataset.dataid import DataID -from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation +geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") + +from satpy._config import PACKAGE_CONFIG_PATH # noqa: E402 +from satpy.dataset import DataQuery # noqa: E402 +from satpy.dataset.dataid import DataID # noqa: E402 +from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation # noqa: E402 rasterio = pytest.importorskip("rasterio") -geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" From 54acda0d3879592e5c5e487a0a925bcc3c51b69c Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Mon, 14 Oct 2024 16:45:01 +0200 Subject: [PATCH 47/55] Adapt to use pytest instead of unittest --- satpy/tests/reader_tests/test_fci_base.py | 29 ++++++++++------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py index 41ac956b67..1534965402 100644 --- a/satpy/tests/reader_tests/test_fci_base.py +++ b/satpy/tests/reader_tests/test_fci_base.py @@ -19,28 +19,25 @@ """FCI base reader tests package.""" # import datetime as dt -import unittest +import pytest from satpy.readers.fci_base import calculate_area_extent from satpy.tests.utils import 
make_dataid -class TestCalculateAreaExtent(unittest.TestCase): - """Test TestCalculateAreaExtent.""" +def test_calculate_area_extent(): + """Test function for calculate_area_extent.""" + dataset_id = make_dataid(name="dummy", resolution=2000.0) - def test_fun(self): - """Test function for TestCalculateAreaExtent.""" - dataset_id = make_dataid(name="dummmy", resolution=2000.) + area_dict = { + "nlines": 5568, + "ncols": 5568, + "line_step": dataset_id["resolution"], + "column_step": dataset_id["resolution"], + } - area_dict = { - "nlines": 5568, - "ncols": 5568, - "line_step": dataset_id["resolution"], - "column_step": dataset_id["resolution"], - } + area_extent = calculate_area_extent(area_dict) - area_extent = calculate_area_extent(area_dict) + expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) - expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) - - assert area_extent == expected + assert area_extent == expected From b263479bd782060c4058e20734546543b8b287a9 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:36:06 +0200 Subject: [PATCH 48/55] Update tests to use pytest instead of unittest --- satpy/tests/reader_tests/test_eum_l2_grib.py | 475 ++++++++++--------- 1 file changed, 239 insertions(+), 236 deletions(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 593eb2f5af..d3f5622bb7 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -20,7 +20,7 @@ import datetime import sys -import unittest +import pytest from unittest import mock import numpy as np @@ -60,240 +60,243 @@ FAKE_GID = [0, 1, 2, 3, None] -class TestEUML2GribFileHandler(unittest.TestCase): - """Test the EUML2GribFileHandler reader.""" - - @mock.patch("satpy.readers.eum_l2_grib.ec") - def setUp(self, ec_): - """Set up the test by creating a mocked eccodes library.""" - fake_gid_generator = (i for i in FAKE_GID) - ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_ = ec_ - - def common_checks(self, mock_file, dataset_id): - """Commmon checks for fci and seviri data.""" - # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct 
execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.eum_l2_grib.xr") - @mock.patch("satpy.readers.eum_l2_grib.da") - def test_seviri_data_reading(self, da_, xr_): - """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import EUML2GribFileHandler - from satpy.utils import get_legacy_chunk_size - chunk_size = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): - self.ec_.codes_get_values.return_value = np.ones(1000*1200) - self.ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] - self.reader = EUML2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft": "MET11", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) - }, - filetype_info={ - "file_type": "seviri" - } - ) - - dataset_id = make_dataid(name="dummmy", resolution=3000) - - self.common_checks(mock_file, dataset_id) - - # Check end_time - assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=50, second=0) - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 9.5 - }, - "sensor": "seviri", - "platform_name": "Meteosat-11" +@pytest.fixture +@mock.patch("satpy.readers.eum_l2_grib.ec") +def setup_reader(ec_): + """Set up the test by creating a mocked eccodes library.""" + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + return ec_ + + +def common_checks(ec_, reader, mock_file, dataset_id): + """Commmon checks for fci and seviri data.""" + # Checks that the codes_grib_multi_support_on function has been called + ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + ec_.codes_grib_new_from_file.reset_mock() + ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + 
ec_.codes_grib_new_from_file.reset_mock() + ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") +@mock.patch("satpy.readers.eum_l2_grib.xr") +@mock.patch("satpy.readers.eum_l2_grib.da") +def test_seviri_data_reading(da_, xr_, setup_reader): + """Test the reading of data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + ec_ = setup_reader + chunk_size = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): + ec_.codes_get_values.return_value = np.ones(1000 * 1200) + ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] + reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type": "seviri" } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == chunk_size - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6400000., - "b": 6300000., - "h": 32000000., - "ssp_lon": 9.5, - "nlines": 1000, - "ncols": 1200, - "a_name": "msg_seviri_rss_3km", - "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", - "p_id": "", + ) + + dataset_id = make_dataid(name="dummmy", resolution=3000) + + common_checks(ec_, reader, mock_file, dataset_id) + + # Check end_time + assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 9.5 + }, + "sensor": "seviri", + "platform_name": "Meteosat-11" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == chunk_size + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = reader._get_proj_area(0) + + expected_pdict = { + "a": 6400000., + 
"b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", + } + assert pdict == expected_pdict + expected_area_dict = { + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", + mock.Mock(name="seviri_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) + reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") +@mock.patch("satpy.readers.eum_l2_grib.xr") +@mock.patch("satpy.readers.eum_l2_grib.da") +def test_fci_data_reading(da_, xr_, setup_reader): + """Test the reading of fci data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + ec_ = setup_reader + chunk_size = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): + ec_.codes_get_values.return_value = np.ones(5568 * 5568) + ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] + reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft_id": "1", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=40, second=0), + "end_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + }, + filetype_info={ + "file_type": "fci" } - assert pdict == expected_pdict - expected_area_dict = { - "center_point": 500, - "north": 1200, - "east": 1, - "west": 1000, - "south": 1, - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", - mock.Mock(name="seviri_calculate_area_extent")) as cae: - with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", resolution=400.) 
- self.reader.get_area_def(dataset_id) - # Asserts that seviri_calculate_area_extent has been called with the correct arguments - expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, - "column_step": 400., "line_step": 400.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of seviri_calculate_area_extent - assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.eum_l2_grib.xr") - @mock.patch("satpy.readers.eum_l2_grib.da") - def test_fci_data_reading(self, da_, xr_): - """Test the reading of fci data from the product.""" - from satpy.readers.eum_l2_grib import EUML2GribFileHandler - from satpy.utils import get_legacy_chunk_size - chunk_size = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): - self.ec_.codes_get_values.return_value = np.ones(5568*5568) - self.ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] - self.reader = EUML2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft_id": "1", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=40, second=0), - "end_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=50, second=0) - }, - filetype_info={ - "file_type": "fci" - } - ) - - dataset_id = make_dataid(name="dummmy", resolution=2000) - - self.common_checks(mock_file, dataset_id) - - # Check end_time - assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=50, second=0) - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 0.0 - }, - "sensor": "fci", - "platform_name": "MTG-i1" - } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((5568, 5568))) - assert args[1] == chunk_size - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6378140000.0, - "b": 6356755000.0, - "h": 35785830098.0, - "ssp_lon": 0.0, - "nlines": 5568, - "ncols": 5568, - "a_name": "msg_fci_fdss_2km", - "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", - "p_id": "" - } - assert pdict == expected_pdict - expected_area_dict = { - "nlines": 5568, - "ncols": 5568 - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", - mock.Mock(name="fci_calculate_area_extent")) as cae: - with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", 
resolution=2000.) - self.reader.get_area_def(dataset_id) - # Asserts that seviri_calculate_area_extent has been called with the correct arguments - expected_args = ({"nlines": 5568, "ncols": 5568, - "column_step": 2000., "line_step": 2000.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of seviri_calculate_area_extent - assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" + ) + + dataset_id = make_dataid(name="dummmy", resolution=2000) + + common_checks(ec_, reader, mock_file, dataset_id) + + # Check end_time + assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 0.0 + }, + "sensor": "fci", + "platform_name": "MTG-i1" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((5568, 5568))) + assert args[1] == chunk_size + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = reader._get_proj_area(0) + + expected_pdict = { + "a": 6378140000.0, + "b": 6356755000.0, + "h": 35785830098.0, + "ssp_lon": 0.0, + "nlines": 5568, + "ncols": 5568, + "a_name": "msg_fci_fdss_2km", + "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", + "p_id": "" + } + assert pdict == expected_pdict + expected_area_dict = { + "nlines": 5568, + "ncols": 5568 + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", + mock.Mock(name="fci_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=2000.) 
+ reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"nlines": 5568, "ncols": 5568, + "column_step": 2000., "line_step": 2000.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" From 12ebe3feab2d9f7b92bca8eabe059776291500cf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 15 Oct 2024 22:02:13 +0200 Subject: [PATCH 49/55] Update satpy/readers/sar_c_safe.py --- satpy/readers/sar_c_safe.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 0d42491cbd..b6d84e8fb7 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -637,10 +637,8 @@ def _get_lonlatalts_uncached(self): fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) - kx = 2 - ky = 2 - interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=kx, ky=ky) + interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=2, ky=2) hx, hy, hz, altitudes = interpolator.interpolate(fine_points, chunks=self.chunks) From eba7964e43678181f1b3bded706ae50f53775c49 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 07:25:43 +0000 Subject: [PATCH 50/55] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/reader_tests/test_eum_l2_grib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index d3f5622bb7..50c6be5398 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -20,10 +20,10 @@ import datetime import sys -import pytest from unittest import mock import numpy as np +import pytest from satpy.tests.utils import make_dataid From ba307335d78aa050e19928c4082d92f06413fb47 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 16 Oct 2024 09:36:54 +0200 Subject: [PATCH 51/55] Remove unused imports --- satpy/tests/reader_tests/test_fci_base.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py index 1534965402..eda7eee8a1 100644 --- a/satpy/tests/reader_tests/test_fci_base.py +++ b/satpy/tests/reader_tests/test_fci_base.py @@ -18,9 +18,6 @@ """FCI base reader tests package.""" -# import datetime as dt -import pytest - from satpy.readers.fci_base import calculate_area_extent from satpy.tests.utils import make_dataid From 30ee8992cf62a27a084562265dbd20dee9b1f6e0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 16 Oct 2024 18:00:04 +0200 Subject: [PATCH 52/55] Add no-op `image_ready` enhancement --- satpy/enhancements/__init__.py | 5 +++++ satpy/etc/enhancements/generic.yaml | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index a44ca590cf..95a147aafb 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py 
@@ -653,3 +653,8 @@ def _jma_true_color_reproduction(img_data, platform=None): output = da.dot(img_data.T, ccm.T) return output.T + + +def no_op(img): + """Do not do anything to the image.""" + return img.data diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 5d17154aab..7e23281531 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1285,3 +1285,9 @@ enhancements: imager_with_lightning: standard_name: imager_with_lightning operations: [] + + image_ready: + standard_name: image_ready + operations: + - name: no_op + method: !!python/name:satpy.enhancements.no_op From 4c0b1e71f16a78dd70494664bfd23e5eb35bd3e3 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 16 Oct 2024 18:01:13 +0200 Subject: [PATCH 53/55] Fix style --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index b032f23a32..d7518be91d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -721,7 +721,7 @@ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", inclu self.day_night = day_night self.include_alpha = include_alpha self._has_sza = False - super(DayNightCompositor, self).__init__(name, **kwargs) + super().__init__(name, **kwargs) def __call__( self, From 3ab5c64ba18e504bd43bb3d184137971caa92c14 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 08:23:31 +0200 Subject: [PATCH 54/55] Fix warning --- satpy/tests/enhancement_tests/test_enhancements.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index b30a073968..89ff21aafa 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -456,10 +456,10 @@ def test_cmap_list(self): """Test that colors can be a list/tuple.""" from satpy.enhancements import create_colormap colors = [ - [0, 0, 1], - [1, 0, 1], - [0, 1, 1], - [1, 1, 1], + [0., 0., 1.], + [1., 0., 1.], + [0., 1., 1.], + [1., 1., 1.], ] values = [2, 4, 6, 8] cmap = create_colormap({"colors": colors, "color_scale": 1}) From b408e65f14c58b4df296262180b319b09989d1b0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 08:24:11 +0200 Subject: [PATCH 55/55] Test no-op --- satpy/tests/enhancement_tests/test_enhancements.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 89ff21aafa..b0f2b3d31b 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -711,3 +711,15 @@ def test_jma_true_color_reproduction(self): img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) + + +def test_no_op_enhancement(): + """Test the no-op enhancement.""" + from satpy.enhancements import no_op + + data = da.arange(-100, 1000, 110).reshape(2, 5) + rgb_data = np.stack([data, data, data]) + rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"platform_name": "Himawari-8"}) + assert no_op(rgb) is rgb.data