diff --git a/Load_S1_Mosaic.ipynb b/Load_S1_Mosaic.ipynb
index 7acc961..00ff809 100644
--- a/Load_S1_Mosaic.ipynb
+++ b/Load_S1_Mosaic.ipynb
@@ -7,7 +7,9 @@
    "outputs": [],
    "source": [
     "from pystac_client import Client\n",
-    "from odc.stac import load"
+    "from odc.stac import load\n",
+    "\n",
+    "import numpy as np"
    ]
   },
   {
@@ -29,7 +31,7 @@
     "fiji_bbox = [177.2, -18.4, 178.9, -17.2]\n",
     "datetime = \"2023\"\n",
     "\n",
-    "items = list(client.search(collections=[\"dep_s1_mosaic\"], bbox=fiji_bbox, datetime=datetime).items())\n",
+    "items = list(client.search(collections=[\"dep_s1_mosaic\"], bbox=fiji_bbox).items())\n",
     "\n",
     "print(f\"Found {len(items)} items\")"
    ]
@@ -40,7 +42,20 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "data = load(items, bbox=fiji_bbox, resolution=100, chunks={}).squeeze()\n",
+    "cfg = {\n",
+    "    \"dep_s1_mosaic\": {\n",
+    "        \"assets\": {\n",
+    "            \"*\": {\n",
+    "                \"data_type\": \"float32\",\n",
+    "            },\n",
+    "            \"count\": {\n",
+    "                \"data_type\": \"int64\",\n",
+    "            },\n",
+    "        }\n",
+    "    }\n",
+    "}\n",
+    "\n",
+    "data = load(items, bbox=fiji_bbox, resolution=100, chunks={}, stac_cfg=cfg)\n",
     "data"
    ]
   },
@@ -50,7 +65,16 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "data.median_vv_vh.plot.imshow(size=10, robust=True)"
+    "data.mean_vv.plot.imshow(col=\"time\", size=10, robust=True)"
    ]
   },
   {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "log_10_vv = (10 * np.log10(data.mean_vv)).compute()"
    ]
   },
   {
@@ -58,7 +82,18 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
-   "source": []
+   "source": [
+    "log_10_vv.plot.imshow(col=\"time\", col_wrap=2, size=10, robust=True)"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "data[\"count\"].plot.imshow(col=\"time\", col_wrap=2, size=10, robust=True)"
+   ]
+  }
  ],
  "metadata": {
diff --git a/Test_S1_Mosaic.ipynb b/Test_S1_Mosaic.ipynb
index 0b4d512..91aa445 100644
--- a/Test_S1_Mosaic.ipynb
+++ b/Test_S1_Mosaic.ipynb
@@ -194,7 +194,7 @@
     "    convert_to_int16=False\n",
     ")\n",
     "print(f\"Writing to: {dep_path._folder(item_id)}\")\n",
-    "out_files = writer.write(loaded, item_id)"
+    "out_files = writer.write(output_data, item_id)"
    ]
   },
   {
@@ -216,10 +216,22 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "d = load([item])\n",
-    "\n",
-    "d.median_vv_vh.plot.imshow(size=8, robust=True)"
+    "d = load([item])"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "d[\"count\"].plot()"
    ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": []
   }
  ],
  "metadata": {
diff --git a/src/run_task.py b/src/run_task.py
index 7267c56..7785907 100644
--- a/src/run_task.py
+++ b/src/run_task.py
@@ -63,10 +63,6 @@ def _get_items(self, area):
             area, collections=["sentinel-1-rtc"], datetime=self.datetime, query=query
         )
 
-        # Fix a few issues with STAC items
-        # fix_bad_epsgs(item_collection)
-        # item_collection = remove_bad_items(item_collection)
-
         if len(item_collection) == 0:
             raise EmptyCollectionError()
 
@@ -99,14 +95,17 @@ def process(self, input_data: DataArray) -> Dataset:
             arrays.append(input_data[band].std("time").rename(f"std_{band}"))
 
         # Add count
-        arrays.append(input_data["vv"].count("time").rename("count"))
+        arrays.append(input_data["vv"].count("time").rename("count").astype("int16"))
 
         # Merge the arrays together into a Dataset with the names we want
         data = merge(arrays, compat="override")
 
         # Set nodata on all the outputs
         for band in data.data_vars:
-            data[band].attrs["nodata"] = -32768
+            if band == "count":
+                data[band].attrs["nodata"] = 0
+            else:
+                data[band].attrs["nodata"] = -32768
 
         output = set_stac_properties(input_data, data)
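
Note: taken together, these changes settle on two conventions: the "count" band is computed as int16 with nodata 0 while the statistics bands keep nodata -32768, and linear VV backscatter is converted to decibels (10 * log10) before plotting. The following is a minimal standalone sketch of both conventions, not part of the diff; it uses synthetic data, and only the band names, nodata values, and the dB formula are taken from the changes above.

import numpy as np
import xarray as xr

# Synthetic "vv" backscatter stack: 3 time steps of a small 4x4 scene.
rng = np.random.default_rng(0)
vv = xr.DataArray(
    rng.random((3, 4, 4)).astype("float32"),
    dims=("time", "y", "x"),
    name="vv",
)

# Per-band summaries, mirroring the shape of process() in src/run_task.py:
# mean and std stay float, the observation count is cast to int16.
arrays = [
    vv.mean("time").rename("mean_vv"),
    vv.std("time").rename("std_vv"),
    vv.count("time").rename("count").astype("int16"),
]
data = xr.merge(arrays, compat="override")

# Per-band nodata: 0 for the observation count, -32768 for the statistics.
for band in data.data_vars:
    data[band].attrs["nodata"] = 0 if band == "count" else -32768

# dB conversion used for plotting in Load_S1_Mosaic.ipynb.
log_10_vv = 10 * np.log10(data.mean_vv)

print({band: data[band].attrs["nodata"] for band in data.data_vars})
print(float(log_10_vv.min()), float(log_10_vv.max()))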