From 00883a9e0e3c1c541a905f1b5d4f5f1677461114 Mon Sep 17 00:00:00 2001
From: MartinSchobben
Date: Wed, 14 Aug 2024 15:56:38 +0000
Subject: [PATCH] Quarto output

---
 notebooks/01_classification.ipynb | 124 +++++++++++++++---------------
 notebooks/02_floodmapping.ipynb   |  50 ++++++------
 2 files changed, 87 insertions(+), 87 deletions(-)

diff --git a/notebooks/01_classification.ipynb b/notebooks/01_classification.ipynb
index 9e43386..7e3de95 100644
--- a/notebooks/01_classification.ipynb
+++ b/notebooks/01_classification.ipynb
@@ -85,12 +85,12 @@
 "geom = {\n",
 " 'type': 'Polygon',\n",
 " 'coordinates': [[\n",
- " [minx, miny`,\n",
- " [minx, maxy`,\n",
- " [maxx, maxy`,\n",
- " [maxx, miny`,\n",
- " [minx, miny`\n",
- " ``\n",
+ " [minx, miny],\n",
+ " [minx, maxy],\n",
+ " [maxx, maxy],\n",
+ " [maxx, miny],\n",
+ " [minx, miny]\n",
+ " ]]\n",
 "}\n",
 "\n",
 "# Set Temporal extent\n",
@@ -108,7 +108,7 @@
 " \"https://earth-search.aws.element84.com/v1\"\n",
 ").search(\n",
 " intersects=geom,\n",
- " collections=[\"sentinel-2-l2a\"`,\n",
+ " collections=[\"sentinel-2-l2a\"],\n",
 " datetime=date_query,\n",
 " limit=100,\n",
 ").item_collection()\n",
@@ -141,7 +141,7 @@
 "# lazily combine items\n",
 "ds_odc = odc.stac.load(\n",
 " items,\n",
- " bands=[\"scl\", \"red\", \"green\", \"blue\", \"nir\"`,\n",
+ " bands=[\"scl\", \"red\", \"green\", \"blue\", \"nir\"],\n",
 " chunks={'time': 5, 'x': 600, 'y': 600},\n",
 " geobox=geobox,\n",
 " resampling=\"bilinear\",\n",
@@ -173,7 +173,7 @@
 " # include only vegetated, not_vegetated, water, and snow\n",
 " return ((data > 3) & (data < 7)) | (data==11)\n",
 "\n",
- "ds_odc['valid'` = is_valid_pixel(ds_odc.scl)\n",
+ "ds_odc['valid'] = is_valid_pixel(ds_odc.scl)\n",
 "#ds_odc.valid.sum(\"time\").plot()\n",
 "\n",
 "def avg(ds):\n",
@@ -181,7 +181,7 @@
 "\n",
 "# compute the masked median\n",
 "rgb_median = (\n",
- " ds_odc[['red', 'green', 'blue'``\n",
+ " ds_odc[['red', 'green', 'blue']]\n",
 " .where(ds_odc.valid)\n",
 " .to_dataarray(dim=\"band\")\n",
 " .transpose(..., \"band\")\n",
@@ -210,7 +210,7 @@
 "source": [
 "# compute the false color image\n",
 "fc_median = (\n",
- " ds_odc[['nir', 'green', 'blue'``\n",
+ " ds_odc[['nir', 'green', 'blue']]\n",
 " .where(ds_odc.valid)\n",
 " .to_dataarray(dim=\"band\")\n",
 " .transpose(..., \"band\")\n",
@@ -276,27 +276,27 @@
 "source": [
 "# Define Polygons\n",
 "forest_areas = {\n",
- " 0: [Polygon([(16.482772, 47.901753), (16.465133, 47.870124), (16.510142, 47.874382), (16.482772, 47.901753)`)`,\n",
- " 1: [Polygon([(16.594079, 47.938855), (16.581914, 47.894454), (16.620233, 47.910268), (16.594079, 47.938855)`)`,\n",
- " 2: [Polygon([(16.67984, 47.978998), (16.637263, 47.971091), (16.660376, 47.929123), (16.67984, 47.978998)`)`,\n",
- " 3: [Polygon([(16.756477, 48.000286), (16.723024, 47.983256), (16.739446, 47.972916), (16.756477, 48.000286)`)`,\n",
- " 4: [Polygon([(16.80696, 48.135923), (16.780806, 48.125583), (16.798445, 48.115243), (16.80696, 48.135923)`)`,\n",
- " 5: [Polygon([(16.684097, 48.144438), (16.664634, 48.124366), (16.690788, 48.118892), (16.684097, 48.144438)`)`,\n",
- " 6: [Polygon([(16.550894, 48.169984), (16.530822, 48.165118), (16.558801, 48.137139), (16.550894, 48.169984)`)`,\n",
- " 7: [Polygon([(16.588604, 48.402329), (16.556976, 48.401112), (16.580697, 48.382865), (16.588604, 48.402329)`)`,\n",
+ " 0: [Polygon([(16.482772, 47.901753), (16.465133, 47.870124), (16.510142, 47.874382), (16.482772, 47.901753)])],\n",
+ " 1: [Polygon([(16.594079, 47.938855), (16.581914, 47.894454), (16.620233, 47.910268), (16.594079, 47.938855)])],\n",
+ " 2: [Polygon([(16.67984, 47.978998), (16.637263, 47.971091), (16.660376, 47.929123), (16.67984, 47.978998)])],\n",
+ " 3: [Polygon([(16.756477, 48.000286), (16.723024, 47.983256), (16.739446, 47.972916), (16.756477, 48.000286)])],\n",
+ " 4: [Polygon([(16.80696, 48.135923), (16.780806, 48.125583), (16.798445, 48.115243), (16.80696, 48.135923)])],\n",
+ " 5: [Polygon([(16.684097, 48.144438), (16.664634, 48.124366), (16.690788, 48.118892), (16.684097, 48.144438)])],\n",
+ " 6: [Polygon([(16.550894, 48.169984), (16.530822, 48.165118), (16.558801, 48.137139), (16.550894, 48.169984)])],\n",
+ " 7: [Polygon([(16.588604, 48.402329), (16.556976, 48.401112), (16.580697, 48.382865), (16.588604, 48.402329)])],\n",
 "}\n",
 "\n",
 "nonforest_areas = {\n",
- " 0: [Polygon([(16.674974, 48.269126), (16.623882, 48.236281), (16.682272, 48.213168), (16.674974, 48.269126)`)`,\n",
- " 1: [Polygon([(16.375723, 48.228374), (16.357476, 48.188839), (16.399444, 48.185798), (16.375723, 48.228374)`)`,\n",
- " 2: [Polygon([(16.457834, 48.26426), (16.418907, 48.267301), (16.440804, 48.23324), (16.457834, 48.26426)`)`,\n",
- " 3: [Polygon([(16.519266, 48.101861), (16.470607, 48.100645), (16.500411, 48.07145), (16.519266, 48.101861)`)`,\n",
- " 4: [Polygon([(16.453577, 48.051986), (16.412217, 48.067192), (16.425598, 48.012451), (16.453577, 48.051986)`)`,\n",
+ " 0: [Polygon([(16.674974, 48.269126), (16.623882, 48.236281), (16.682272, 48.213168), (16.674974, 48.269126)])],\n",
+ " 1: [Polygon([(16.375723, 48.228374), (16.357476, 48.188839), (16.399444, 48.185798), (16.375723, 48.228374)])],\n",
+ " 2: [Polygon([(16.457834, 48.26426), (16.418907, 48.267301), (16.440804, 48.23324), (16.457834, 48.26426)])],\n",
+ " 3: [Polygon([(16.519266, 48.101861), (16.470607, 48.100645), (16.500411, 48.07145), (16.519266, 48.101861)])],\n",
+ " 4: [Polygon([(16.453577, 48.051986), (16.412217, 48.067192), (16.425598, 48.012451), (16.453577, 48.051986)])],\n",
 "}\n",
 "\n",
 "# GeoPandas DataFrame from Polygons\n",
- "forest_df = gpd.GeoDataFrame({'geometry': [poly[0` for poly in forest_areas.values()`}, crs=\"EPSG:4326\")\n",
- "nonforest_df = gpd.GeoDataFrame({'geometry': [poly[0` for poly in nonforest_areas.values()`}, crs=\"EPSG:4326\")\n",
+ "forest_df = gpd.GeoDataFrame({'geometry': [poly[0] for poly in forest_areas.values()]}, crs=\"EPSG:4326\")\n",
+ "nonforest_df = gpd.GeoDataFrame({'geometry': [poly[0] for poly in nonforest_areas.values()]}, crs=\"EPSG:4326\")\n",
 "\n",
 "\n",
 "# Plotting Regions of Interest\n",
@@ -324,9 +324,9 @@
 "outputs": [],
 "source": [
 "# Classifying dataset (only necessary bands)\n",
- "bands = ['red', 'green', 'blue', 'nir'`\n",
+ "bands = ['red', 'green', 'blue', 'nir']\n",
 "ds_class = (\n",
- " ds_odc[bands`\n",
+ " ds_odc[bands]\n",
 " .where(ds_odc.valid)\n",
 " .median(dim=\"time\")\n",
 ")\n",
@@ -343,24 +343,24 @@
 "data_dict_nonfeat = {idx: clip_array(ds_class, polygon) for idx, polygon in nonforest_areas.items()}\n",
 "\n",
 "# Reshape the polygon dataarrays to get a tuple (one value per band) of pixel values\n",
- "feat_data = [xarray.to_array().values.reshape(len(bands),-1).T for xarray in data_dict_feat.values()` # replaced median_data_dict_feat with data_dict_feat\n",
- "nonfeat_data = [xarray.to_array().values.reshape(len(bands),-1).T for xarray in data_dict_nonfeat.values()` # replaced median_data_dict_feat with data_dict_feat\n",
+ "feat_data = [xarray.to_array().values.reshape(len(bands),-1).T for xarray in data_dict_feat.values()] # replaced median_data_dict_feat with data_dict_feat\n",
+ "nonfeat_data = [xarray.to_array().values.reshape(len(bands),-1).T for xarray in data_dict_nonfeat.values()] # replaced median_data_dict_feat with data_dict_feat\n",
 "\n",
 "# The rows of the different polygons are concatenated to a single array for further processing\n",
 "feat_values = np.concatenate(feat_data)\n",
 "nonfeat_values = np.concatenate(nonfeat_data)\n",
 "\n",
 "# Drop NaN values\n",
- "X_feat_data = feat_values[~np.isnan(feat_values).any(axis=1)`\n",
- "X_nonfeat_data = nonfeat_values[~np.isnan(nonfeat_values).any(axis=1)`\n",
+ "X_feat_data = feat_values[~np.isnan(feat_values).any(axis=1)]\n",
+ "X_nonfeat_data = nonfeat_values[~np.isnan(nonfeat_values).any(axis=1)]\n",
 "\n",
 "# Creating Output Vector (1 for pixel is feature; 0 for pixel is not feature)\n",
- "y_feat_data = np.ones(X_feat_data.shape[0`)\n",
- "y_nonfeat_data = np.zeros(X_nonfeat_data.shape[0`)\n",
+ "y_feat_data = np.ones(X_feat_data.shape[0])\n",
+ "y_nonfeat_data = np.zeros(X_nonfeat_data.shape[0])\n",
 "\n",
 "# Concatenate all Classes for training \n",
- "X = np.concatenate([X_feat_data, X_nonfeat_data`)\n",
- "y = np.concatenate([y_feat_data, y_nonfeat_data`)\n",
+ "X = np.concatenate([X_feat_data, X_nonfeat_data])\n",
+ "y = np.concatenate([y_feat_data, y_nonfeat_data])\n",
 "\n",
 "# Split into Training and Testing Data.\n",
 "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=42)"
@@ -379,10 +379,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "image_data = ds_class[bands`.to_array(dim='band').transpose('latitude', 'longitude', 'band')\n",
+ "image_data = ds_class[bands].to_array(dim='band').transpose('latitude', 'longitude', 'band')\n",
 "\n",
 "# Reshape the image data\n",
- "num_of_pixels = ds_class.sizes['longitude'` * ds_class.sizes['latitude'`\n",
+ "num_of_pixels = ds_class.sizes['longitude'] * ds_class.sizes['latitude']\n",
 "num_of_bands = len(bands)\n",
 "X_image_data = image_data.values.reshape(num_of_pixels, num_of_bands)"
 ]
@@ -410,10 +410,10 @@
 "\n",
 "# Prediction on image\n",
 "nb_predict_img = nb.predict(X_image_data)\n",
- "nb_predict_img = nb_predict_img.reshape(ds_class.sizes['latitude'`, ds_class.sizes['longitude'`)\n",
+ "nb_predict_img = nb_predict_img.reshape(ds_class.sizes['latitude'], ds_class.sizes['longitude'])\n",
 "\n",
 "# Adding the Naive Bayes Prediction to the dataset\n",
- "ds_class['NB-forest'` = xr.DataArray(nb_predict_img, dims=['latitude', 'longitude'`, coords={'longitude': ds_class['longitude'`, 'latitude': ds_class['latitude'`})"
+ "ds_class['NB-forest'] = xr.DataArray(nb_predict_img, dims=['latitude', 'longitude'], coords={'longitude': ds_class['longitude'], 'latitude': ds_class['latitude']})"
 ]
 },
 {
@@ -431,11 +431,11 @@
 "source": [
 "# Plot Naive Bayes\n",
 "alpha = 1\t\n",
- "cmap_green = colors.ListedColormap([(1, 1, 1, alpha), 'green'`)\n",
+ "cmap_green = colors.ListedColormap([(1, 1, 1, alpha), 'green'])\n",
 "\n",
- "plot = ds_class['NB-forest'`.plot.imshow(cmap=cmap_green, cbar_kwargs={'ticks': [0.25,0.75`})\n",
+ "plot = ds_class['NB-forest'].plot.imshow(cmap=cmap_green, cbar_kwargs={'ticks': [0.25,0.75]})\n",
 "cbar = plot.colorbar\n",
- "cbar.set_ticklabels(['non-forest', 'forest'`)\n",
+ "cbar.set_ticklabels(['non-forest', 'forest'])\n",
 "plot.axes.set_title('Naive Bayes Classification')\n",
 "plt.show()\n",
 "\n",
@@ -444,8 +444,8 @@
 "\n",
 "# Print the confusion matrix\n",
 "con_mat_nb = pd.DataFrame(confusion_matrix(y_test, nb_predict), \n",
- " index=['Actual Negative', 'Actual Positive'`, \n",
- " columns=['Predicted Negative', 'Predicted Positive'`)\n",
+ " index=['Actual Negative', 'Actual Positive'], \n",
+ " columns=['Predicted Negative', 'Predicted Positive'])\n",
 "display(con_mat_nb)"
 ]
 },
@@ -470,14 +470,14 @@
 "\n",
 "# Prediction on image\n",
 "rf_predict_img = rf.predict(X_image_data)\n",
- "rf_predict_img = rf_predict_img.reshape(ds_class.sizes['latitude'`, ds_class.sizes['longitude'`)\n",
+ "rf_predict_img = rf_predict_img.reshape(ds_class.sizes['latitude'], ds_class.sizes['longitude'])\n",
 "\n",
 "# Adding the Random Forest Prediction to the dataset\n",
- "ds_class['RF-forest'` = xr.DataArray(rf_predict_img, dims=['latitude', 'longitude'`, coords={'longitude': ds_class['longitude'`, 'latitude': ds_class['latitude'`})\n",
+ "ds_class['RF-forest'] = xr.DataArray(rf_predict_img, dims=['latitude', 'longitude'], coords={'longitude': ds_class['longitude'], 'latitude': ds_class['latitude']})\n",
 "\n",
- "plot = ds_class['RF-forest'`.plot.imshow(cmap=cmap_green, cbar_kwargs={'ticks': [0.25,0.75`})\n",
+ "plot = ds_class['RF-forest'].plot.imshow(cmap=cmap_green, cbar_kwargs={'ticks': [0.25,0.75]})\n",
 "cbar = plot.colorbar\n",
- "cbar.set_ticklabels(['non-forest', 'forest'`)\n",
+ "cbar.set_ticklabels(['non-forest', 'forest'])\n",
 "plot.axes.set_title('Random Forest Classification')\n",
 "plt.show()\n",
 "\n",
@@ -486,8 +486,8 @@
 "\n",
 "# Print the confusion matrix\n",
 "con_mat_rf = pd.DataFrame(confusion_matrix(y_test, rf_predict), \n",
- " index=['Actual Negative', 'Actual Positive'`, \n",
- " columns=['Predicted Negative', 'Predicted Positive'`)\n",
+ " index=['Actual Negative', 'Actual Positive'], \n",
+ " columns=['Predicted Negative', 'Predicted Positive'])\n",
 "display(con_mat_rf)"
 ]
 },
@@ -509,17 +509,17 @@
 "outputs": [],
 "source": [
 "#| code-fold: true\n",
- "cmap_trio = colors.ListedColormap(['whitesmoke', 'indianred', 'goldenrod', 'darkgreen'`)\n",
+ "cmap_trio = colors.ListedColormap(['whitesmoke', 'indianred', 'goldenrod', 'darkgreen'])\n",
 "\n",
 "\n",
- "double_clf = (ds_class['NB-forest'` + 2*ds_class['RF-forest'`)\n",
+ "double_clf = (ds_class['NB-forest'] + 2*ds_class['RF-forest'])\n",
 "\n",
 "fig, ax = plt.subplots()\n",
 "cax = ax.imshow(double_clf, cmap=cmap_trio, interpolation='none')\n",
 "\n",
 "# Add a colorbar with custom tick labels\n",
- "cbar = fig.colorbar(cax, ticks=[1*0.375, 3*0.375, 5*0.375, 7*0.375`)\n",
- "cbar.ax.set_yticklabels(['None', 'Naive Bayes', 'Random Forest', 'Both'`)\n",
+ "cbar = fig.colorbar(cax, ticks=[1*0.375, 3*0.375, 5*0.375, 7*0.375])\n",
+ "cbar.ax.set_yticklabels(['None', 'Naive Bayes', 'Random Forest', 'Both'])\n",
 "ax.set_title('Classification Comparison')\n",
 "ax.set_axis_off()\n",
 "plt.show()"
@@ -544,11 +544,11 @@
 "ax = axs.ravel()\n",
 "\n",
 "for i in range(4):\n",
- " ax[i`.imshow(double_clf==i, cmap='cmc.oleron_r', interpolation='none')\n",
- " category = ['by None', 'only by Naive Bayes', 'only by Random Forest', 'by Both'`[i`\n",
+ " ax[i].imshow(double_clf==i, cmap='cmc.oleron_r', interpolation='none')\n",
+ " category = ['by None', 'only by Naive Bayes', 'only by Random Forest', 'by Both'][i]\n",
 " title = 'Areas classified ' + category\n",
- " ax[i`.set_title(title)\n",
- " ax[i`.set_axis_off()\n",
+ " ax[i].set_title(title)\n",
+ " ax[i].set_axis_off()\n",
 "\n",
 "plt.tight_layout()"
 ]
@@ -574,10 +574,10 @@
 "counts = {}\n",
 "for num in range(0,4):\n",
 " num_2_class = {0: 'None', 1: 'Naive Bayes', 2: 'Random Forest', 3: 'Both'}\n",
- " counts[num_2_class[num`` = int((double_clf == num).sum().values)\n",
+ " counts[num_2_class[num]] = int((double_clf == num).sum().values)\n",
 "\n",
- "class_counts_df = pd.DataFrame(list(counts.items()), columns=['Class', 'Count'`)\n",
- "class_counts_df['Percentage'` = (class_counts_df['Count'` / class_counts_df['Count'`.sum())*100\n",
+ "class_counts_df = pd.DataFrame(list(counts.items()), columns=['Class', 'Count'])\n",
+ "class_counts_df['Percentage'] = (class_counts_df['Count'] / class_counts_df['Count'].sum())*100\n",
 "ax = class_counts_df.plot.bar(x='Class', y='Percentage', rot=0, color='darkgreen', ylim=(0,100), title='Classified Areas per Classifier (%)')\n",
 "\n",
 "# Annotate the bars with the percentage values\n",
diff --git a/notebooks/02_floodmapping.ipynb b/notebooks/02_floodmapping.ipynb
index cb6fedf..2f3a07a 100644
--- a/notebooks/02_floodmapping.ipynb
+++ b/notebooks/02_floodmapping.ipynb
@@ -73,11 +73,11 @@
 "m2.add_wms.OpenStreetMap.add_layer.default()\n",
 "m.apply_layout(\n",
 " {\n",
- " 'figsize': [7.32, 4.59`,\n",
- " '0_map': [0.05, 0.18, 0.35, 0.64`,\n",
- " '1_cb': [0.8125, 0.1, 0.1, 0.8`,\n",
+ " 'figsize': [7.32, 4.59],\n",
+ " '0_map': [0.05, 0.18, 0.35, 0.64],\n",
+ " '1_cb': [0.8125, 0.1, 0.1, 0.8],\n",
 " '1_cb_histogram_size': 0.8,\n",
- " '2_map': [0.4375, 0.18, 0.35, 0.64`\n",
+ " '2_map': [0.4375, 0.18, 0.35, 0.64]\n",
 " }\n",
 " )\n",
 "m.show()"
@@ -124,14 +124,14 @@
 "outputs": [],
 "source": [
 "#| label: fig-lik\n",
 "#| fig-cap: 'Likelihoods for $\sigma^0$ being associated with land or water for 1 pixel in the Greek area of Thessaly. Likelihoods are calculated over a range of $\sigma^0$. The pixel''s observed $\sigma^0$ is given with a vertical line. Click on the map to re-calculate and update this figure for another pixel in the study area. Map created with EOmaps [@quast_getting_2024].'\n",
 "\n",
 "RANGE = np.arange(-30, 0, 0.1)\n",
 "hparam_dc = xr.open_dataset('../data/tuw_s1_harpar/S1_CSAR_IWGRDH/SIG0-HPAR/V0M2R3/EQUI7_EU020M/E054N006T3/D080.nc')\n",
 "plia_dc = xr.open_dataset('../data/s1_parameters/S1_CSAR_IWGRDH/PLIA-TAG/V01R03/EQUI7_EU020M/E054N006T3/PLIA-TAG-MEAN_20200101T000000_20201231T235959__D080_E054N006T3_EU020M_V01R03_S1IWGRDH.nc')\n",
- "sig0_dc['id'` = (('y', 'x'), np.arange(sig0_dc.SIG0.size).reshape(sig0_dc.SIG0.shape))\n",
- "hparam_dc['id'` = (('y', 'x'), np.arange(sig0_dc.SIG0.size).reshape(sig0_dc.SIG0.shape))\n",
- "plia_dc['id'` = (('y', 'x'), np.arange(sig0_dc.SIG0.size).reshape(sig0_dc.SIG0.shape))\n",
+ "sig0_dc['id'] = (('y', 'x'), np.arange(sig0_dc.SIG0.size).reshape(sig0_dc.SIG0.shape))\n",
+ "hparam_dc['id'] = (('y', 'x'), np.arange(sig0_dc.SIG0.size).reshape(sig0_dc.SIG0.shape))\n",
+ "plia_dc['id'] = (('y', 'x'), np.arange(sig0_dc.SIG0.size).reshape(sig0_dc.SIG0.shape))\n",
 "\n",
 "def calc_water_likelihood(id, x):\n",
 " point = plia_dc.where(plia_dc.id == id, drop=True)\n",
@@ -165,7 +165,7 @@
 "\n",
 "def calc_likelihoods(id, x):\n",
 " if isinstance(x, list):\n",
- " x = np.arange(x[0`, x[1`, 0.1)\n",
+ " x = np.arange(x[0], x[1], 0.1)\n",
 " water_likelihood, land_likelihood = calc_water_likelihood(id=id, x=x), calc_land_likelihood(id=id, x=x)\n",
 " return water_likelihood, land_likelihood\n",
 "\n",
@@ -189,18 +189,18 @@
 " m2 = m.new_layer(layer=\"map\")\n",
 " m2.add_wms.OpenStreetMap.add_layer.default()\n",
 " flood_classification = bayesian_flood_decision(sig0_dc.id, sig0_dc.SIG0)\n",
- " sig0_dc[\"decision\"` = (('y', 'x'), flood_classification.reshape(sig0_dc.SIG0.shape))\n",
- " sig0_dc[\"decision\"` = sig0_dc.decision.where(sig0_dc.SIG0.notnull())\n",
- " sig0_dc[\"decision\"` = sig0_dc.decision.where(sig0_dc.decision==0)\n",
+ " sig0_dc[\"decision\"] = (('y', 'x'), flood_classification.reshape(sig0_dc.SIG0.shape))\n",
+ " sig0_dc[\"decision\"] = sig0_dc.decision.where(sig0_dc.SIG0.notnull())\n",
+ " sig0_dc[\"decision\"] = sig0_dc.decision.where(sig0_dc.decision==0)\n",
 " m.set_data(data=sig0_dc, x=\"x\", y=\"y\", parameter=\"decision\", crs=Maps.CRS.Equi7_EU)\n",
 " m.plot_map()\n",
 " m.show_layer(\"map\", (\"data\", 0.5))\n",
 " m.apply_layout(\n",
 " {\n",
- " 'figsize': [7.32, 4.59`,\n",
- " '0_map': [0.44573, 0.11961, 0.3375, 0.75237`,\n",
- " '1_': [0.10625, 0.5781, 0.3125, 0.29902`,\n",
- " '2_': [0.10625, 0.11961, 0.3125, 0.29902`,\n",
+ " 'figsize': [7.32, 4.59],\n",
+ " '0_map': [0.44573, 0.11961, 0.3375, 0.75237],\n",
+ " '1_': [0.10625, 0.5781, 0.3125, 0.29902],\n",
+ " '2_': [0.10625, 0.11961, 0.3125, 0.29902],\n",
 " }\n",
 " )\n",
 "\n",
@@ -210,11 +210,11 @@
 " m.add_colorbar(label=\"$\sigma^0$ (dB)\", orientation=\"vertical\", hist_bins=30)\n",
 " m.apply_layout(\n",
 " {\n",
- " 'figsize': [7.32, 4.59`,\n",
- " '0_map': [0.44573, 0.11961, 0.3375, 0.75237`,\n",
- " '1_': [0.10625, 0.5781, 0.3125, 0.29902`,\n",
- " '2_': [0.10625, 0.11961, 0.3125, 0.29902`,\n",
- " '3_cb': [0.8, 0.09034, 0.1, 0.85`,\n",
+ " 'figsize': [7.32, 4.59],\n",
+ " '0_map': [0.44573, 0.11961, 0.3375, 0.75237],\n",
+ " '1_': [0.10625, 0.5781, 0.3125, 0.29902],\n",
+ " '2_': [0.10625, 0.11961, 0.3125, 0.29902],\n",
+ " '3_cb': [0.8, 0.09034, 0.1, 0.85],\n",
 " '3_cb_histogram_size': 0.8\n",
 " }\n",
 " )\n",
@@ -233,7 +233,7 @@
 "\n",
 " # add all artists as \"temporary pick artists\" so that they\n",
 " # are removed when the next datapoint is selected\n",
- " for a in [water, land, value_left`:\n",
+ " for a in [water, land, value_left]:\n",
 " m.cb.pick.add_temporary_artist(a)\n",
 "\n",
 " if calc_posteriors is not None:\n",
@@ -242,7 +242,7 @@
 " (nf,) = ax_lower.plot(RANGE, nf_post,'r-', lw=5, alpha=0.6, label=\"non-flood\")\n",
 " value_right = ax_lower.vlines(x=value, ymin=-0.1, ymax=1.1, lw=3, label=\"observed\")\n",
 " ax_lower.legend(loc=\"upper left\")\n",
- " for a in [f, nf, value_right`:\n",
+ " for a in [f, nf, value_right]:\n",
 " m.cb.pick.add_temporary_artist(a)\n",
 "\n",
 " # re-compute axis limits based on the new artists\n",
@@ -295,7 +295,7 @@
 "outputs": [],
 "source": [
 "#| label: fig-post\n",
- "#| fig-cap: 'Posterior probabilities for $\sigma^0$ of 1 pixel being associated with land or water in the Greek area of Thessaly. Click on the map to re-calculate and update this figure for another pixel in the study area. Map created with EOmaps {cite}`quast_getting_2024`.'\n",
+ "#| fig-cap: 'Posterior probabilities for $\sigma^0$ of 1 pixel being associated with land or water in the Greek area of Thessaly. Click on the map to re-calculate and update this figure for another pixel in the study area. Map created with EOmaps [@quast_getting_2024].'\n",
 "view_bayes_flood(sig0_dc, calc_posteriors)"
 ]
 },
@@ -333,7 +333,7 @@
 "outputs": [],
 "source": [
 "#| label: fig-clas\n",
- "#| fig-cap: 'Flood extent of the Greek region of Thessaly based on Bayesian probabilities is shown on the map superimposed on an open street map. Click on a pixel to generate the point''s water and land likelihoods as well as the posterior probabilities. Map created with EOmaps {cite}`quast_getting_2024`.'\n",
+ "#| fig-cap: 'Flood extent of the Greek region of Thessaly based on Bayesian probabilities is shown on the map superimposed on an open street map. Click on a pixel to generate the point''s water and land likelihoods as well as the posterior probabilities. Map created with EOmaps [@quast_getting_2024].'\n",
 "view_bayes_flood(sig0_dc, calc_posteriors, bayesian_flood_decision)"
 ]
 },
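
A note on the markup being introduced: alongside the bracket repairs in the Python cells, the caption hunks swap Jupyter Book's MyST citation role for Quarto's Pandoc-style citation syntax, which is what the subject line "Quarto output" refers to. Schematically, using the citation key from the hunks above:

    MyST (removed):  Map created with EOmaps {cite}`quast_getting_2024`.
    Quarto (added):  Map created with EOmaps [@quast_getting_2024].

For the [@key] form to resolve when the notebooks are rendered, the Quarto project must point at a bibliography file (for example, a bibliography: references.bib entry in _quarto.yml); such a file is assumed to exist already and is not part of this patch.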