Working config for geomad
alexgleith committed Dec 18, 2023
1 parent 1c8f22c · commit 09b4eae
Showing 31 changed files with 2,799 additions and 585 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
*.pyc
62 changes: 60 additions & 2 deletions Makefile
@@ -3,9 +3,47 @@
# BBOX over Samoa and Tonga
BBOX := -180.0,-20.0,-170.0,-10.0

# Bigger BBOX over most of the Pacific
BIGBBOX := 135.0,-30.0,180.0,15.0
BIGBBOX2 := -180,-30.0,-120.0,15.0
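# (split into two boxes because the region crosses the antimeridian)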

up:
docker compose up

# Init the DB

datacube-init:
docker-compose exec explorer \
datacube system init --no-init-users

# Explorer

explorer-init:
docker-compose exec explorer \
cubedash-gen --init

explorer-update:
docker-compose exec explorer \
cubedash-gen --all

# OWS

ows-shell:
docker-compose exec ows bash

ows-init:
docker-compose exec ows \
datacube-ows-update --schema --role odc_admin

ows-update:
docker-compose exec ows \
bash -c " \
datacube-ows-update --views && \
datacube-ows-update \
"

# Indexing

products:
dc-sync-products products.csv

@@ -15,13 +53,21 @@ index-esri-lc:
stac-to-dc \
--catalog-href=https://planetarycomputer.microsoft.com/api/stac/v1/ \
--collections=io-lulc-9-class \
--bbox=${BIGBBOX}
stac-to-dc \
--catalog-href=https://planetarycomputer.microsoft.com/api/stac/v1/ \
--collections=io-lulc-9-class \
--bbox=${BIGBBOX2}

index-nasadem:
stac-to-dc \
--catalog-href=https://planetarycomputer.microsoft.com/api/stac/v1/ \
--collections=nasadem \
--bbox=${BIGBBOX}
stac-to-dc \
--catalog-href=https://planetarycomputer.microsoft.com/api/stac/v1/ \
--collections=nasadem \
--bbox=${BIGBBOX2}

index-sentinel-1:
stac-to-dc \
@@ -30,6 +76,18 @@ index-sentinel-1:
--datetime='2023-01-01/2023-12-31' \
--collections='sentinel-1-rtc'

index-sentinel-1-pacific:
stac-to-dc \
--catalog-href='https://planetarycomputer.microsoft.com/api/stac/v1/' \
--bbox='$(BIGBBOX)' \
--datetime='2023-01-01/2023-12-31' \
--collections='sentinel-1-rtc'
stac-to-dc \
--catalog-href='https://planetarycomputer.microsoft.com/api/stac/v1/' \
--bbox='$(BIGBBOX2)' \
--datetime='2023-01-01/2023-12-31' \
--collections='sentinel-1-rtc'

index-sentinel-2:
stac-to-dc \
--catalog-href='https://planetarycomputer.microsoft.com/api/stac/v1/' \
7 changes: 2 additions & 5 deletions README.md
@@ -10,19 +10,16 @@ need to have environment variables exported for connectivity to the
Postgres database too, which runs in Docker.

* Export environment variables:

``` bash
export DB_HOSTNAME=localhost
export DB_USERNAME=pacific
export DB_PASSWORD=secretpassword
export DB_DATABASE=odc
```

* Start the postgres DB: `make up` or `docker compose up`
* Add products: `make products` or `dc-sync-products products.csv`
* Index data individually or do it all with `make index-all` (see the combined example below)
* Now you can use any of the products. There's only one example notebook
for now, for [Sentinel-1](notebooks/Sentinel1_Basic.ipynb).
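
For reference, a full bring-up from scratch looks something like this (leave `make up` running in one terminal and run the rest in another):

``` bash
make up          # start the Postgres database (docker compose up)
make products    # add the product definitions from products.csv
make index-all   # index everything, or run the individual index-* targets
```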


## Next steps

Todo: get OWS and Explorer working, as well as a little Terria to visualise OWS.
82 changes: 75 additions & 7 deletions docker-compose.yaml
@@ -1,10 +1,78 @@
services:
# postgis:
# image: mdillon/postgis:9.6
# restart: always
# environment:
# POSTGRES_USER: pacific
# POSTGRES_PASSWORD: secretpassword
# POSTGRES_DB: odc
# ports:
# - "5432:5432"
# terria:
# image: alexgleith/terriamap
# # environment:
# # - VIRTUAL_HOST=terria.vcap.me
# # expose:
# # - "3001"
# ports:
# - "3001:3001"
ows:
network_mode: host
build:
context: ows
dockerfile: Dockerfile
volumes:
- ./ows_config:/env/config/ows_config
environment:
- DB_HOSTNAME=host.docker.internal
- DB_USERNAME=odc_admin
- DB_PASSWORD=${DB_PASSWORD}
- DB_DATABASE=odc
- PYTHONPATH=/env/config
- WMS_CONFIG_PATH=/env/config/ows_config/pacific_ows.py
- DATACUBE_OWS_CFG=ows_config.pacific_ows.ows_cfg
# - PC_SDK_SUBSCRIPTION_KEY=${PC_SDK_SUBSCRIPTION_KEY}
# - VIRTUAL_HOST=ows.localtest.me
ports:
- "80:8000"
# explorer:
# image: opendatacube/explorer:latest
# environment:
# - DB_HOSTNAME=postgis
# - DB_USERNAME=pacific
# - DB_PASSWORD=secretpassword
# - DB_DATABASE=odc
# # - CUBEDASH_SETTINGS=/code/settings.env.py
# # - VIRTUAL_HOST=explorer.localtest.me
# # expose:
# # - "8080"
# ports:
# - "8080:8080"
# # volumes:
# # - ./explorer/settings.env.py:/code/settings.env.py
# command:
# - "gunicorn"
# - "-b"
# - "0.0.0.0:8080"
# - "-w"
# - "3"
# - "--threads=2"
# - "-k"
# - "gthread"
# - "--timeout"
# - "90"
# - "--config"
# - "python:cubedash.gunicorn_config"
# - "cubedash:app"
# nginx-proxy:
# image: nginxproxy/nginx-proxy
# container_name: nginx-proxy
# ports:
# - "80:80"
# # - "443:443"
# volumes:
# # - conf:/etc/nginx/conf.d
# # - vhost:/etc/nginx/vhost.d
# # - html:/usr/share/nginx/html
# # - certs:/etc/nginx/certs:ro
# - /var/run/docker.sock:/tmp/docker.sock:ro
69 changes: 69 additions & 0 deletions notebooks/DownloadFromPC.ipynb
@@ -0,0 +1,69 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pystac\n",
"from planetary_computer import sign_url, sign_item\n",
"from pathlib import Path\n",
"import requests"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"item = pystac.Item.from_file(\"https://planetarycomputer.microsoft.com/api/stac/v1/collections/sentinel-2-l2a/items/S2A_MSIL2A_20230623T214811_R043_T02LMK_20230624T044756\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"signed = sign_item(item)\n",
"\n",
"folder = Path(\"files\") / item.id\n",
"folder.mkdir(exist_ok=True)\n",
"\n",
"for asset in signed.assets.values():\n",
" # local_file is the final part of the URL, e.g. \"S2A_MSIL2A_20230623T214811_R043_T02LMK_20230624T044756.tif\"\n",
" local_file = folder / Path(asset.href).name.split(\"?\")[0]\n",
" print(f\"Downloading {asset.href} to {local_file}\")\n",
"\n",
" # Download the file using requests\n",
" with open(local_file, \"wb\") as f:\n",
" f.write(requests.get(asset.href).content)\n",
"\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.6"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}