diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index ee4e2d5..59299bf 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,4 +1,4 @@ -name: Deploy to Ubuntu Server +name: Deploy Website to Ubuntu Server on: push: diff --git a/.github/workflows/deploy_blog.yml b/.github/workflows/deploy_blog.yml new file mode 100644 index 0000000..e678a73 --- /dev/null +++ b/.github/workflows/deploy_blog.yml @@ -0,0 +1,49 @@ +name: Deploy Blog to Ubuntu Server + +on: + push: + branches: + - master + - ppe + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GHCR_TOKEN_OAP }} + - name: Build and push Docker image + uses: docker/build-push-action@v2 + with: + context: . + push: true + tags: ghcr.io/${{ github.repository_owner }}/oap_blog:latest + file: Blog/Dockerfile + - name: SSH into server and deploy + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.SERVER_HOST }} + username: ${{ secrets.SERVER_USERNAME }} + password: ${{ secrets.SERVER_PASSWORD }} + port: ${{ secrets.SSH_PORT }} + script: | + echo "${{ secrets.GHCR_TOKEN_OAP }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + if [ "${{ github.ref }}" = 'refs/heads/master' ] + then + docker stop oap_blog_prod || true + docker rm oap_blog_prod || true + docker pull ghcr.io/${{ github.repository_owner }}/oap_blog:latest + docker run -d --name oap_blog_prod -p 83:80 ghcr.io/${{ github.repository_owner }}/oap_blog:latest + elif [ "${{ github.ref }}" = 'refs/heads/ppe' ] + then + docker stop oap_blog_ppe || true + docker rm oap_blog_ppe || true + docker pull ghcr.io/${{ github.repository_owner }}/oap_blog:latest + docker run -d --name oap_blog_ppe -p 8083:80 ghcr.io/${{ github.repository_owner }}/oap_blog:latest + fi \ No newline at 
end of file diff --git a/.gitignore b/.gitignore index bd56c34..7ca9a4b 100644 --- a/.gitignore +++ b/.gitignore @@ -322,3 +322,10 @@ TestData/5.MLData/ TestData/4.GFSFiltered1xInterpolationZarr/lat_lon_union.csv study.pkl DataPipelineNotebooks/.last_checked + +# Hugo +/Blog/public/ +/Blog/resources/_gen/ +/Blog/hugo_stats.json +/Blog/themes/*/node_modules/ +/Blog/themes/*/dist/ \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..17daaed --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "themes/LoveIt"] + path = Blog/themes/LoveIt + url = https://github.com/dillonzq/LoveIt.git diff --git a/Blog/.hugo_build.lock b/Blog/.hugo_build.lock new file mode 100644 index 0000000..e69de29 diff --git a/Blog/Dockerfile b/Blog/Dockerfile new file mode 100644 index 0000000..4fcea07 --- /dev/null +++ b/Blog/Dockerfile @@ -0,0 +1,32 @@ +# Start from the Ubuntu image and name this stage as 'builder' +# Note: alpine linux isn't compatible with the hugo command (without hacks around glibc) +FROM ubuntu:latest AS builder + +# Install Hugo and Git +RUN apt-get update && \ + apt-get install -y wget ca-certificates git && \ + wget https://github.com/gohugoio/hugo/releases/download/v0.120.4/hugo_extended_0.120.4_Linux-64bit.tar.gz && \ + tar xzf hugo_extended_0.120.4_Linux-64bit.tar.gz && \ + mv hugo /usr/local/bin/ && \ + rm -r hugo_extended_0.120.4_Linux-64bit.tar.gz + +# Copy your Hugo site source into the Docker container +COPY . 
/src + +# Set the working directory +WORKDIR /src + +# Initialize Git and update the LoveIt theme submodule +RUN git init && \ + git submodule update --init --recursive + +WORKDIR /src/Blog + +# Build your Hugo site +RUN hugo --minify --config hugo.toml + +# Use an Apache HTTP Server Docker image to serve your Hugo site +FROM httpd:2.4-alpine + +# Copy the built Hugo site from the builder container to the Apache HTTP Server container +COPY --from=builder /src/Blog/public/ /usr/local/apache2/htdocs/ \ No newline at end of file diff --git a/Blog/archetypes/default.md b/Blog/archetypes/default.md new file mode 100644 index 0000000..c6f3fce --- /dev/null +++ b/Blog/archetypes/default.md @@ -0,0 +1,5 @@ ++++ +title = '{{ replace .File.ContentBaseName "-" " " | title }}' +date = {{ .Date }} +draft = true ++++ diff --git a/Blog/assets/css/_page/_home.scss b/Blog/assets/css/_page/_home.scss new file mode 100644 index 0000000..9cfa214 --- /dev/null +++ b/Blog/assets/css/_page/_home.scss @@ -0,0 +1,181 @@ +.home { + .home-profile { + @include transform(translateY(16vh)); + padding: 0 0 .5rem; + text-align: center; + + .home-avatar { + padding: .5rem; + + img { + display: inline-block; + max-width: 100%; + height: auto; + margin: 0 auto; + @include transition(all 0.4s ease); + + &:hover { + position: relative; + @include transform(translateY(-.75rem)); + } + } + } + + .home-title { + font-size: 1.25rem; + font-weight: bold; + margin: 0; + padding: .5rem; + } + + .home-subtitle { + font-size: 1rem; + font-weight: normal; + margin: 0; + } + + .links { + padding: .5rem; + font-size: 1.5rem; + + a * { + vertical-align: text-bottom; + } + + img { + height: 1.5rem; + padding: 0 .25rem; + } + } + + .home-disclaimer { + font-size: 1rem; + line-height: 1.5rem; + font-weight: normal; + margin: 0; + padding: .5rem; + color: $global-font-secondary-color; + + [theme=dark] & { + color: $global-font-secondary-color-dark; + } + } + } +} + +.home[data-home=posts] { + .home-profile { + 
@include transform(translateY(0)); + padding-top: 2rem; + } + + .home-avatar img { + width: 32rem; + } + + .summary { + padding-top: 1rem; + padding-bottom: .8rem; + color: $global-font-color; + border-bottom: 1px dashed $global-border-color; + + [theme=dark] & { + color: $global-font-color-dark; + border-bottom: 1px dashed $global-border-color-dark; + } + + .featured-image-preview { + width: 4rem; + height: 4rem; + margin-right: 1rem; + @include transition(transform 0.4s ease); + + img { + width: 100%; + height: 100%; + object-fit: cover; + + &.lazyloaded { + @include object-fit(cover); + } + } + + &:hover { + @include transform(scale(1.01)); + } + } + + .title-and-preview{ + display: flex; + flex-direction: row; + align-items: center; + } + + .single-title { + font-size: 1.25rem; + line-height: 140%; + margin: 0.4rem 0; + } + + .content { + @include box(vertical); + -webkit-line-clamp: 3; + margin-top: .3rem; + width: 100%; + overflow: hidden; + text-overflow: ellipsis; + @include overflow-wrap(break-word); + color: $global-font-secondary-color; + + [theme=dark] & { + color: $global-font-secondary-color-dark; + } + + h2, + h3, + h4, + h5, + h6, + p { + font-size: 1rem; + line-height: 1.5; + display: inline; + + &::after { + content: "\A"; + white-space: pre; + } + } + + h2 { + font-size: 1.125rem; + } + + @include link(false, true); + + b, strong { + color: $global-font-secondary-color; + + [theme=dark] & { + color: $global-font-secondary-color-dark; + } + } + } + + .post-footer { + margin-top: .4rem; + display: flex; + justify-content: space-between; + align-items: center; + font-size: .875rem; + + @include link(false, false); + + .post-tags { + padding: 0; + + @include link(true, true); + } + } + } +} diff --git a/Blog/content/posts/20181201_18-19-season-preview/Continental17-18Confusion.PNG b/Blog/content/posts/20181201_18-19-season-preview/Continental17-18Confusion.PNG new file mode 100644 index 0000000..1d74f14 Binary files /dev/null and 
b/Blog/content/posts/20181201_18-19-season-preview/Continental17-18Confusion.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/NWACByDate.PNG b/Blog/content/posts/20181201_18-19-season-preview/NWACByDate.PNG new file mode 100644 index 0000000..fc1ac77 Binary files /dev/null and b/Blog/content/posts/20181201_18-19-season-preview/NWACByDate.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/NWACByProblem.PNG b/Blog/content/posts/20181201_18-19-season-preview/NWACByProblem.PNG new file mode 100644 index 0000000..831637c Binary files /dev/null and b/Blog/content/posts/20181201_18-19-season-preview/NWACByProblem.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/NWACByRegion.PNG b/Blog/content/posts/20181201_18-19-season-preview/NWACByRegion.PNG new file mode 100644 index 0000000..f3c0e7e Binary files /dev/null and b/Blog/content/posts/20181201_18-19-season-preview/NWACByRegion.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/UACByProblem.PNG b/Blog/content/posts/20181201_18-19-season-preview/UACByProblem.PNG new file mode 100644 index 0000000..1f73ccb Binary files /dev/null and b/Blog/content/posts/20181201_18-19-season-preview/UACByProblem.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/UACByRegion.PNG b/Blog/content/posts/20181201_18-19-season-preview/UACByRegion.PNG new file mode 100644 index 0000000..6ff03d9 Binary files /dev/null and b/Blog/content/posts/20181201_18-19-season-preview/UACByRegion.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/coastal17-18Confusion.PNG b/Blog/content/posts/20181201_18-19-season-preview/coastal17-18Confusion.PNG new file mode 100644 index 0000000..a239e60 Binary files /dev/null and b/Blog/content/posts/20181201_18-19-season-preview/coastal17-18Confusion.PNG differ diff --git a/Blog/content/posts/20181201_18-19-season-preview/index.md b/Blog/content/posts/20181201_18-19-season-preview/index.md 
new file mode 100644 index 0000000..798a6e2 --- /dev/null +++ b/Blog/content/posts/20181201_18-19-season-preview/index.md @@ -0,0 +1,72 @@ ++++ +title = '18-19 Season Preview' +date = 2018-12-01T11:48:48-07:00 +draft = false ++++ + +## Summary + +We have fixed several critical issues affecting the inability for the model to generalize and which had a negative impact on last seasons accuracy. We are also publishing accuracy goals for when experimental forecasts might also be published again and have outlined some analysis of where errors are still affecting the model and what is the plan to address them. + +## Looking back on 17-18 Season + +Before we look at what are the plans for the coming season I wanted to provide a bit of an update on how we did and what we have learned from last season. I launched the Open Avalanche Project in early March of 2018 at the tail end of the last Northern Hemisphere Avalanche Season. While everything was deployed and published as experimental I did want to provide an update on how accurate the forecasts provided were. The forecasts performed very poorly last season. The full analysis is available here: [https://github.com/scottcha/OpenAvalancheProject/blob/develop/ML/ForecastAccuracyAnalysis17-18.ipynb](https://github.com/scottcha/OpenAvalancheProject/blob/develop/ML/ForecastAccuracyAnalysis17-18.ipynb) but the net is that measuring both at a regional as well as a single point per region the overall accuracy was less than 30%. As I investigated there were several critical errors which contributed to this: + +1. The train/test split was not done across time and led to model overfitting. +2. The model training pipeline had a few different assumptions about date alignments than the prediction pipeline. +3. I was using 0 as a proxy for missing data incorrectly when it does have meaning in this model. For example, 0 snow depth does have meaning and isn't a good proxy for missing data. 
+ +All of these issues have been examined and will be rectified before other models are published. + +That being said there was lots learned in getting the end to end pipeline built and it will be easier for the future to focus less on that and more effort on building the best forecasts possible. + +## Looking forward to the 18-19 Season + +Over the summer I made a few major updates to the pipeline, resolving the first two issues above (the third will be resolved once new models are published). + +I also made the investment to bring in additional data, from Utah Avalanche Center, over the summer greatly expanding the data available to train on but also getting additional regional coverage in a continental climate. + +Now that the train/test split is done on a season boundary (currently the training is done from the 13-14 season through the 16-17 season and the 17-18 season is used as the test set) we have accuracy numbers which are better indicators of the real-world model performance. + +## When models will be live + +The experience and learnings from last year were informative and influenced the position that we should move cautiously in publishing even the experimental forecasts. We want to avoid both people reading in too much to the forecasts as well as we want to build a broad basis of support in the methodology across the public and avalanche forecasting communities. While we'll continue to make all of our work public but **we will only publish experimental forecasts generated from models with a full season accuracy > 75%**. That number seeks to strike a balance between what is likely useful from moving the conversation forward versus as well as keeping even this experimental work from being misinterpreted or taken out of context. While we are close to achieving a 75% accuracy on a continental snowpack we still have some work to get there (and the gap is wider for the coastal snowpack). 
+ +**Coastal Model Performance** + +This basic Random Forest Classifier model struggles to get the High forecasts correct a majority of the time and only has an overall accuracy value of 57.3% for the 17-18 test season. I've attempted more complex models (Xgboost, LSTM DNN) and neither have a significant improvement in accuracy. +![Coastal Confusion Matrix](coastal17-18Confusion.PNG) + +Breaking the forecast down across months there was not a clear pattern on parts of the winter which were harder to predict than others. +![Coastal By Date](NWACByDate.PNG) + +Breaking down by region did demonstrate that the Olympics did contribute the most to the error. +![Coastal By Region](NWACByRegion.PNG) + +Across all avalanche problem types the problem types most associated with the model incorrectly predicting the forecast for the Coastal regions are Wind Slabs, followed by Loose Wet and then by Persistent Slab and Cornices. + +Errors when Wind Slab is a forecasted avalanche problem: +![Coastal By Wind Slab](NWACByProblem.PNG) + +**Continental Model Performance** + +The model has better skill and works across the forecast values better than in the coastal region but with an overall accuracy value of 72.1%. + +![Continental Confusion Matrix](Continental17-18Confusion.PNG) + +As in the coastal forecast there was not a clear error pattern across dates. + +Breaking down by region demonstrates that the Uintas Region is the highest contribution to the error. +![Continental by Region](UACByRegion.PNG) + +As in the Coastal forecast Wind Slabs also are the highest avalanche problem correlated with model error followed by Storm Slab and Persistent Slab. + +Errors when Wind Slab is a forecasted avalanche problem: +![Continental by Wind Slab](UACByProblem.PNG) + +## Next Steps + +1. **Improve wind data:** From the analysis there definitely is a need to examine the wind values going into the forecasting or otherwise determining if it's possible to improve the Wind Slab forecasting. +2. 
**Get additional data:** Today the model only uses two forecast regions for training and evaluation. We are taking steps to get data from other avalanche centers to help expand the amount of data available to train. +3. **Explore other useful features/models:** While the models are designed to be able to approximate some aspects of snowpack evolution it's not clear how to connect these models with existing state of the art physics models of the snowpack. We need to investigate this as well as other possible modeling goals (such as predicting avalanche problems). +4. **Continue conversations with the community:** As we learn more about what is possible in the space of automatically generating avalanche forecasts we want to continue the conversation about the appropriate and innovative uses which may be possible. \ No newline at end of file diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/index.md b/Blog/content/posts/20190620_a-global-avalanche-region-map/index.md new file mode 100644 index 0000000..1e52db4 --- /dev/null +++ b/Blog/content/posts/20190620_a-global-avalanche-region-map/index.md @@ -0,0 +1,51 @@ ++++ +title = "A Global Avalanche Region Map" +date = 2019-06-20T11:48:26-07:00 +draft = false +featuredImage = "worldregionsshape_small.png" ++++ + +## Summary + +In order to make the largest impact possible we are starting to refactor the OAP to have a global coverage. Enabling others around the world to look at data and make contributions will help accelerate our mission to improve the ability to create improved avalanche forecasts and cover more regions which currently aren't covered by human forecasters. The first building block of this is having a single map (GIS shapefile) to identify which areas of the globe are likely avalanche regions and which aren't. I'm making that map available today [here]. The rest of this post covers how it was made so others can help contribute to this process. 
+ +## Creating The Map + +### Identifying Potential Avalanche Regions + +There are two attributes we need to be able to identify an avalanche region at scale: +1. A slope +2. The presence of enough snow to avalanche + +In order to do this for the entire globe we need two sets of data. The first set of data is elevation data in order to determine slope angles. For this dataset I used the ASTER digital elevation data [https://asterweb.jpl.nasa.gov/gdem.asp](https://asterweb.jpl.nasa.gov/gdem.asp). Using that dataset you can apply a GIS transform to the data to first determine the slope at a location and subsequently to filter that slope to a range. The data has a very high resolution but we'll downsample this later to match the resolution which our weather/snow coverage data exists at. + +The second data set was to determine snow coverage at peak snow depth of winter. As part of moving to a global model we are going to start ingesting and forecasting using the 12km global GFS model. One of the model parameters available in snow depth. To build this portion of the dataset I merged peak winter snowdepth information for both hemispheres in to a single file. I used winter 2018 for the southern hemisphere and winter 2019 for the northern. To make this more accurate it might make sense to merge in a few more years but luckily 2019 was a record snowfall for several locations of the northern hemisphere which helps ensure we are including all potential locations in to our map. + +### The GIS Process + +To build this map at a global scale was computationally heavy. To handle that I chose to prototype the process using QGIS on North America (which I'm most familiar with). QGIS is largely based on GDAL and I can use those command line tools to then generate a script file which I can run in parallel over the thousands of GEOTiff files which makes up the dataset. 
The full script which covers this process is here: [https://github.com/scottcha/OpenAvalancheProject/blob/develop/GetTrainingData/CreateGlobalAvyRegions/ElevationAndSnowToRegion.sh](https://github.com/scottcha/OpenAvalancheProject/blob/develop/GetTrainingData/CreateGlobalAvyRegions/ElevationAndSnowToRegion.sh) + +#### Identifying slopes + +Using the elevation data we first use gdaldem to derive the slope information and then gdal_calc to filter that area to only slopes greater than 20 degrees. Since there are lots of very small slopes which meet this criteria we then want to use a gdal sieve to remove all the very small slopes and make the first attempt at real regions. At this point we are working with files which look like this (essentially all slopes > 20 degrees with any very small slopes removed) in which you can see Mt Rainier in the upper left: +[![Mt Rainier Slopes](rainierslopes_small.png)](rainierslopes.jpg) + +#### Identifying regions + +Obviously the specific slope information in the image above is too granular so we want to take a few steps to generalize this. Using gdalwarp we take each tile and split it in to 8 equally sized pieces (which will match the granularity of our weather and snow data). For each of those 8 pieces we'll compute the max value to determine if there are slopes in that region or not which meet our criteria. We can then merge all of these tiles to make a slope map of the world which is the first step to making an avy region map. + +Since there are obviously areas of the world with mountains/slopes but which don't get enough snow to cause an avalanche hazard the next step is to merge this slope information with our snowdepth information. 
As I mentioned before I pulled the peak snow depth information from the GFS models for both Northern and Southern Hemispheres and merged them to create this mask: +[![World Snow 18-19](snow_map_full_18-19_small.png)](snow_map_full_18-19.jpg) + +Using that as a filter for our regions and then doing a bit more cleaning for areas which are either obviously errors in the elevation data or areas I've chosen to optimize out due to the large area but low probability anyone will be there to experience an avalanche (i.e., Greenland) we get the following: +[![World Avy Regions](worldregionsshape_small.png)](worldregionsshape.png) + +While I did a bit of manual cleaning I've opted to let the algorithm options leave some improbable avy regions in the map with the idea that this is an ongoing experiment and it would be interesting to see if there is value in forecasting in these areas (NE United States). The next steps in this effort will be starting to pull the weather forecasts within these regions and making that data available for people to experiment with. Also, to improve the usability of the map I've reached out to Greg at peakbagger.com and he's allowed me to help integrate the mountain range names he has created around the world to create labels for these subshapes: [https://peakbagger.com/RangIndx.aspx](https://peakbagger.com/RangIndx.aspx). I'll publish an update once that is incorporated. + +## Script Location + +[https://github.com/scottcha/OpenAvalancheProject/tree/develop/GetTrainingData/CreateGlobalAvyRegions/](https://github.com/scottcha/OpenAvalancheProject/blob/master/GetTrainingData/CreateGlobalAvyRegions/ElevationAndSnowToRegion.sh) + +## References + +1. 
[https://www.mtavalanche.com/sites/default/files/MSA_Slope_Angle_Final.pdf](https://www.mtavalanche.com/sites/default/files/MSA_Slope_Angle_Final.pdf) \ No newline at end of file diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes.jpg b/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes.jpg new file mode 100644 index 0000000..f28e846 Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes.jpg differ diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes.png b/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes.png new file mode 100644 index 0000000..8428b40 Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes.png differ diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes_small.png b/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes_small.png new file mode 100644 index 0000000..1c247b1 Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/rainierslopes_small.png differ diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/snow_map_full_18-19.jpg b/Blog/content/posts/20190620_a-global-avalanche-region-map/snow_map_full_18-19.jpg new file mode 100644 index 0000000..cbb65ec Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/snow_map_full_18-19.jpg differ diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/snow_map_full_18-19_small.png b/Blog/content/posts/20190620_a-global-avalanche-region-map/snow_map_full_18-19_small.png new file mode 100644 index 0000000..e387656 Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/snow_map_full_18-19_small.png differ diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/worldregionsshape.png 
b/Blog/content/posts/20190620_a-global-avalanche-region-map/worldregionsshape.png new file mode 100644 index 0000000..1e3f673 Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/worldregionsshape.png differ diff --git a/Blog/content/posts/20190620_a-global-avalanche-region-map/worldregionsshape_small.png b/Blog/content/posts/20190620_a-global-avalanche-region-map/worldregionsshape_small.png new file mode 100644 index 0000000..bec91fa Binary files /dev/null and b/Blog/content/posts/20190620_a-global-avalanche-region-map/worldregionsshape_small.png differ diff --git a/Blog/content/posts/20231201_2023-Update/index.md b/Blog/content/posts/20231201_2023-Update/index.md new file mode 100644 index 0000000..8caa647 --- /dev/null +++ b/Blog/content/posts/20231201_2023-Update/index.md @@ -0,0 +1,5 @@ ++++ +title = '2023 Update' +date = 2023-11-21T11:47:42-07:00 +draft = true ++++ diff --git a/Blog/content/posts/download_images.py b/Blog/content/posts/download_images.py new file mode 100644 index 0000000..b764dd7 --- /dev/null +++ b/Blog/content/posts/download_images.py @@ -0,0 +1,34 @@ +import os +import re +import requests + +# Path to the markdown file +markdown_file = "index.md" + +# Directory to save images +directory = "C:/Users/scott/source/repos/OpenAvalancheProject_Website/Blog/content/posts/20190620_a-global-avalanche-region-map/" + +#combine directory and file with path.join +markdown_file = os.path.join(directory, markdown_file) + +# Read the markdown text from the file +with open(markdown_file, 'r') as f: + markdown_text = f.read() + +# Find all image URLs +image_urls = re.findall(r'\((https://oapstorageprod.blob.core.windows.net/blog-images/[^)]+)\)', markdown_text) + +# Create directory if it doesn't exist +os.makedirs(directory, exist_ok=True) + +# Download each image +for url in image_urls: + # Get the image name by splitting the URL + image_name = url.split("/")[-1] + + # Download the image + response = requests.get(url) + 
+ # Save the image + with open(os.path.join(directory, image_name), 'wb') as f: + f.write(response.content) \ No newline at end of file diff --git a/Blog/data/empty.txt b/Blog/data/empty.txt new file mode 100644 index 0000000..e69de29 diff --git a/Blog/hugo.toml b/Blog/hugo.toml new file mode 100644 index 0000000..71705f1 --- /dev/null +++ b/Blog/hugo.toml @@ -0,0 +1,73 @@ +theme = 'LoveIt' +languageCode = 'en-us' +title = 'Open Avalanche Project News' + +[params] + title = "Open Avalanche Project News" + name = "Open Avalanche Project News" + description = "News and Updates to the Open Avalanche Project" + subtitle = "Open Avalanche Project News" + logo = "/2017_OpenAvalancheProject_Logo_Horizontal_PNG.png" + +[params.header] + [params.header.title] + name = "Open Avalanche Project News" + +[params.app] + title = "Open Avalanche Project News" + subtitle = "Open Avalanche Project News" + +[params.home] + rss = 10 + [params.home.profile] + enable = true + avatarUrl = "/2017_OpenAvalancheProject_Logo_Horizontal_PNG.png" + subtitle = "News and Updates" + typeit = false + +[params.social] + GitHub = "scottcha/OpenAvalancheProject" + Linkedin = "scott-t-chamberlin" + +# Author config +[author] + name = "Scott Chamberlin" + email = "scott@snowymountainworks.com" + link = "https://snowymountainworks.com" + +# Menu config +[menu] + [[menu.main]] + weight = 1 + identifier = "posts" + # you can add extra information before the name (HTML format is supported), such as icons + pre = "" + # you can add extra information after the name (HTML format is supported), such as icons + post = "" + name = "Posts" + url = "/posts/" + # title will be shown when you hover on this menu link + title = "" + [[menu.main]] + weight = 2 + identifier = "tags" + pre = "" + post = "" + name = "Tags" + url = "/tags/" + title = "" + [[menu.main]] + weight = 3 + identifier = "categories" + pre = "" + post = "" + name = "Categories" + url = "/categories/" + title = "" + +# Markup related configuration 
in Hugo +[markup] + # Syntax Highlighting (https://gohugo.io/content-management/syntax-highlighting) + [markup.highlight] + # false is a necessary configuration (https://github.com/dillonzq/LoveIt/issues/158) + noClasses = false \ No newline at end of file diff --git a/Blog/i18n/empty.txt b/Blog/i18n/empty.txt new file mode 100644 index 0000000..e69de29 diff --git a/Blog/layouts/empty.txt b/Blog/layouts/empty.txt new file mode 100644 index 0000000..e69de29 diff --git a/Blog/static/2017_OpenAvalancheProject_Logo_Horizontal_PNG.png b/Blog/static/2017_OpenAvalancheProject_Logo_Horizontal_PNG.png new file mode 100644 index 0000000..e7fba89 Binary files /dev/null and b/Blog/static/2017_OpenAvalancheProject_Logo_Horizontal_PNG.png differ diff --git a/Blog/static/2017_OpenAvalancheProject_Logo_Icon_PNG.png b/Blog/static/2017_OpenAvalancheProject_Logo_Icon_PNG.png new file mode 100644 index 0000000..e69f606 Binary files /dev/null and b/Blog/static/2017_OpenAvalancheProject_Logo_Icon_PNG.png differ diff --git a/Blog/themes/LoveIt b/Blog/themes/LoveIt new file mode 160000 index 0000000..e9e89a4 --- /dev/null +++ b/Blog/themes/LoveIt @@ -0,0 +1 @@ +Subproject commit e9e89a4613baee823596822b7d246f5931263491