Commit 59254c4: solving dependency issues
nilshempelmann committed Jun 18, 2024
1 parent b625c0c
Showing 3 changed files with 41 additions and 20 deletions.
38 changes: 21 additions & 17 deletions albatross/atmos_ocean_data.py
@@ -8,6 +8,10 @@
import sys
import resource

+import logging
+LOGGER = logging.getLogger("PYWPS")


def openDAPsst(version = '3b', debug = False, anomalies = True, **kwargs):
"""
This function downloads data from the new ERSSTv3b on the IRI data library
@@ -22,7 +26,7 @@ def openDAPsst(version = '3b', debug = False, anomalies = True, **kwargs):
import re
from collections import namedtuple


### getting NOAA raw data
SSTurl = 'http://iridl.ldeo.columbia.edu/SOURCES/.NOAA/.NCDC/.ERSST/.version' + version + '/' + \
'.anom/T/%28startmon%20startyr%29%28endmon%20endyr%29RANGEEDGES/T/nbox/0.0/boxAverage/dods'
#SSTurl = 'http://iridl.ldeo.columbia.edu/SOURCES/.NOAA/.NCEP-NCAR/.CDAS-1/.MONTHLY/.Intrinsic/.PressureLevel/.phi/P/%28700%29VALUES' +'/' + \
@@ -36,7 +40,7 @@ def openDAPsst(version = '3b', debug = False, anomalies = True, **kwargs):
#SSTurl = 'http://iridl.ldeo.columbia.edu/SOURCES/.NOAA/.NCEP-NCAR/.CDAS-1/.MONTHLY/.Intrinsic/.PressureLevel/.phi/P/%28700%29VALUES' +'/' + \
#'.anom/T/%28startmon%20startyr%29%28endmon%20endyr%29RANGEEDGES/T/nbox/0.0/boxAverage/dods'

-print( 'Preparing to download from %s' % (SSTurl))
+LOGGER.info( 'Preparing to download from %s' % (SSTurl))

i2m = int_to_month()
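A note on the URL template above: the percent escapes are plain URL encoding (%28 and %29 are parentheses, %20 a space), so the time fragment reads T/(startmon startyr)(endmon endyr)RANGEEDGES in the IRI Data Library's Ingrid syntax; RANGEEDGES clips the T axis to that window, nbox/boxAverage reduces along it, and the trailing /dods exposes the result over OPeNDAP for the open_url call below.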

@@ -61,20 +65,20 @@ def openDAPsst(version = '3b', debug = False, anomalies = True, **kwargs):
#print sys.getrecursionlimit()
# stupid edit: remove file
#os.remove(fp)
-#if debug: print('Using pickled SST')
+#if debug: LOGGER.info('Using pickled SST')
#f = open(fp,'rb')
#sstdata = pickle.load(f)
#f.close()
#var = seasonal_var(sstdata['grid'], sstdata['lat'], sstdata['lon'])
#return var

-print( 'New SST field, will save to %s' % fp)
-print(SSTurl)
+LOGGER.info( 'New SST field, will save to %s' % fp)
+LOGGER.info(SSTurl)
for kw in DLargs:
SSTurl = re.sub(kw, DLargs[kw], SSTurl)

-print('Starting download...')
-print(SSTurl)
+LOGGER.info('Starting download...')
+LOGGER.info(SSTurl)
dataset = open_url(SSTurl)
arg = 'anom' if anomalies else 'sst'
sst = dataset[arg]
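To see what the substitution loop above produces, here is a minimal runnable sketch with assumed example values (the real DLargs dict is built from kwargs earlier in openDAPsst and is not shown in this hunk):

import re

SSTurl = ('http://iridl.ldeo.columbia.edu/SOURCES/.NOAA/.NCDC/.ERSST/.version3b/'
          '.anom/T/%28startmon%20startyr%29%28endmon%20endyr%29RANGEEDGES'
          '/T/nbox/0.0/boxAverage/dods')

# Hypothetical values, for illustration only.
DLargs = {'startmon': 'Jan', 'startyr': '1950', 'endmon': 'Dec', 'endyr': '2000'}

for kw in DLargs:
    SSTurl = re.sub(kw, DLargs[kw], SSTurl)

# SSTurl now contains .../T/%28Jan%201950%29%28Dec%202000%29RANGEEDGES/...
# and is ready for open_url.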
@@ -84,18 +88,18 @@ def openDAPsst(version = '3b', debug = False, anomalies = True, **kwargs):
t = time.data[:].squeeze()
sstlat = dataset['Y'][:]
sstlon = dataset['X'][:]
-print('Download finished.')
+LOGGER.info('Download finished.')

#_Grid has shape (ntim, nlat, nlon)

nseasons = 12 / kwargs['n_mon']
if debug:
-print('Number of seasons is %i, number of months is %i' % (nseasons, kwargs['n_mon']))
+LOGGER.info('Number of seasons is %i, number of months is %i' % (nseasons, kwargs['n_mon']))
ntime = len(t)

idx = arange(0, ntime, nseasons).astype(int)
-#print(idx)
-#print(grid)
+#LOGGER.info(idx)
+#LOGGER.info(grid)
sst = grid[idx]
sstdata = {'grid':sst, 'lat':sstlat, 'lon':sstlon}
var = seasonal_var(sst, sstlat, sstlon)
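The subsampling above is easy to check by hand; a worked sketch with assumed values (note that 12 / kwargs['n_mon'] is float division under Python 3, which arange accepts and astype(int) then truncates):

from numpy import arange

n_mon = 3                  # assumed: three-month seasons
nseasons = 12 / n_mon      # 4.0, a float under Python 3
ntime = 24                 # assumed: 24 steps on the time axis
idx = arange(0, ntime, nseasons).astype(int)
print(idx)                 # [ 0  4  8 12 16 20] -> grid[idx] takes every fourth field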
@@ -109,7 +113,7 @@ def load_slp(newFormat = False, debug = False, anomalies = True, **kwargs):
"""
This function loads HADSLP2r data.
"""
-from utils import slp_tf, int_to_month
+from albatross.utils import slp_tf, int_to_month
from netCDF4 import Dataset
from sklearn.preprocessing import scale
from numpy import arange, zeros, where
@@ -141,7 +145,7 @@ def load_slp(newFormat = False, debug = False, anomalies = True, **kwargs):
slp = seasonal_var(slpdata['grid'], slpdata['lat'], slpdata['lon'])
return slp
return slpdata
-print('Creating new SLP pickle from netCDF file')
+LOGGER.info('Creating new SLP pickle from netCDF file')

#_Next block takes the netCDF file and extracts the time to make
#_a time index.
@@ -174,7 +178,7 @@ def load_slp(newFormat = False, debug = False, anomalies = True, **kwargs):


if debug:
-print(tiindexndex[idx][:10])
+LOGGER.info(tiindexndex[idx][:10])

lat = dat.variables['lat'][:]
lon = dat.variables['lon'][:]
@@ -188,7 +192,7 @@ def load_slp(newFormat = False, debug = False, anomalies = True, **kwargs):
for year, mons in enumerate(idx):
slpavg[year] = slp[mons].mean(axis=0)
if debug:
-print('Averaging ', mons)
+LOGGER.info('Averaging %s', mons)

#WHERE TO SCALE THE DATA?
for i in range(nlat):
@@ -201,7 +205,7 @@ def load_slp(newFormat = False, debug = False, anomalies = True, **kwargs):
}
f = open(fp,'wb') # binary mode: pickle.dump writes bytes under Python 3
pickle.dump(slpdata,f)
-print('SLP data saved to %s' % (fp))
+LOGGER.info('SLP data saved to %s' % (fp))
f.close()
if newFormat:
from collections import namedtuple
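For completeness, the matching read-back of the pickle written above (a sketch; pickle files must be opened in binary mode under Python 3):

import pickle

fp = 'slp_data.pickle'     # assumed path, mirroring the fp used above
with open(fp, 'rb') as f:
    slpdata = pickle.load(f)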
@@ -222,7 +226,7 @@ def load_clim_file(fp, debug = False):
description = f.readline()
years = f.readline()
startyr, endyr = years[:4], years[5:9]
-print( description)
+LOGGER.info( description)

#First load extended index
data = np.loadtxt(fp, skiprows = 2)
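For reference, load_clim_file expects a small plain-text layout: one free-text description line, one year-range line parsed by fixed slicing (years[:4] and years[5:9]), then whitespace-separated numbers for np.loadtxt with skiprows = 2. An assumed sketch of such a file (the actual header of ./DATA/nao.txt is not part of this commit):

NAO index, monthly means (description line)
1871 2015
-0.61  1.24  0.35  0.02
 0.12 -0.48  0.97 -1.10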
3 changes: 2 additions & 1 deletion albatross/climdiv_data.py
@@ -5,6 +5,7 @@

import os
from albatross.atmos_ocean_data import *
+from albatross.utils import int_to_month
from os import environ as EV

def get_data(kwgroups):
@@ -135,7 +136,7 @@ def create_kwgroups(debug = False, climdata_startyr = 1871, n_yrs = 145, \
#########################################################

if debug:
-from utils import int_to_month

i2m = int_to_month()
print('Precip starts in %s-%d, ends in %s-%d' % \
(i2m[climdata_months[0]], climdata_startyr, i2m[climdata_months[-1]], climdata_endyr))
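The i2m lookup in this debug message comes from albatross.utils.int_to_month; a sketch of the assumed behaviour (the helper itself is not touched by this commit):

i2m = int_to_month()
print(i2m[1], i2m[12])   # assumed to print month names, e.g. 'Jan' 'Dec'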
20 changes: 18 additions & 2 deletions albatross/processes/wps_drought.py
@@ -19,6 +19,7 @@
# from albatross.atmos_ocean_data import *
from albatross.utils import sstMap
from albatross.utils import *
+from albatross import utils


import logging
@@ -120,8 +121,23 @@ def _handler(request, response):
LOGGER.info("Select the input-output files")

# sst= request.inputs['sst'][0].data
-index_file = request.inputs['indicator'][0].data # = './DATA/nao.txt'
-clim_file = request.inputs['pr'][0].data # './DATA/APGD_prcpComo.txt'

+import shutil
+import tempfile
+import urllib.request
+
+# Fetch each (possibly remote) input to a local temporary file. The code
+# below needs file paths rather than open handles, and 'resp' avoids
+# shadowing the WPS 'response' argument of _handler.
+with urllib.request.urlopen(request.inputs['indicator'][0].data) as resp:
+    with tempfile.NamedTemporaryFile(delete=False) as tmp_indicator:
+        shutil.copyfileobj(resp, tmp_indicator)
+index_file = tmp_indicator.name
+
+with urllib.request.urlopen(request.inputs['pr'][0].data) as resp:
+    with tempfile.NamedTemporaryFile(delete=False) as tmp_pr:
+        shutil.copyfileobj(resp, tmp_pr)
+clim_file = tmp_pr.name
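One design note on the block above: delete=False is what keeps the downloads on disk after the with blocks exit, since only the file paths are used further down; nothing removes the files afterwards, so deleting them at the end of _handler (or creating them under the process working directory, if one is available) would be a reasonable follow-up.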

filename = 'testComoNAO'

# #### USER INPUT ####