mainALE.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Main function to perform radar reflector analysis using GRTx
R. Czikhardt
29.7.2020
"""
print('\n')
print('Caroline Radar-Coding Toolbox (based on GECORIS v.1.0)')
print('Copyright (c) 2021 Richard Czikhardt, [email protected]')
print('Dept. of Geosciences and Remote Sensing, Delft University of Technology')
print('-----------------------------------------------------------------------')
print('License: GNU GPL v3+')
print('-----------------------------------------------------------------------')
import sys
import os
import glob
from gecoris import ioUtils, plotUtils, atmoUtils
def main(parms):
    # unpack input parameters:
    stationLog = parms['stationLog']
    stacksLog = parms['stackLog']
    outDir = parms['outDir']
    posFlag = parms['precisePosFlag']
    ovsFactor = parms['ovsFactor']
    atmoFlag = parms['atmoFlag']
    plotFlag = parms['plotFlag']
    #
    print('Initializing reflectors...')
    if stationLog.lower().endswith('.json'):
        stations = ioUtils.load_station_log(stationLog)
    elif stationLog.lower().endswith('.csv'):
        stations = ioUtils.fromCSV(stationLog)
    else:
        raise ValueError('Unknown station log file format. Use .json or .csv.')
    STATIONS = stations
    print(f'{len(stations)} stations to process...')
    # prepare atmo. data dir:
    if atmoFlag:
        atmoDir = outDir + os.sep + 'atmo/'
        if not os.path.exists(atmoDir):
            os.makedirs(atmoDir)
    # iterate over every single station:
    for i in range(len(STATIONS)):
        stations = STATIONS[i:i+1]
        print(f'***Current station: {stations[0].id}\n')
        print('Loading SAR data stacks...')
        if stacksLog.lower().endswith('.json'):
            stacks = ioUtils.load_stacks(stacksLog)
        elif stacksLog.lower().endswith('.csv'):
            stacks = ioUtils.stacks_fromCSV(stacksLog)
        else:
            raise ValueError('Unknown stacks log file format. Use .json or .csv.')
        # load data:
        for stack in stacks:
            stack.readData(stations)
            ioUtils.toJSON(stack, parms['outDir'])  # save to JSON
            if atmoFlag:
                print('Preparing atmo. models for stack ' + stack.id)
                atmoUtils.prepareAtmo(stack, atmoDir)
        # check if output directory exists and load already processed:
        if not os.path.exists(outDir):
            os.makedirs(outDir)
            logs = []
        else:
            logs = glob.glob(outDir + os.sep + "*.json")
        #
        print(str(len(stations)) + ' reflectors on ' + str(len(stacks))
              + ' stacks to process.')
        # stations is a single-element slice, so index 0 is the current station:
        i = 0
        try:
            # check if analysis already performed:
            inJSON = [q for q in logs
                      if stations[i].id + '.json' in q.split(os.sep)[-1]]
            if inJSON:
                print('Station ' + stations[i].id + ' already analyzed, updating.')
                stations[i] = ioUtils.fromJSON(inJSON[0])
                for stack in stacks:
                    if atmoFlag:
                        stations[i].updateStack(stack, ovsFactor=ovsFactor,
                                                posFlag=posFlag, atmoDir=atmoDir)
                    else:
                        stations[i].updateStack(stack, ovsFactor=ovsFactor,
                                                posFlag=posFlag)
            else:
                for stack in stacks:
                    if atmoFlag:
                        stations[i].addStack(stack, ovsFactor=ovsFactor,
                                             posFlag=posFlag, plotFlag=plotFlag,
                                             outDir=outDir + stations[i].id,
                                             atmoDir=atmoDir)
                    else:
                        stations[i].addStack(stack, ovsFactor=ovsFactor,
                                             posFlag=posFlag, plotFlag=plotFlag,
                                             outDir=outDir + stations[i].id)
            stations[i].print_all_timeseries(outDir)
            print('Removing outliers.')
            stations[i].detectOutliers()
            print('Performing RCS and SCR analysis.')
            stations[i].RCSanalysis()
            if posFlag and plotFlag > 0:
                print('Plotting ALE.')
                for stack in stacks:
                    if atmoFlag:
                        stations[i].plotALE(stack.id, outDir, atmoDir)
                    else:
                        stations[i].plotALE(stack.id, outDir)
                stations[i].plotALE_TS(outDir)
            if plotFlag > 0:
                print('Plotting RCS time series.')
                stations[i].plotRCS(outDir)
            #
            print('Exporting to JSON dumps.')
            stations[i].toJSON(outDir)
            stations[i].statsToJSON(outDir)
        except IndexError:
            print(f'Skipping station {stations[i].id}...\n'
                  'The footprint of the selected burst does not agree with the '
                  'actual coverage of the SAR data.\n'
                  'The corresponding burst needs to be selected manually '
                  '(it can be hard-coded in the swathburst function in dorisUtils.py).\n')
    stations = STATIONS
    if len(stations) > 2 and plotFlag > 0:
        print('Generating network plots.')
        plotUtils.plotNetworkALE(stations, outDir + os.sep + 'network_ALE.png')
        plotUtils.plotNetworkRCS(stations, outDir + os.sep + 'network_RCS.png')
        plotUtils.plotNetworkSCR_hz(stations, outDir + os.sep + 'network_SCR.png')
    #
    print('Done. Thank you for using GECORIS. Do not forget to reference it.')
    print('In case of any inquiries, please contact [email protected]')
def parse_parms(parms_file):
    # the parameters file is expected to contain a single Python dict literal
    try:
        with open(parms_file, 'r') as inp:
            try:
                parms = eval(inp.read())
            except:
                print("Something is wrong with the parameters file.")
                raise
            return parms
    except:
        print("Specified parameters file not found.")
        raise
if __name__ == "__main__":
    # load parameters:
    if len(sys.argv) > 1:
        parms = parse_parms(sys.argv[1])
        main(parms)
    else:
        print('Not enough input arguments!')
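
# A minimal sketch of a parameters file for this script. The keys match those
# unpacked in main() above; the paths and values shown are purely illustrative
# placeholders, not defaults shipped with the toolbox. Since parse_parms()
# eval()'s the file contents, the file should hold one Python dict literal:
#
# {
#     'stationLog': '/path/to/stations.json',   # reflector (station) log, .json or .csv
#     'stackLog': '/path/to/stacks.json',       # SAR stack definitions, .json or .csv
#     'outDir': '/path/to/output/',             # output directory for JSON dumps and plots
#     'precisePosFlag': 1,                      # enable precise positioning / ALE plots
#     'ovsFactor': 32,                          # oversampling factor (illustrative value)
#     'atmoFlag': 0,                            # prepare and use atmospheric models
#     'plotFlag': 1,                            # values > 0 enable plotting
# }
#
# Invoked as, e.g.:  python mainALE.py parms.txt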