You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@climate.apache.org by le...@apache.org on 2017/09/11 14:52:55 UTC
[1/5] climate git commit: CLIMATE-912 Upgrade mccSearch code from
Python2 > 3
Repository: climate
Updated Branches:
refs/heads/master 56989f578 -> 9a30e195b
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/mccsearch/code/mccSearchUI.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mccSearchUI.py b/mccsearch/code/mccSearchUI.py
index 0794fd2..36fe642 100644
--- a/mccsearch/code/mccSearchUI.py
+++ b/mccsearch/code/mccSearchUI.py
@@ -19,29 +19,30 @@
'''
import networkx as nx
-#mccSearch modules
-import mccSearch
+# mccSearch modules
+from mccSearch import *
+
def main():
CEGraph = nx.DiGraph()
prunedGraph = nx.DiGraph()
- MCCList =[]
- MCSList=[]
- MCSMCCNodesList =[]
- allMCSsList =[]
- allCETRMMList =[]
- DIRS={}
+ MCCList = []
+ MCSList = []
+ MCSMCCNodesList = []
+ allMCSsList = []
+ allCETRMMList = []
+ DIRS = {}
# DIRS={
# mainDirStr= "/directory/to/where/to/store/outputs"
- # TRMMdirName = "/directory/to/the/TRMM/netCDF/files"
+ # TRMMdirName = "/directory/to/the/TRMM/netCDF/files"
# CEoriDirName = "/directory/to/the/MERG/netCDF/files"
# }
preprocessing = ''
rawMERG = ''
-
- print "Running MCCSearch ..... \n"
- DIRS['mainDirStr'] = raw_input("> Please enter working directory: \n" ) # This is where data created will be stored
+ print("Running MCCSearch ..... \n")
+ # This is where data created will be stored
+ DIRS['mainDirStr'] = input("> Please enter working directory: \n")
# preprocessing = raw_input ("> Do you need to preprocess the MERG files? [y/n]: \n")
# while preprocessing.lower() != 'n':
@@ -54,58 +55,66 @@ def main():
# elif preprocessing.lower() == 'n' :
# pass
# else:
- # print "Error! Invalid choice "
+ # print("Error! Invalid choice.")
# preprocessing = raw_input ("> Do you need to preprocess the MERG files? [y/n]: \n")
-
- #get the location of the MERG and TRMM data
- DIRS['CEoriDirName'] = raw_input("> Please enter the directory to the MERG netCDF files: \n")
+ # get the location of the MERG and TRMM data
+ DIRS['CEoriDirName'] = input(
+ "> Please enter the directory to the MERG netCDF files: \n")
try:
if not os.path.exists(DIRS['CEoriDirName']):
- print "Error! MERG invalid path!"
- DIRS['CEoriDirName'] = raw_input("> Please enter the directory to the MERG netCDF files: \n")
- except:
- print "..."
-
+ print("Error! MERG invalid path!")
+ DIRS['CEoriDirName'] = input(
+ "> Please enter the directory to the MERG netCDF files: \n")
+ except BaseException:
+ print("...")
- DIRS['TRMMdirName'] = raw_input("> Please enter the location to the raw TRMM netCDF files: \n")
+ DIRS['TRMMdirName'] = input(
+ "> Please enter the location to the raw TRMM netCDF files: \n")
try:
if not os.path.exists(DIRS['TRMMdirName']):
- print "Error: TRMM invalid path!"
- DIRS['TRMMdirName'] = raw_input("> Please enter the location to the raw TRMM netCDF files: \n")
- except:
+ print("Error: TRMM invalid path!")
+ DIRS['TRMMdirName'] = input(
+ "> Please enter the location to the raw TRMM netCDF files: \n")
+ except BaseException:
pass
- #get the dates for analysis
- startDateTime = raw_input("> Please enter the start date and time yyyymmddhr: \n")
- #check validity of time
+ # get the dates for analysis
+ startDateTime = input(
+ "> Please enter the start date and time yyyymmddhr: \n")
+ # check validity of time
while validDate(startDateTime) == 0:
- print "Invalid time entered for startDateTime!"
- startDateTime = raw_input("> Please enter the start date and time yyyymmddhr: \n")
+ print("Invalid time entered for startDateTime!")
+ startDateTime = input(
+ "> Please enter the start date and time yyyymmddhr: \n")
- endDateTime = raw_input("> Please enter the end date and time yyyymmddhr: \n")
+ endDateTime = input("> Please enter the end date and time yyyymmddhr: \n")
while validDate(endDateTime) == 0:
- print "Invalid time entered for endDateTime!"
- endDateTime = raw_input("> Please enter the end date and time yyyymmddhr: \n")
-
- #check if all the files exisits in the MERG and TRMM directories entered
- test, _ = mccSearch.checkForFiles(startDateTime, endDateTime, DIRS['TRMMdirName'], 2)
- if test == False:
- print "Error with files in the original TRMM directory entered. Please check your files before restarting. "
+ print("Invalid time entered for endDateTime!")
+ endDateTime = input(
+ "> Please enter the end date and time yyyymmddhr: \n")
+
+ # check if all the files exisits in the MERG and TRMM directories entered
+ test, _ = mccSearch.checkForFiles(
+ startDateTime, endDateTime, DIRS['TRMMdirName'], 2)
+ if not test:
+ print("Error with files in the original TRMM directory entered. Please check your files before restarting.")
return
- test, filelist = mccSearch.checkForFiles(startDateTime, endDateTime, DIRS['CEoriDirName'],1)
- if test == False:
- print "Error with files in the original MERG directory entered. Please check your files before restarting. "
+ test, filelist = mccSearch.checkForFiles(
+ startDateTime, endDateTime, DIRS['CEoriDirName'], 1)
+ if not test:
+ print("Error with files in the original MERG directory entered. Please check your files before restarting.")
return
- #create main directory and file structure for storing intel
+ # create main directory and file structure for storing intel
mccSearch.createMainDirectory(DIRS['mainDirStr'])
- TRMMCEdirName = DIRS['mainDirStr']+'/TRMMnetcdfCEs'
- CEdirName = DIRS['mainDirStr']+'/MERGnetcdfCEs'
+ TRMMCEdirName = DIRS['mainDirStr'] + '/TRMMnetcdfCEs'
+ CEdirName = DIRS['mainDirStr'] + '/MERGnetcdfCEs'
- # for doing some postprocessing with the clipped datasets instead of running the full program, e.g.
- postprocessing = raw_input("> Do you wish to postprocess data? [y/n] \n")
+ # for doing some postprocessing with the clipped datasets instead of
+ # running the full program, e.g.
+ postprocessing = input("> Do you wish to postprocess data? [y/n] \n")
while postprocessing.lower() != 'n':
if postprocessing.lower() == 'y':
option = postProcessingplotMenu(DIRS)
@@ -113,75 +122,85 @@ def main():
elif postprocessing.lower() == 'n':
pass
else:
- print "\n Invalid option."
- postprocessing = raw_input("> Do you wish to postprocess data? [y/n] \n")
+ print("\n Invalid option.")
+ postprocessing = input(
+ "> Do you wish to postprocess data? [y/n] \n")
# -------------------------------------------------------------------------------------------------
# Getting started. Make it so number one!
- print ("-"*80)
- print "\t\t Starting the MCCSearch Analysis "
- print ("-"*80)
- print "\n -------------- Reading MERG and TRMM Data ----------"
+ print(("-" * 80))
+ print("\t\t Starting the MCCSearch Analysis.")
+ print(("-" * 80))
+ print("\n -------------- Reading MERG and TRMM Data ----------")
mergImgs, timeList = mccSearch.readMergData(DIRS['CEoriDirName'], filelist)
- print "\n -------------- findCloudElements ----------"
- CEGraph = mccSearch.findCloudElements(mergImgs,timeList,DIRS['TRMMdirName'])
- #if the TRMMdirName wasnt entered for whatever reason, you can still get the TRMM data this way
+ print("\n -------------- findCloudElements ----------")
+ CEGraph = mccSearch.findCloudElements(
+ mergImgs, timeList, DIRS['TRMMdirName'])
+ # if the TRMMdirName wasnt entered for whatever reason, you can still get the TRMM data this way
# CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
# allCETRMMList=mccSearch.findPrecipRate(DIRS['TRMMdirName'],timeList)
- # ----------------------------------------------------------------------------------------------
- print "\n -------------- findCloudClusters ----------"
+ # ----------------------------------------------------------------------------------------------
+ print("\n -------------- findCloudClusters ----------")
prunedGraph = mccSearch.findCloudClusters(CEGraph)
- print "\n -------------- findMCCs ----------"
- MCCList,MCSList = mccSearch.findMCC(prunedGraph)
- #now ready to perform various calculations/metrics
- print ("-"*80)
- print "\n -------------- METRICS ----------"
- print ("-"*80)
- #some calculations/metrics that work that work
- print "creating the MCC userfile ", mccSearch.createTextFile(MCCList,1)
- print "creating the MCS userfile ", mccSearch.createTextFile(MCSList,2)
+ print("\n -------------- findMCCs ----------")
+ MCCList, MCSList = mccSearch.findMCC(prunedGraph)
+ # now ready to perform various calculations/metrics
+ print(("-" * 80))
+ print("\n -------------- METRICS ----------")
+ print(("-" * 80))
+ # some calculations/metrics that work that work
+ print(("creating the MCC userfile ", mccSearch.createTextFile(MCCList, 1)))
+ print(("creating the MCS userfile ", mccSearch.createTextFile(MCSList, 2)))
plotMenu(MCCList, MCSList)
-
- #Let's get outta here! Engage!
- print ("-"*80)
-#*********************************************************************************************************************
+
+ # Let's get outta here! Engage!
+ print(("-" * 80))
+#*************************************************************************
+
+
def plotMenu(MCCList, MCSList):
'''
Purpose:: The flow of plots for the user to choose
Input:: MCCList: a list of directories representing a list of nodes in the MCC
MCSList: a list of directories representing a list of nodes in the MCS
-
+
Output:: None
'''
option = displayPlotMenu()
while option != 0:
- try:
+ try:
if option == 1:
- print "Generating Accumulated Rainfall from TRMM for the entire period ...\n"
+ print(
+ "Generating Accumulated Rainfall from TRMM for the entire period ...\n")
mccSearch.plotAccTRMM(MCSList)
if option == 2:
- startDateTime = raw_input("> Please enter the start date and time yyyy-mm-dd_hr:mm:ss format: \n")
- endDateTime = raw_input("> Please enter the end date and time yyyy-mm-dd_hr:mm:ss format: \n")
- print "Generating acccumulated rainfall between ", startDateTime," and ", endDateTime, " ... \n"
+ startDateTime = input(
+ "> Please enter the start date and time yyyy-mm-dd_hr:mm:ss format: \n")
+ endDateTime = input(
+ "> Please enter the end date and time yyyy-mm-dd_hr:mm:ss format: \n")
+ print(("Generating acccumulated rainfall between ",
+ startDateTime, " and ", endDateTime, " ... \n"))
mccSearch.plotAccuInTimeRange(startDateTime, endDateTime)
if option == 3:
- print "Generating area distribution plot ... \n"
+ print("Generating area distribution plot ... \n")
mccSearch.displaySize(MCCList)
if option == 4:
- print "Generating precipitation and area distribution plot ... \n"
+ print("Generating precipitation and area distribution plot ... \n")
mccSearch.displayPrecip(MCCList)
if option == 5:
try:
- print "Generating histogram of precipitation for each time ... \n"
+ print("Generating histogram of precipitation for each time ... \n")
mccSearch.plotPrecipHistograms(MCCList)
- except:
+ except BaseException:
pass
- except:
- print "Invalid option. Please try again, enter 0 to exit \n"
-
- option = displayPlotMenu()
+ except BaseException:
+ print("Invalid option. Please try again, enter 0 to exit \n")
+
+ option = displayPlotMenu()
return
-#*********************************************************************************************************************
+#*************************************************************************
+
+
def displayPlotMenu():
'''
Purpose:: Display the plot Menu Options
@@ -190,16 +209,18 @@ def displayPlotMenu():
Output:: option: an integer representing the choice of the user
'''
- print "**************** PLOTS ************** \n"
- print "0. Exit \n"
- print "1. Accumulated TRMM precipitation \n"
- print "2. Accumulated TRMM precipitation between dates \n"
- print "3. Area distribution of the system over time \n"
- print "4. Precipitation and area distribution of the system \n"
- print "5. Histogram distribution of the rainfall in the area \n"
- option = int(raw_input("> Please enter your option for plots: \n"))
+ print("**************** PLOTS ************** \n")
+ print("0. Exit \n")
+ print("1. Accumulated TRMM precipitation \n")
+ print("2. Accumulated TRMM precipitation between dates \n")
+ print("3. Area distribution of the system over time \n")
+ print("4. Precipitation and area distribution of the system \n")
+ print("5. Histogram distribution of the rainfall in the area \n")
+ option = int(input("> Please enter your option for plots: \n"))
return option
-#*********************************************************************************************************************
+#*************************************************************************
+
+
def displayPostprocessingPlotMenu():
'''
Purpose:: Display the plot Menu Options
@@ -208,17 +229,19 @@ def displayPostprocessingPlotMenu():
Output:: option: an integer representing the choice of the user
'''
- print "**************** POST PROCESSING PLOTS ************** \n"
- print "0. Exit \n"
- print "1. Map plots of the original MERG data \n"
- print "2. Map plots of the cloud elements using IR data \n"
- print "3. Map plots of the cloud elements rainfall accumulations using TRMM data \n"
- #print "4. Accumulated TRMM precipitation \n"
- #print "5. Accumulated TRMM precipitation between dates \n"
-
- option = int(raw_input("> Please enter your option for plots: \n"))
+ print("**************** POST PROCESSING PLOTS ************** \n")
+ print("0. Exit \n")
+ print("1. Map plots of the original MERG data \n")
+ print("2. Map plots of the cloud elements using IR data \n")
+ print("3. Map plots of the cloud elements rainfall accumulations using TRMM data \n")
+ #print("4. Accumulated TRMM precipitation \n")
+ #print("5. Accumulated TRMM precipitation between dates \n")
+
+ option = int(input("> Please enter your option for plots: \n"))
return option
-#*********************************************************************************************************************
+#*************************************************************************
+
+
def postProcessingplotMenu(DIRS):
'''
Purpose:: The flow of plots for the user to choose
@@ -226,29 +249,31 @@ def postProcessingplotMenu(DIRS):
Input:: DIRS a dictionary of directories
# DIRS={
# mainDirStr= "/directory/to/where/to/store/outputs"
- # TRMMdirName = "/directory/to/the/TRMM/netCDF/files"
+ # TRMMdirName = "/directory/to/the/TRMM/netCDF/files"
# CEoriDirName = "/directory/to/the/MERG/netCDF/files"
# }
Output:: None
'''
- TRMMCEdirName = DIRS['mainDirStr']+'/TRMMnetcdfCEs'
- CEdirName = DIRS['mainDirStr']+'/MERGnetcdfCEs'
+ TRMMCEdirName = DIRS['mainDirStr'] + '/TRMMnetcdfCEs'
+ CEdirName = DIRS['mainDirStr'] + '/MERGnetcdfCEs'
option = displayPostprocessingPlotMenu()
while option != 0:
try:
if option == 1:
- print "Generating images from the original MERG dataset ... \n"
- mccSearch.postProcessingNetCDF(1, DIRS['CEoriDirName'])
+ print("Generating images from the original MERG dataset ... \n")
+ mccSearch.postProcessingNetCDF(1, DIRS['CEoriDirName'])
if option == 2:
- print "Generating images from the cloud elements using MERG IR data ... \n"
- mccSearch.postProcessingNetCDF(2, CEdirName)
+ print(
+ "Generating images from the cloud elements using MERG IR data ... \n")
+ mccSearch.postProcessingNetCDF(2, CEdirName)
if option == 3:
- print "Generating precipitation accumulation images from the cloud elements using TRMM data ... \n"
+ print(
+ "Generating precipitation accumulation images from the cloud elements using TRMM data ... \n")
mccSearch.postProcessingNetCDF(3, TRMMCEdirName)
# if option == 4:
- # print "Generating Accumulated TRMM rainfall from cloud elements for each MCS ... \n"
+ # print("Generating Accumulated TRMM rainfall from cloud elements for each MCS ... \n")
# featureType = int(raw_input("> Please enter type of MCS MCC-1 or MCS-2: \n"))
# if featureType == 1:
# filename = DIRS['mainDirStr']+'/textFiles/MCCPostProcessing.txt'
@@ -258,17 +283,19 @@ def postProcessingplotMenu(DIRS):
# mccSearch.plotAccTRMM()
# if option == 5:
# mccSearch.plotAccuInTimeRange()
- except:
- print "Invalid option, please try again"
- option = displayPostprocessingPlotMenu()
+ except BaseException:
+ print("Invalid option, please try again")
+ option = displayPostprocessingPlotMenu()
return
-#*********************************************************************************************************************
+#*************************************************************************
+
+
def validDate(dataString):
'''
'''
if len(dataString) > 10:
- print "invalid time entered"
+ print("invalid time entered")
return 0
yr = int(dataString[:4])
@@ -280,19 +307,19 @@ def validDate(dataString):
return 0
elif hh < 0 or hh > 23:
return 0
- elif (dd< 0 or dd > 30) and (mm == 4 or mm == 6 or mm == 9 or mm == 11):
+ elif (dd < 0 or dd > 30) and (mm == 4 or mm == 6 or mm == 9 or mm == 11):
return 0
- elif (dd< 0 or dd > 31) and (mm == 1 or mm ==3 or mm == 5 or mm == 7 or mm == 8 or mm == 10):
+ elif (dd < 0 or dd > 31) and (mm == 1 or mm == 3 or mm == 5 or mm == 7 or mm == 8 or mm == 10):
return 0
- elif dd > 28 and mm == 2 and (yr%4)!=0:
+ elif dd > 28 and mm == 2 and (yr % 4) != 0:
return 0
- elif (yr%4)==0 and mm == 2 and dd>29:
+ elif (yr % 4) == 0 and mm == 2 and dd > 29:
return 0
elif dd > 31 and mm == 12:
return 0
else:
return 1
-#*********************************************************************************************************************
-
-main()
+#*************************************************************************
+
+main()
[3/5] climate git commit: CLIMATE-912 Upgrade mccSearch code from
Python2 > 3
Posted by le...@apache.org.
CLIMATE-912 Upgrade mccSearch code from Python2 > 3
Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/5c86f3a8
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/5c86f3a8
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/5c86f3a8
Branch: refs/heads/master
Commit: 5c86f3a87afd8c9ed136d59d3d517207efadb20a
Parents: 7cf8179
Author: Lewis John McGibbney <le...@gmail.com>
Authored: Tue Apr 25 18:40:44 2017 -0700
Committer: Lewis John McGibbney <le...@gmail.com>
Committed: Tue Apr 25 18:40:44 2017 -0700
----------------------------------------------------------------------
deps_py2.txt | 1 +
deps_py3.txt | 1 +
mccsearch/.DS_Store | Bin 6148 -> 0 bytes
mccsearch/code/mainProg.py | 41 +-
mccsearch/code/mainProgTemplate.py | 41 +-
mccsearch/code/mccSearch.py | 7226 +++++++++++++++++--------------
mccsearch/code/mccSearchUI.py | 277 +-
7 files changed, 4171 insertions(+), 3416 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/deps_py2.txt
----------------------------------------------------------------------
diff --git a/deps_py2.txt b/deps_py2.txt
index c5cd8b6..4655a22 100644
--- a/deps_py2.txt
+++ b/deps_py2.txt
@@ -12,3 +12,4 @@ webtest
myproxyclient
esgf-pyclient
podaacpy
+networkx
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/deps_py3.txt
----------------------------------------------------------------------
diff --git a/deps_py3.txt b/deps_py3.txt
index b5fbfa8..05d845c 100644
--- a/deps_py3.txt
+++ b/deps_py3.txt
@@ -9,3 +9,4 @@ python-dateutil
mock
webtest
podaacpy
+networkx
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/mccsearch/.DS_Store
----------------------------------------------------------------------
diff --git a/mccsearch/.DS_Store b/mccsearch/.DS_Store
deleted file mode 100644
index 9cc3ce1..0000000
Binary files a/mccsearch/.DS_Store and /dev/null differ
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/mccsearch/code/mainProg.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mainProg.py b/mccsearch/code/mainProg.py
index fc3a752..d341aa6 100644
--- a/mccsearch/code/mainProg.py
+++ b/mccsearch/code/mainProg.py
@@ -20,7 +20,7 @@
'''
import networkx as nx
-import mccSearch
+from . import mccSearch
import subprocess
@@ -55,35 +55,35 @@ def main():
# -------------------------------------------------------------------------------------------------
# let's go!
- print "\n -------------- Read MERG Data ----------"
+ print("\n -------------- Read MERG Data ----------")
mergImgs, timeList = mccSearch.readMergData(CEoriDirName)
- print ("-" * 80)
+ print(("-" * 80))
- print 'in main', len(mergImgs)
+ print('in main', len(mergImgs))
# print 'timeList', timeList
- print 'TRMMdirName ', TRMMdirName
- print "\n -------------- TESTING findCloudElements ----------"
+ print('TRMMdirName ', TRMMdirName)
+ print("\n -------------- TESTING findCloudElements ----------")
CEGraph = mccSearch.findCloudElements(mergImgs, timeList, TRMMdirName)
# if the TRMMdirName wasnt entered for whatever reason, you can still get the TRMM data this way
# CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
# allCETRMMList=mccSearch.findPrecipRate(TRMMdirName,timeList)
# ----------------------------------------------------------------------------------------------
- print ("-" * 80)
- print "number of nodes in CEGraph is: ", CEGraph.number_of_nodes()
- print ("-" * 80)
- print "\n -------------- TESTING findCloudClusters ----------"
+ print(("-" * 80))
+ print("number of nodes in CEGraph is: ", CEGraph.number_of_nodes())
+ print(("-" * 80))
+ print("\n -------------- TESTING findCloudClusters ----------")
prunedGraph = mccSearch.findCloudClusters(CEGraph)
- print ("-" * 80)
- print "number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes()
- print ("-" * 80)
- print "\n -------------- TESTING findMCCs ----------"
+ print(("-" * 80))
+ print("number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes())
+ print(("-" * 80))
+ print("\n -------------- TESTING findMCCs ----------")
MCCList, MCSList = mccSearch.findMCC(prunedGraph)
- print ("-" * 80)
- print "MCC List has been acquired ", len(MCCList)
- print "MCS List has been acquired ", len(MCSList)
- print ("-" * 80)
+ print(("-" * 80))
+ print("MCC List has been acquired ", len(MCCList))
+ print("MCS List has been acquired ", len(MCSList))
+ print(("-" * 80))
# now ready to perform various calculations/metrics
- print "\n -------------- TESTING METRICS ----------"
+ print("\n -------------- TESTING METRICS ----------")
# some calculations/metrics that work that work
# print "creating the MCC userfile ", mccSearch.createTextFile(MCCList,1)
@@ -104,6 +104,7 @@ def main():
# mccSearch.displayPrecip(MCCList)
# mccSearch.plotHistogram(MCCList)
#
- print ("-" * 80)
+ print(("-" * 80))
+
main()
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/mccsearch/code/mainProgTemplate.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mainProgTemplate.py b/mccsearch/code/mainProgTemplate.py
index 8ef0d0b..96da757 100644
--- a/mccsearch/code/mainProgTemplate.py
+++ b/mccsearch/code/mainProgTemplate.py
@@ -20,7 +20,7 @@
import sys
import networkx as nx
-import mccSearch
+from . import mccSearch
import numpy as np
import numpy.ma as ma
import files
@@ -59,36 +59,36 @@ def main():
# -------------------------------------------------------------------------------------------------
# let's go!
- print "\n -------------- Read MERG Data ----------"
+ print("\n -------------- Read MERG Data ----------")
mergImgs, timeList = mccSearch.readMergData(CEoriDirName)
- print ("-" * 80)
+ print(("-" * 80))
- print 'in main', len(mergImgs)
+ print('in main', len(mergImgs))
# print 'timeList', timeList
- print 'TRMMdirName ', TRMMdirName
- print "\n -------------- TESTING findCloudElements ----------"
+ print('TRMMdirName ', TRMMdirName)
+ print("\n -------------- TESTING findCloudElements ----------")
CEGraph = mccSearch.findCloudElements(mergImgs, timeList, TRMMdirName)
# if the TRMMdirName wasnt entered for whatever reason, you can still get the TRMM data this way
# CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
# allCETRMMList=mccSearch.findPrecipRate(TRMMdirName,timeList)
# ----------------------------------------------------------------------------------------------
- print ("-" * 80)
- print "number of nodes in CEGraph is: ", CEGraph.number_of_nodes()
- print ("-" * 80)
- print "\n -------------- TESTING findCloudClusters ----------"
+ print(("-" * 80))
+ print("number of nodes in CEGraph is: ", CEGraph.number_of_nodes())
+ print(("-" * 80))
+ print("\n -------------- TESTING findCloudClusters ----------")
prunedGraph = mccSearch.findCloudClusters(CEGraph)
- print ("-" * 80)
- print "number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes()
- print ("-" * 80)
+ print(("-" * 80))
+ print("number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes())
+ print(("-" * 80))
# sys.exit()
- print "\n -------------- TESTING findMCCs ----------"
+ print("\n -------------- TESTING findMCCs ----------")
MCCList, MCSList = mccSearch.findMCC(prunedGraph)
- print ("-" * 80)
- print "MCC List has been acquired ", len(MCCList)
- print "MCS List has been acquired ", len(MCSList)
- print ("-" * 80)
+ print(("-" * 80))
+ print("MCC List has been acquired ", len(MCCList))
+ print("MCS List has been acquired ", len(MCSList))
+ print(("-" * 80))
# now ready to perform various calculations/metrics
- print "\n -------------- TESTING METRICS ----------"
+ print("\n -------------- TESTING METRICS ----------")
# some calculations/metrics that work that work
# print "creating the MCC userfile ", mccSearch.createTextFile(MCCList,1)
@@ -109,6 +109,7 @@ def main():
# mccSearch.displayPrecip(MCCList)
# mccSearch.plotHistogram(MCCList)
#
- print ("-" * 80)
+ print(("-" * 80))
+
main()
[4/5] climate git commit: Merge branch 'master' into CLIMATE-912
Posted by le...@apache.org.
Merge branch 'master' into CLIMATE-912
Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/51d9dce1
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/51d9dce1
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/51d9dce1
Branch: refs/heads/master
Commit: 51d9dce12c9c9b77e7e48f3cca054bcf5647e458
Parents: 5c86f3a 56989f5
Author: Lewis John McGibbney <le...@gmail.com>
Authored: Wed Aug 9 11:42:08 2017 -0700
Committer: GitHub <no...@github.com>
Committed: Wed Aug 9 11:42:08 2017 -0700
----------------------------------------------------------------------
RCMES/cli_app.py | 16 +-
.../NARCCAP_examples/Fig14_and_Fig15.yaml | 4 +-
.../NARCCAP_examples/Fig16_summer.yaml | 5 +-
.../NARCCAP_examples/Fig16_winter.yaml | 4 +-
.../run_statistical_downscaling.py | 17 +-
examples/esgf_integration_example.py | 17 +-
examples/podaac_integration_example.py | 7 +-
examples/simple_model_to_model_bias.py | 14 +-
examples/taylor_diagram_example.py | 13 +-
examples/time_series_with_regions.py | 21 +-
mccsearch/code/mccSearch.py | 16 +-
mccsearch/code/mccSearchUI.py | 1 +
ocw/data_source/esgf.py | 11 +-
ocw/data_source/local.py | 4 +-
ocw/dataset_processor.py | 90 ++----
ocw/esgf/download.py | 28 +-
ocw/tests/TestGetNetcdfVariableNames.nc | Bin 0 -> 471996 bytes
ocw/tests/test_dataset_processor.py | 6 +-
ocw/tests/test_local.py | 280 +++++++++++--------
ocw/utils.py | 2 +-
test_smoke.py | 4 +-
21 files changed, 306 insertions(+), 254 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/climate/blob/51d9dce1/mccsearch/code/mccSearch.py
----------------------------------------------------------------------
diff --cc mccsearch/code/mccSearch.py
index 9f396f8,6f1fa26..c63b243
--- a/mccsearch/code/mccSearch.py
+++ b/mccsearch/code/mccSearch.py
@@@ -44,72 -44,61 +44,73 @@@ import ocw.plotter as plotte
#----------------------- GLOBAL VARIABLES --------------------------
# --------------------- User defined variables ---------------------
-#FYI the lat lon values are not necessarily inclusive of the points given. These are the limits
-#the first point closest the the value (for the min) from the MERG data is used, etc.
-LATMIN = '5.0' #min latitude; -ve values in the SH e.g. 5S = -5
-LATMAX = '19.0' #max latitude; -ve values in the SH e.g. 5S = -5 20.0
-LONMIN = '-5.0' #min longitude; -ve values in the WH e.g. 59.8W = -59.8 -30
-LONMAX = '5.0' #min longitude; -ve values in the WH e.g. 59.8W = -59.8 30
-XRES = 4.0 #x direction spatial resolution in km
-YRES = 4.0 #y direction spatial resolution in km
-TRES = 1 #temporal resolution in hrs
-LAT_DISTANCE = 111.0 #the avg distance in km for 1deg lat for the region being considered
-LON_DISTANCE = 111.0 #the avg distance in km for 1deg lon for the region being considered
-STRUCTURING_ELEMENT = [[0,1,0],[1,1,1],[0,1,0]] #the matrix for determining the pattern for the contiguous boxes and must
- #have same rank of the matrix it is being compared against
-#criteria for determining cloud elements and edges
-T_BB_MAX = 243 #warmest temp to allow (-30C to -55C according to Morel and Sensi 2002)
-T_BB_MIN = 218 #cooler temp for the center of the system
-CONVECTIVE_FRACTION = 0.90 #the min temp/max temp that would be expected in a CE.. this is highly conservative (only a 10K difference)
-MIN_MCS_DURATION = 3 #minimum time for a MCS to exist
-AREA_MIN = 2400.0 #minimum area for CE criteria in km^2 according to Vila et al. (2008) is 2400
-MIN_OVERLAP= 10000.00 #km^2 from Williams and Houze 1987, indir ref in Arnaud et al 1992
+# FYI the lat lon values are not necessarily inclusive of the points given. These are the limits
+# the first point closest the the value (for the min) from the MERG data
+# is used, etc.
+LATMIN = '5.0' # min latitude; -ve values in the SH e.g. 5S = -5
+LATMAX = '19.0' # max latitude; -ve values in the SH e.g. 5S = -5 20.0
+LONMIN = '-5.0' # min longitude; -ve values in the WH e.g. 59.8W = -59.8 -30
+LONMAX = '5.0' # min longitude; -ve values in the WH e.g. 59.8W = -59.8 30
+XRES = 4.0 # x direction spatial resolution in km
+YRES = 4.0 # y direction spatial resolution in km
+TRES = 1 # temporal resolution in hrs
+LAT_DISTANCE = 111.0 # the avg distance in km for 1deg lat for the region being considered
+LON_DISTANCE = 111.0 # the avg distance in km for 1deg lon for the region being considered
+# the matrix for determining the pattern for the contiguous boxes and must
+STRUCTURING_ELEMENT = [[0, 1, 0], [1, 1, 1], [0, 1, 0]]
+# have same rank of the matrix it is being compared against
+# criteria for determining cloud elements and edges
+# warmest temp to allow (-30C to -55C according to Morel and Sensi 2002)
+T_BB_MAX = 243
+T_BB_MIN = 218 # cooler temp for the center of the system
+# the min temp/max temp that would be expected in a CE.. this is highly
+# conservative (only a 10K difference)
+CONVECTIVE_FRACTION = 0.90
+MIN_MCS_DURATION = 3 # minimum time for a MCS to exist
+# minimum area for CE criteria in km^2 according to Vila et al. (2008) is 2400
+AREA_MIN = 2400.0
+# km^2 from Williams and Houze 1987, indir ref in Arnaud et al 1992
+MIN_OVERLAP = 10000.00
#---the MCC criteria
-ECCENTRICITY_THRESHOLD_MAX = 1.0 #tending to 1 is a circle e.g. hurricane,
-ECCENTRICITY_THRESHOLD_MIN = 0.50 #tending to 0 is a linear e.g. squall line
-OUTER_CLOUD_SHIELD_AREA = 80000.0 #km^2
-INNER_CLOUD_SHIELD_AREA = 30000.0 #km^2
-OUTER_CLOUD_SHIELD_TEMPERATURE = 233 #in K
-INNER_CLOUD_SHIELD_TEMPERATURE = 213 #in K
-MINIMUM_DURATION = 6 #min number of frames the MCC must exist for (assuming hrly frames, MCCs is 6hrs)
-MAXIMUM_DURATION = 24#max number of framce the MCC can last for
+ECCENTRICITY_THRESHOLD_MAX = 1.0 # tending to 1 is a circle e.g. hurricane,
+ECCENTRICITY_THRESHOLD_MIN = 0.50 # tending to 0 is a linear e.g. squall line
+OUTER_CLOUD_SHIELD_AREA = 80000.0 # km^2
+INNER_CLOUD_SHIELD_AREA = 30000.0 # km^2
+OUTER_CLOUD_SHIELD_TEMPERATURE = 233 # in K
+INNER_CLOUD_SHIELD_TEMPERATURE = 213 # in K
+# min number of frames the MCC must exist for (assuming hrly frames, MCCs
+# is 6hrs)
+MINIMUM_DURATION = 6
+MAXIMUM_DURATION = 24 # max number of framce the MCC can last for
#------------------- End user defined Variables -------------------
-edgeWeight = [1,2,3] #weights for the graph edges
-#graph object fo the CEs meeting the criteria
+edgeWeight = [1, 2, 3] # weights for the graph edges
+# graph object fo the CEs meeting the criteria
CLOUD_ELEMENT_GRAPH = nx.DiGraph()
-#graph meeting the CC criteria
+# graph meeting the CC criteria
PRUNED_GRAPH = nx.DiGraph()
#------------------------ End GLOBAL VARS -------------------------
++
#************************ Begin Functions *************************
#******************************************************************
-def readMergData(dirname, filelist = None):
+
+
+def readMergData(dirname, filelist=None):
'''
Purpose::
- Read MERG data into RCMES format
-
+ Read MERG data into RCMES format
+
Input::
- dirname: a string representing the directory to the MERG files in NETCDF format
- filelist (optional): a list of strings representing the filenames betweent the start and end dates provided
-
+ dirname: a string representing the directory to the MERG files in NETCDF format
+ filelist (optional): a list of strings representing the filenames betweent the start and end dates provided
+
Output::
- A 3D masked array (t,lat,lon) with only the variables which meet the minimum temperature
- criteria for each frame
+ A 3D masked array (t,lat,lon) with only the variables which meet the minimum temperature
+ criteria for each frame
Assumptions::
- The MERG data has been converted to NETCDF using LATS4D
- The data has the same lat/lon format
+ The MERG data has been converted to NETCDF using LATS4D
+ The data has the same lat/lon format
TODO:: figure out how to use netCDF4 to do the clipping tmp = netCDF4.Dataset(filelist[0])
@@@ -123,17 -112,19 +124,16 @@@
mergTimeVarName = 'time'
mergLatVarName = 'latitude'
mergLonVarName = 'longitude'
-
-
filelistInstructions = dirname + '/*'
- if filelist == None:
+ if filelist is None:
filelist = glob.glob(filelistInstructions)
-
- #sat_img is the array that will contain all the masked frames
+ # sat_img is the array that will contain all the masked frames
mergImgs = []
- #timelist of python time strings
- timelist = []
+ # timelist of python time strings
+ timelist = []
time2store = None
- tempMaskedValueNp =[]
-
+ tempMaskedValueNp = []
filelist.sort()
nfiles = len(filelist)
@@@ -271,20 -254,16 +271,21 @@@ def findCloudElements(mergImgs, timelis
minCELonLimit = 0.0
maxCELatLimit = 0.0
maxCELonLimit = 0.0
-
+
nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
-
- #openfile for storing ALL cloudElement information
- cloudElementsFile = open((MAINDIRECTORY+'/textFiles/cloudElements.txt'),'wb')
- #openfile for storing cloudElement information meeting user criteria i.e. MCCs in this case
- cloudElementsUserFile = open((MAINDIRECTORY+'/textFiles/cloudElementsUserFile.txt'),'w')
-
- #NB in the TRMM files the info is hours since the time thus 00Z file has in 01, 02 and 03 times
- for t in xrange(mergImgs.shape[0]):
+
+ # openfile for storing ALL cloudElement information
+ cloudElementsFile = open(
+ (MAINDIRECTORY + '/textFiles/cloudElements.txt'), 'wb')
+ # openfile for storing cloudElement information meeting user criteria i.e.
+ # MCCs in this case
+ cloudElementsUserFile = open(
+ (MAINDIRECTORY + '/textFiles/cloudElementsUserFile.txt'), 'w')
+
+ # NB in the TRMM files the info is hours since the time thus 00Z file has
+ # in 01, 02 and 03 times
+ for t in range(mergImgs.shape[0]):
++
#-------------------------------------------------
# #textfile name for saving the data for arcgis
# thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + '.txt'
@@@ -460,60 -409,48 +461,59 @@@
currNetCDFTRMMData.conventions = 'COARDS'
# dimensions
currNetCDFTRMMData.createDimension('time', None)
- currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
- currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
-
+ currNetCDFTRMMData.createDimension('lat', len(LAT[:, 0]))
+ currNetCDFTRMMData.createDimension('lon', len(LON[0, :]))
+
# variables
- TRMMprecip = ('time','lat', 'lon',)
- times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
- times.units = 'hours since '+ str(timelist[t])[:-6]
- latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
- longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
- rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
+ TRMMprecip = ('time', 'lat', 'lon',)
+ times = currNetCDFTRMMData.createVariable(
+ 'time', 'f8', ('time',))
+ times.units = 'hours since ' + str(timelist[t])[:-6]
+ latitude = currNetCDFTRMMData.createVariable(
+ 'latitude', 'f8', ('lat',))
+ longitude = currNetCDFTRMMData.createVariable(
+ 'longitude', 'f8', ('lon',))
+ rainFallacc = currNetCDFTRMMData.createVariable(
+ 'precipitation_Accumulation', 'f8', TRMMprecip)
rainFallacc.units = 'mm'
- longitude[:] = LON[0,:]
- longitude.units = "degrees_east"
- longitude.long_name = "Longitude"
+ longitude[:] = LON[0, :]
+ longitude.units = "degrees_east"
+ longitude.long_name = "Longitude"
- latitude[:] = LAT[:,0]
+ latitude[:] = LAT[:, 0]
latitude.units = "degrees_north"
- latitude.long_name ="Latitude"
+ latitude.long_name = "Latitude"
finalCETRMMvalues = ma.zeros((brightnesstemp.shape))
- #-----------End most of NETCDF file stuff ------------------------------------
+ #-----------End most of NETCDF file stuff -----------------
- #populate cloudElementLatLons by unpacking the original values from loc to get the actual value for lat and lon
- #TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
+ # populate cloudElementLatLons by unpacking the original values from loc to get the actual value for lat and lon
+ # TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
# as cloudElement is masked
- for index,value in np.ndenumerate(cloudElement):
- if value != 0 :
- lat_index,lon_index = index
- lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index],value)
-
- #generate the comma separated file for GIS
+ for index, value in np.ndenumerate(cloudElement):
+ if value != 0:
+ lat_index, lon_index = index
+ lat_lon_tuple = (
+ cloudElementLat[lat_index],
+ cloudElementLon[lon_index],
+ value)
+
+ # generate the comma separated file for GIS
cloudElementLatLons.append(lat_lon_tuple)
- #temp data for CE NETCDF file
- brightnesstemp1[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = value
-
- if TRMMdirName:
- finalCETRMMvalues[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = regriddedTRMM[int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])]
- CETRMMList.append((cloudElementLat[lat_index], cloudElementLon[lon_index], finalCETRMMvalues[0,cloudElementLat[lat_index], cloudElementLon[lon_index]]))
-
+ # temp data for CE NETCDF file
+ brightnesstemp1[0, int(np.where(LAT[:, 0] == cloudElementLat[lat_index])[0]), int(
+ np.where(LON[0, :] == cloudElementLon[lon_index])[0])] = value
+ if TRMMdirName:
+ finalCETRMMvalues[0, int(np.where(LAT[:, 0] == cloudElementLat[lat_index])[0]), int(np.where(LON[0, :] == cloudElementLon[lon_index])[
+ 0])] = regriddedTRMM[int(np.where(LAT[:, 0] == cloudElementLat[lat_index])[0]), int(np.where(LON[0, :] == cloudElementLon[lon_index])[0])]
+ CETRMMList.append((cloudElementLat[lat_index],
+ cloudElementLon[lon_index],
+ finalCETRMMvalues[0,
+ cloudElementLat[lat_index],
+ cloudElementLon[lon_index]]))
-
brightnesstemp[:] = brightnesstemp1[:]
currNetCDFCEData.close()
@@@ -917,54 -746,51 +917,53 @@@ def findPrecipRate(TRMMdirName, timelis
minCEprecipRate = 0.0
try:
- maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
- except:
+ maxCEprecipRate = np.max(
+ finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+ except BaseException:
maxCEprecipRate = 0.0
- #add info to CLOUDELEMENTSGRAPH
- #TODO try block
+ # add info to CLOUDELEMENTSGRAPH
+ # TODO try block
for eachdict in CLOUD_ELEMENT_GRAPH.nodes(CEuniqueID):
if eachdict[1]['uniqueID'] == CEuniqueID:
- if not 'cloudElementPrecipTotal' in eachdict[1].keys():
+ if 'cloudElementPrecipTotal' not in list(eachdict[1].keys()):
eachdict[1]['cloudElementPrecipTotal'] = precipTotal
- if not 'cloudElementLatLonTRMM' in eachdict[1].keys():
+ if 'cloudElementLatLonTRMM' not in list(eachdict[1].keys()):
eachdict[1]['cloudElementLatLonTRMM'] = finalCETRMMvalues
- if not 'TRMMArea' in eachdict[1].keys():
+ if 'TRMMArea' not in list(eachdict[1].keys()):
eachdict[1]['TRMMArea'] = TRMMArea
- if not 'CETRMMmin' in eachdict[1].keys():
+ if 'CETRMMmin' not in list(eachdict[1].keys()):
eachdict[1]['CETRMMmin'] = minCEprecipRate
- if not 'CETRMMmax' in eachdict[1].keys():
+ if 'CETRMMmax' not in list(eachdict[1].keys()):
eachdict[1]['CETRMMmax'] = maxCEprecipRate
- #clean up
+ # clean up
precipTotal = 0.0
- latsrawTRMMData =[]
+ latsrawTRMMData = []
lonsrawTRMMData = []
- latsrawCloudElements=[]
- lonsrawCloudElements=[]
- finalCETRMMvalues =[]
- CEprecipRate =[]
- brightnesstemp =[]
- TRMMdataDict ={}
+ latsrawCloudElements = []
+ lonsrawCloudElements = []
+ finalCETRMMvalues = []
+ CEprecipRate = []
+ brightnesstemp = []
+ TRMMdataDict = {}
return allCEnodesTRMMdata
-#******************************************************************
+#******************************************************************
+
+
def findCloudClusters(CEGraph):
'''
- Purpose::
- Determines the cloud clusters properties from the subgraphs in
- the graph i.e. prunes the graph according to the minimum depth
+ Purpose::
+ Determines the cloud clusters properties from the subgraphs in
+ the graph i.e. prunes the graph according to the minimum depth
- Input::
- CEGraph: a Networkx directed graph of the CEs with weighted edges
- according the area overlap between nodes (CEs) of consectuive frames
-
- Output::
- PRUNED_GRAPH: a Networkx directed graph of with CCs/ MCSs
+ Input::
+ CEGraph: a Networkx directed graph of the CEs with weighted edges
+ according to the area overlap between nodes (CEs) of consecutive frames
+ Output::
+ PRUNED_GRAPH: a Networkx directed graph of with CCs/ MCSs
-
'''
seenNode = []
@@@ -1112,148 -924,105 +1111,147 @@@ def findMCC(prunedGraph)
MCCList = checkedNodesMCC(prunedGraph, treeTraversalList)
for aDict in MCCList:
for eachNode in aDict["fullMCSMCC"]:
- addNodeMCSIdentifier(eachNode[0],eachNode[1])
-
- #do check for if MCCs overlap
+ addNodeMCSIdentifier(eachNode[0], eachNode[1])
+
+ # do check for if MCCs overlap
if MCCList:
if len(MCCList) > 1:
- for count in range(len(MCCList)): #for eachDict in MCCList:
- #if there are more than two lists
+ # for eachDict in MCCList:
+ for count in range(len(MCCList)):
+ # if there are more than two lists
if count >= 1:
- #and the first node in this list
- eachList = list(x[0] for x in MCCList[count]["possMCCList"])
- eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+ # and the first node in this list
+ eachList = list(
+ x[0] for x in MCCList[count]["possMCCList"])
+ eachList.sort(key=lambda nodeID: (
+ len(nodeID.split('C')[0]), nodeID.split('C')[0]))
if eachList:
fNode = eachList[0]
- #get the lastNode in the previous possMCC list
- eachList = list(x[0] for x in MCCList[(count-1)]["possMCCList"])
- eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+ # get the lastNode in the previous possMCC list
+ eachList = list(
+ x[0] for x in MCCList[(count - 1)]["possMCCList"])
+ eachList.sort(key=lambda nodeID: (
+ len(nodeID.split('C')[0]), nodeID.split('C')[0]))
if eachList:
lNode = eachList[-1]
- if lNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
- for aNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
+ if lNode in CLOUD_ELEMENT_GRAPH.predecessors(
+ fNode):
+ for aNode in CLOUD_ELEMENT_GRAPH.predecessors(
+ fNode):
if aNode in eachList and aNode == lNode:
- #if edge_data is equal or less than to the exisitng edge in the tree append one to the other
- if CLOUD_ELEMENT_GRAPH.get_edge_data(aNode,fNode)['weight'] <= CLOUD_ELEMENT_GRAPH.get_edge_data(lNode,fNode)['weight']:
- MCCList[count-1]["possMCCList"].extend(MCCList[count]["possMCCList"])
- MCCList[count-1]["fullMCSMCC"].extend(MCCList[count]["fullMCSMCC"])
- MCCList[count-1]["durationAandB"] += MCCList[count]["durationAandB"]
- MCCList[count-1]["CounterCriteriaA"] += MCCList[count]["CounterCriteriaA"]
- MCCList[count-1]["highestMCCnode"] = MCCList[count]["highestMCCnode"]
- MCCList[count-1]["frameNum"] = MCCList[count]["frameNum"]
+ # if
+ # edge_data
+ # is
+ # equal
+ # or
+ # less
+ # than
+ # to
+ # the
+ # existing
+ # edge
+ # in
+ # the
+ # tree
+ # append
+ # one
+ # to
+ # the
+ # other
+ if CLOUD_ELEMENT_GRAPH.get_edge_data(
+ aNode, fNode)['weight'] <= CLOUD_ELEMENT_GRAPH.get_edge_data(
+ lNode, fNode)['weight']:
+ MCCList[count - 1]["possMCCList"].extend(
+ MCCList[count]["possMCCList"])
+ MCCList[count - 1]["fullMCSMCC"].extend(
+ MCCList[count]["fullMCSMCC"])
+ MCCList[count -
+ 1]["durationAandB"] += MCCList[count]["durationAandB"]
+ MCCList[count -
+ 1]["CounterCriteriaA"] += MCCList[count]["CounterCriteriaA"]
+ MCCList[count -
+ 1]["highestMCCnode"] = MCCList[count]["highestMCCnode"]
+ MCCList[count -
+ 1]["frameNum"] = MCCList[count]["frameNum"]
removeList.append(count)
- #update the MCCList
+ # update the MCCList
if removeList:
for i in removeList:
- if (len(MCCList)-1) > i:
+ if (len(MCCList) - 1) > i:
del MCCList[i]
- removeList =[]
-
- #check if the nodes also meet the duration criteria and the shape crieria
+ removeList = []
+
+ # check if the nodes also meet the duration criteria and the shape
+ # criteria
for eachDict in MCCList:
- #order the fullMCSMCC list, then run maximum extent and eccentricity criteria
- if (eachDict["durationAandB"] * TRES) >= MINIMUM_DURATION and (eachDict["durationAandB"] * TRES) <= MAXIMUM_DURATION:
+ # order the fullMCSMCC list, then run maximum extent and
+ # eccentricity criteria
+ if (eachDict["durationAandB"] *
+ TRES) >= MINIMUM_DURATION and (eachDict["durationAandB"] *
+ TRES) <= MAXIMUM_DURATION:
eachList = list(x[0] for x in eachDict["fullMCSMCC"])
- eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+ eachList.sort(key=lambda nodeID: (
+ len(nodeID.split('C')[0]), nodeID.split('C')[0]))
eachMCCList = list(x[0] for x in eachDict["possMCCList"])
- eachMCCList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
-
- #update the nodemcsidentifer behavior
- #find the first element eachMCCList in eachList, and ensure everything ahead of it is indicated as 'I',
- #find last element in eachMCCList in eachList and ensure everything after it is indicated as 'D'
- #ensure that everything between is listed as 'M'
- for eachNode in eachList[:(eachList.index(eachMCCList[0]))]:
- addNodeMCSIdentifier(eachNode,'I')
-
- addNodeMCSIdentifier(eachMCCList[0],'M')
-
- for eachNode in eachList[(eachList.index(eachMCCList[-1])+1):]:
+ eachMCCList.sort(key=lambda nodeID: (
+ len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+
+ # update the nodemcsidentifer behavior
+ # find the first element eachMCCList in eachList, and ensure everything ahead of it is indicated as 'I',
+ # find last element in eachMCCList in eachList and ensure everything after it is indicated as 'D'
+ # ensure that everything between is listed as 'M'
+ for eachNode in eachList[:(
+ eachList.index(eachMCCList[0]))]:
+ addNodeMCSIdentifier(eachNode, 'I')
+
+ addNodeMCSIdentifier(eachMCCList[0], 'M')
+
+ for eachNode in eachList[(
+ eachList.index(eachMCCList[-1]) + 1):]:
addNodeMCSIdentifier(eachNode, 'D')
- #update definiteMCS list
- for eachNode in orderedPath[(orderedPath.index(eachMCCList[-1])+1):]:
+ # update definiteMCS list
+ for eachNode in orderedPath[(
+ orderedPath.index(eachMCCList[-1]) + 1):]:
addNodeMCSIdentifier(eachNode, 'D')
- #run maximum extent and eccentricity criteria
- maxExtentNode, definiteMCCFlag = maxExtentAndEccentricity(eachList)
- #print "maxExtentNode, definiteMCCFlag ", maxExtentNode, definiteMCCFlag
- if definiteMCCFlag == True:
+ # run maximum extent and eccentricity criteria
+ maxExtentNode, definiteMCCFlag = maxExtentAndEccentricity(
+ eachList)
+ if definiteMCCFlag:
definiteMCC.append(eachList)
-
definiteMCS.append(orderedPath)
-
- #reset for next subGraph
+
+ # reset for next subGraph
aSubGraph.clear()
- orderedPath=[]
- MCCList =[]
- MCSList =[]
+ orderedPath = []
+ MCCList = []
+ MCSList = []
definiteMCSFlag = False
-
+
return definiteMCC, definiteMCS
#******************************************************************
-def traverseTree(subGraph,node, stack, checkedNodes=None):
+
+
+def traverseTree(subGraph, node, stack, checkedNodes=None):
'''
- Purpose::
- To traverse a tree using a modified depth-first iterative deepening (DFID) search algorithm
+ Purpose::
+ To traverse a tree using a modified depth-first iterative deepening (DFID) search algorithm
+
+ Input::
+ subGraph: a Networkx DiGraph representing a CC
+ lengthOfsubGraph: an integer representing the length of the subgraph
+ node: a string representing the node currently being checked
+ stack: a list of strings representing a list of nodes in a stack functionality
+ i.e. Last-In-First-Out (LIFO) for sorting the information from each visited node
+ checkedNodes: a list of strings representing the list of the nodes in the traversal
- Input::
- subGraph: a Networkx DiGraph representing a CC
- lengthOfsubGraph: an integer representing the length of the subgraph
- node: a string representing the node currently being checked
- stack: a list of strings representing a list of nodes in a stack functionality
- i.e. Last-In-First-Out (LIFO) for sorting the information from each visited node
+ Output::
checkedNodes: a list of strings representing the list of the nodes in the traversal
-
- Output::
- checkedNodes: a list of strings representing the list of the nodes in the traversal
- Assumptions:
- frames are ordered and are equally distributed in time e.g. hrly satellite images
-
+ Assumptions:
+ frames are ordered and are equally distributed in time e.g. hrly satellite images
-
'''
if len(checkedNodes) == len(subGraph):
return checkedNodes
@@@ -1270,10 -1038,10 +1268,9 @@@
if parent not in checkedNodes and parent not in stack:
for child in downOneLevel:
if child not in checkedNodes and child not in stack:
- stack.insert(0,child)
-
- stack.insert(0,parent)
+ stack.insert(0, child)
+ stack.insert(0, parent)
-
for child in downOneLevel:
if child not in checkedNodes and child not in stack:
if len(subGraph.predecessors(child)) > 1 or node in checkedNodes:
@@@ -1556,62 -1231,38 +1553,63 @@@ def updateMCCList
potentialMCCList[index]["highestMCCnode"] = node
return potentialMCCList
- #if this frameNum doesn't exist and this frameNum is less than the MCC node max frame Num (including 0), then append to fullMCSMCC list
- if frameNum > potentialMCCList[index]["frameNum"] or potentialMCCList[index]["frameNum"]==0:
+ # if this frameNum doesn't exist and this frameNum is less than
+ # the MCC node max frame Num (including 0), then append to
+ # fullMCSMCC list
+ if frameNum > potentialMCCList[index]["frameNum"] or potentialMCCList[index]["frameNum"] == 0:
stage = 'I'
- if CounterCriteriaAFlag == True and CounterCriteriaBFlag == False:
- potentialMCCList.append({"possMCCList":[], "fullMCSMCC":[(node,stage)], "CounterCriteriaA": 1, "durationAandB": 0, "highestMCCnode":"", "frameNum":0})
+ if CounterCriteriaAFlag and CounterCriteriaBFlag == False:
+ potentialMCCList.append(
+ {
+ "possMCCList": [],
+ "fullMCSMCC": [
+ (node,
+ stage)],
+ "CounterCriteriaA": 1,
+ "durationAandB": 0,
+ "highestMCCnode": "",
+ "frameNum": 0})
return potentialMCCList
elif CounterCriteriaAFlag == False and CounterCriteriaBFlag == False:
- potentialMCCList.append({"possMCCList":[], "fullMCSMCC":[(node,stage)], "CounterCriteriaA": 0, "durationAandB": 0, "highestMCCnode":"", "frameNum":0})
+ potentialMCCList.append(
+ {
+ "possMCCList": [],
+ "fullMCSMCC": [
+ (node,
+ stage)],
+ "CounterCriteriaA": 0,
+ "durationAandB": 0,
+ "highestMCCnode": "",
+ "frameNum": 0})
return potentialMCCList
- #if predecessor and this frame number already exist in the MCC list, add the current node to the fullMCSMCC list
- if existingFrameFlag == True:
- if CounterCriteriaAFlag == True and CounterCriteriaBFlag == False:
- potentialMCCList[index]["fullMCSMCC"].append((node,stage))
- potentialMCCList[index]["CounterCriteriaA"] +=1
+ # if predecessor and this frame number already exist in the MCC
+ # list, add the current node to the fullMCSMCC list
+ if existingFrameFlag:
+ if CounterCriteriaAFlag and CounterCriteriaBFlag == False:
+ potentialMCCList[index]["fullMCSMCC"].append((node, stage))
+ potentialMCCList[index]["CounterCriteriaA"] += 1
return potentialMCCList
- if CounterCriteriaAFlag == False:
- potentialMCCList[index]["fullMCSMCC"].append((node,stage))
- return potentialMCCList
-
- if predecessorsFlag == False:
- successorsFlag, index = isThereALink(prunedGraph, 2,node,potentialMCCList,2)
-
- if successorsFlag == True:
- for eachNode in potentialMCCList[index]["possMCCList"]:
- if int((eachNode[0].split('CE')[0]).split('F')[1]) == frameNum:
+ if not CounterCriteriaAFlag:
+ potentialMCCList[index]["fullMCSMCC"].append((node, stage))
+ return potentialMCCList
+
+ if not predecessorsFlag:
+ successorsFlag, index = isThereALink(
+ prunedGraph, 2, node, potentialMCCList, 2)
+
+ if successorsFlag:
+ for eachNode in potentialMCCList[index]["possMCCList"]:
+ if int(
+ (eachNode[0].split('CE')[0]).split('F')[1]) == frameNum:
existingFrameFlag = True
-
- if CounterCriteriaAFlag == True and CounterCriteriaBFlag == True:
+
+ if CounterCriteriaAFlag and CounterCriteriaBFlag:
stage = 'M'
- potentialMCCList[index]["possMCCList"].append((node,stage))
- potentialMCCList[index]["fullMCSMCC"].append((node,stage))
+ potentialMCCList[index]["possMCCList"].append(
+ (node, stage))
+ potentialMCCList[index]["fullMCSMCC"].append((node, stage))
++
if frameNum > potentialMCCList[index]["frameNum"] or potentialMCCList[index]["frameNum"] == 0:
potentialMCCList[index]["frameNum"] = frameNum
potentialMCCList[index]["highestMCCnode"] = node
@@@ -1660,24 -1305,19 +1658,25 @@@
return potentialMCCList
if potentialMCCList[index]["frameNum"] == 0 or frameNum <= potentialMCCList[index]["frameNum"]:
- if CounterCriteriaAFlag == True and CounterCriteriaBFlag == False:
- potentialMCCList[index]["fullMCSMCC"].append((node,stage))
- potentialMCCList[index]["CounterCriteriaA"] +=1
+ if CounterCriteriaAFlag and CounterCriteriaBFlag == False:
+ potentialMCCList[index]["fullMCSMCC"].append(
+ (node, stage))
+ potentialMCCList[index]["CounterCriteriaA"] += 1
return potentialMCCList
elif CounterCriteriaAFlag == False:
- potentialMCCList[index]["fullMCSMCC"].append((node,stage))
+ potentialMCCList[index]["fullMCSMCC"].append(
+ (node, stage))
return potentialMCCList
else:
- successorsMCSFlag, index = isThereALink(prunedGraph, 2,node,potentialMCCList,2)
- if successorsMCSFlag == True:
- if CounterCriteriaAFlag == True and CounterCriteriaBFlag == True:
- potentialMCCList[index]["possMCCList"].append((node,'M'))
- potentialMCCList[index]["fullMCSMCC"].append((node,'M'))
+ successorsMCSFlag, index = isThereALink(
+ prunedGraph, 2, node, potentialMCCList, 2)
+ if successorsMCSFlag:
+ if CounterCriteriaAFlag and CounterCriteriaBFlag:
+ potentialMCCList[index]["possMCCList"].append(
+ (node, 'M'))
+ potentialMCCList[index]["fullMCSMCC"].append(
+ (node, 'M'))
++
potentialMCCList[index]["durationAandB"] += 1
if frameNum > potentialMCCList[index]["frameNum"]:
potentialMCCList[index]["frameNum"] = frameNum
@@@ -2071,24 -1641,22 +2070,23 @@@ def allDescendants(path, aNode)
path = path + aNode
#i.e. PRUNED_GRAPH.predecessors(aNode) is empty
return path, numOfChildren
- except:
- #i.e. PRUNED_GRAPH.predecessors(aNode) threw an exception
+ # i.e. PRUNED_GRAPH.predecessors(aNode) threw an exception
+ except BaseException:
return path, numOfChildren
#******************************************************************
-def addInfothisDict (thisNode, cloudElementArea,criteriaB):
- '''
- Purpose::
- Update original dictionary node with information
- Input::
- thisNode: a string representing the unique ID of a node
- cloudElementArea: a floating-point number representing the area of the cloud element
- criteriaB: a masked array of floating-point numbers representing the lat,lons meeting the criteria
- Output:: None
+def addInfothisDict(thisNode, cloudElementArea, criteriaB):
+ '''
+ Purpose::
+ Update original dictionary node with information
+
+ Input::
+ thisNode: a string representing the unique ID of a node
+ cloudElementArea: a floating-point number representing the area of the cloud element
+ criteriaB: a masked array of floating-point numbers representing the lat,lons meeting the criteria
+ Output:: None
-
'''
for eachdict in CLOUD_ELEMENT_GRAPH.nodes(thisNode):
if eachdict[1]['uniqueID'] == thisNode:
@@@ -3323,29 -2737,24 +3321,28 @@@ def displaySize(finalMCCList)
ax.set_title(title)
ax.fmt_xdata = mdates.DateFormatter('%Y-%m-%d%H:%M:%S')
fig.autofmt_xdate()
-
-
plt.subplots_adjust(bottom=0.2)
-
- imgFilename = MAINDIRECTORY+'/images/'+ str(count)+'MCS.gif'
- plt.savefig(imgFilename, facecolor=fig.get_facecolor(), transparent=True)
-
- #if time in not already in the time list, append it
- timeList=[]
+
+ imgFilename = MAINDIRECTORY + '/images/' + str(count) + 'MCS.gif'
+ plt.savefig(
+ imgFilename,
+ facecolor=fig.get_facecolor(),
+ transparent=True)
+
+ # if time in not already in the time list, append it
+ timeList = []
count += 1
- return
+ return
#******************************************************************
-def displayPrecip(finalMCCList):
+
+
+def displayPrecip(finalMCCList):
'''
- Purpose::
- To create a figure showing the precip rate verse time for each MCS
+ Purpose::
+ To create a figure showing the precip rate versus time for each MCS
- Input::
- finalMCCList: a list of dictionaries representing a list of nodes representing a MCC
+ Input::
+ finalMCCList: a list of dictionaries representing a list of nodes representing a MCC
Output:: None
@@@ -3730,25 -3093,24 +3727,24 @@@ def plotAccuInTimeRange(starttime, endt
accuTRMMData.createDimension('time', None)
accuTRMMData.createDimension('lat', nygrdTRMM)
accuTRMMData.createDimension('lon', nxgrdTRMM)
-
+
# variables
- TRMMprecip = ('time','lat', 'lon',)
+ TRMMprecip = ('time', 'lat', 'lon',)
times = accuTRMMData.createVariable('time', 'f8', ('time',))
- times.units = 'hours since '+ starttime[:-6]
+ times.units = 'hours since ' + starttime[:-6]
latitude = accuTRMMData.createVariable('latitude', 'f8', ('lat',))
longitude = accuTRMMData.createVariable('longitude', 'f8', ('lon',))
- rainFallacc = accuTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip)
+ rainFallacc = accuTRMMData.createVariable(
+ 'precipitation_Accumulation', 'f8', TRMMprecip)
rainFallacc.units = 'mm'
- longitude[:] = LONTRMM[0,:]
- longitude.units = "degrees_east"
- longitude.long_name = "Longitude"
+ longitude[:] = LONTRMM[0, :]
+ longitude.units = "degrees_east"
+ longitude.long_name = "Longitude"
- latitude[:] = LATTRMM[:,0]
+ latitude[:] = LATTRMM[:, 0]
latitude.units = "degrees_north"
- latitude.long_name ="Latitude"
-
+ latitude.long_name = "Latitude"
-
rainFallacc[:] = accuPrecipRate[:]
accuTRMMData.close()
@@@ -4235,22 -3515,18 +4231,23 @@@ def createTextFile(finalMCCList, identi
#******************************************************************
# PLOTTING UTIL SCRIPTS
#******************************************************************
-def to_percent(y,position):
+
+
+def to_percent(y, position):
'''
- Purpose::
- Utility script for generating the y-axis for plots
+ Purpose::
+ Utility script for generating the y-axis for plots
'''
- return (str(100*y)+'%')
+ return (str(100 * y) + '%')
#******************************************************************
+
+
def colorbar_index(ncolors, nlabels, cmap):
'''
- Purpose::
- Utility script for crating a colorbar
- Taken from http://stackoverflow.com/questions/18704353/correcting-matplotlib-colorbar-ticks
+ Purpose::
+ Utility script for creating a colorbar
+ Taken from http://stackoverflow.com/questions/18704353/correcting-matplotlib-colorbar-ticks
++
'''
cmap = cmap_discretize(cmap, ncolors)
mappable = cm.ScalarMappable(cmap=cmap)
@@@ -4293,22 -3567,22 +4290,19 @@@ def cmap_discretize(cmap, N)
# def preprocessingMERG(MERGdirname):
# '''
# Purpose::
-# Utility script for unzipping and converting the merg*.Z files from Mirador to
+# Utility script for unzipping and converting the merg*.Z files from Mirador to
# NETCDF format. The files end up in a folder called mergNETCDF in the directory
# where the raw MERG data is
-# NOTE: VERY RAW AND DIRTY
-
+# NOTE: VERY RAW AND DIRTY
-
# Input::
# Directory to the location of the raw MERG files, preferably zipped
-
-
# Output::
# none
--
# Assumptions::
# 1 GrADS (http://www.iges.org/grads/gadoc/) and lats4D (http://opengrads.org/doc/scripts/lats4d/)
-# have been installed on the system and the user can access
+# have been installed on the system and the user can access
# 2 User can write files in location where script is being called
-# 3 the files havent been unzipped
+# 3 the files havent been unzipped
# '''
# os.chdir((MERGdirname+'/'))
http://git-wip-us.apache.org/repos/asf/climate/blob/51d9dce1/mccsearch/code/mccSearchUI.py
----------------------------------------------------------------------
diff --cc mccsearch/code/mccSearchUI.py
index 36fe642,a227143..feea1d9
--- a/mccsearch/code/mccSearchUI.py
+++ b/mccsearch/code/mccSearchUI.py
@@@ -18,10 -18,10 +18,11 @@@
# Wizard for running the mccSearch program
'''
+ import os
import networkx as nx
-#mccSearch modules
-import mccSearch
+# mccSearch modules
+from mccSearch import *
+
def main():
CEGraph = nx.DiGraph()
[2/5] climate git commit: CLIMATE-912 Upgrade mccSearch code from
Python2 > 3
Posted by le...@apache.org.
http://git-wip-us.apache.org/repos/asf/climate/blob/5c86f3a8/mccsearch/code/mccSearch.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mccSearch.py b/mccsearch/code/mccSearch.py
index 15ce0c6..9f396f8 100644
--- a/mccsearch/code/mccSearch.py
+++ b/mccsearch/code/mccSearch.py
@@ -32,7 +32,7 @@ import time
import networkx as nx
import matplotlib.dates as mdates
-from matplotlib.dates import DateFormatter,HourLocator
+from matplotlib.dates import DateFormatter, HourLocator
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.basemap import cm as cmbm
import matplotlib.pyplot as plt
@@ -44,2123 +44,2628 @@ import ocw.plotter as plotter
#----------------------- GLOBAL VARIABLES --------------------------
# --------------------- User defined variables ---------------------
-#FYI the lat lon values are not necessarily inclusive of the points given. These are the limits
-#the first point closest the the value (for the min) from the MERG data is used, etc.
-LATMIN = '5.0' #min latitude; -ve values in the SH e.g. 5S = -5
-LATMAX = '19.0' #max latitude; -ve values in the SH e.g. 5S = -5 20.0
-LONMIN = '-5.0' #min longitude; -ve values in the WH e.g. 59.8W = -59.8 -30
-LONMAX = '5.0' #min longitude; -ve values in the WH e.g. 59.8W = -59.8 30
-XRES = 4.0 #x direction spatial resolution in km
-YRES = 4.0 #y direction spatial resolution in km
-TRES = 1 #temporal resolution in hrs
-LAT_DISTANCE = 111.0 #the avg distance in km for 1deg lat for the region being considered
-LON_DISTANCE = 111.0 #the avg distance in km for 1deg lon for the region being considered
-STRUCTURING_ELEMENT = [[0,1,0],[1,1,1],[0,1,0]] #the matrix for determining the pattern for the contiguous boxes and must
- #have same rank of the matrix it is being compared against
-#criteria for determining cloud elements and edges
-T_BB_MAX = 243 #warmest temp to allow (-30C to -55C according to Morel and Sensi 2002)
-T_BB_MIN = 218 #cooler temp for the center of the system
-CONVECTIVE_FRACTION = 0.90 #the min temp/max temp that would be expected in a CE.. this is highly conservative (only a 10K difference)
-MIN_MCS_DURATION = 3 #minimum time for a MCS to exist
-AREA_MIN = 2400.0 #minimum area for CE criteria in km^2 according to Vila et al. (2008) is 2400
-MIN_OVERLAP= 10000.00 #km^2 from Williams and Houze 1987, indir ref in Arnaud et al 1992
+# FYI the lat lon values are not necessarily inclusive of the points given. These are the limits
+# the first point closest to the value (for the min) from the MERG data
+# is used, etc.
+LATMIN = '5.0' # min latitude; -ve values in the SH e.g. 5S = -5
+LATMAX = '19.0' # max latitude; -ve values in the SH e.g. 5S = -5 20.0
+LONMIN = '-5.0' # min longitude; -ve values in the WH e.g. 59.8W = -59.8 -30
+LONMAX = '5.0' # min longitude; -ve values in the WH e.g. 59.8W = -59.8 30
+XRES = 4.0 # x direction spatial resolution in km
+YRES = 4.0 # y direction spatial resolution in km
+TRES = 1 # temporal resolution in hrs
+LAT_DISTANCE = 111.0 # the avg distance in km for 1deg lat for the region being considered
+LON_DISTANCE = 111.0 # the avg distance in km for 1deg lon for the region being considered
+# the matrix for determining the pattern for the contiguous boxes and must
+STRUCTURING_ELEMENT = [[0, 1, 0], [1, 1, 1], [0, 1, 0]]
+# have same rank of the matrix it is being compared against
+# criteria for determining cloud elements and edges
+# warmest temp to allow (-30C to -55C according to Morel and Sensi 2002)
+T_BB_MAX = 243
+T_BB_MIN = 218 # cooler temp for the center of the system
+# the min temp/max temp that would be expected in a CE.. this is highly
+# conservative (only a 10K difference)
+CONVECTIVE_FRACTION = 0.90
+MIN_MCS_DURATION = 3 # minimum time for a MCS to exist
+# minimum area for CE criteria in km^2 according to Vila et al. (2008) is 2400
+AREA_MIN = 2400.0
+# km^2 from Williams and Houze 1987, indir ref in Arnaud et al 1992
+MIN_OVERLAP = 10000.00
#---the MCC criteria
-ECCENTRICITY_THRESHOLD_MAX = 1.0 #tending to 1 is a circle e.g. hurricane,
-ECCENTRICITY_THRESHOLD_MIN = 0.50 #tending to 0 is a linear e.g. squall line
-OUTER_CLOUD_SHIELD_AREA = 80000.0 #km^2
-INNER_CLOUD_SHIELD_AREA = 30000.0 #km^2
-OUTER_CLOUD_SHIELD_TEMPERATURE = 233 #in K
-INNER_CLOUD_SHIELD_TEMPERATURE = 213 #in K
-MINIMUM_DURATION = 6 #min number of frames the MCC must exist for (assuming hrly frames, MCCs is 6hrs)
-MAXIMUM_DURATION = 24#max number of framce the MCC can last for
+ECCENTRICITY_THRESHOLD_MAX = 1.0 # tending to 1 is a circle e.g. hurricane,
+ECCENTRICITY_THRESHOLD_MIN = 0.50 # tending to 0 is a linear e.g. squall line
+OUTER_CLOUD_SHIELD_AREA = 80000.0 # km^2
+INNER_CLOUD_SHIELD_AREA = 30000.0 # km^2
+OUTER_CLOUD_SHIELD_TEMPERATURE = 233 # in K
+INNER_CLOUD_SHIELD_TEMPERATURE = 213 # in K
+# min number of frames the MCC must exist for (assuming hrly frames, MCCs
+# is 6hrs)
+MINIMUM_DURATION = 6
+MAXIMUM_DURATION = 24 # max number of frames the MCC can last for
#------------------- End user defined Variables -------------------
-edgeWeight = [1,2,3] #weights for the graph edges
-#graph object fo the CEs meeting the criteria
+edgeWeight = [1, 2, 3] # weights for the graph edges
+# graph object for the CEs meeting the criteria
CLOUD_ELEMENT_GRAPH = nx.DiGraph()
-#graph meeting the CC criteria
+# graph meeting the CC criteria
PRUNED_GRAPH = nx.DiGraph()
#------------------------ End GLOBAL VARS -------------------------
#************************ Begin Functions *************************
#******************************************************************
-def readMergData(dirname, filelist = None):
- '''
- Purpose::
- Read MERG data into RCMES format
-
- Input::
- dirname: a string representing the directory to the MERG files in NETCDF format
- filelist (optional): a list of strings representing the filenames betweent the start and end dates provided
-
- Output::
- A 3D masked array (t,lat,lon) with only the variables which meet the minimum temperature
- criteria for each frame
-
- Assumptions::
- The MERG data has been converted to NETCDF using LATS4D
- The data has the same lat/lon format
-
- TODO:: figure out how to use netCDF4 to do the clipping tmp = netCDF4.Dataset(filelist[0])
-
- '''
-
- global LAT
- global LON
-
- # these strings are specific to the MERG data
- mergVarName = 'ch4'
- mergTimeVarName = 'time'
- mergLatVarName = 'latitude'
- mergLonVarName = 'longitude'
-
- filelistInstructions = dirname + '/*'
- if filelist == None:
- filelist = glob.glob(filelistInstructions)
-
-
- #sat_img is the array that will contain all the masked frames
- mergImgs = []
- #timelist of python time strings
- timelist = []
- time2store = None
- tempMaskedValueNp =[]
-
-
- filelist.sort()
- nfiles = len(filelist)
-
- # Crash nicely if there are no netcdf files
- if nfiles == 0:
- print 'Error: no files in this directory! Exiting elegantly'
- sys.exit()
- else:
- # Open the first file in the list to read in lats, lons and generate the grid for comparison
- tmp = Dataset(filelist[0], 'r+',format='NETCDF4')
-
- alllatsraw = tmp.variables[mergLatVarName][:]
- alllonsraw = tmp.variables[mergLonVarName][:]
- alllonsraw[alllonsraw > 180] = alllonsraw[alllonsraw > 180] - 360. # convert to -180,180 if necessary
-
- #get the lat/lon info data (different resolution)
- latminNETCDF = find_nearest(alllatsraw, float(LATMIN))
- latmaxNETCDF = find_nearest(alllatsraw, float(LATMAX))
- lonminNETCDF = find_nearest(alllonsraw, float(LONMIN))
- lonmaxNETCDF = find_nearest(alllonsraw, float(LONMAX))
- latminIndex = (np.where(alllatsraw == latminNETCDF))[0][0]
- latmaxIndex = (np.where(alllatsraw == latmaxNETCDF))[0][0]
- lonminIndex = (np.where(alllonsraw == lonminNETCDF))[0][0]
- lonmaxIndex = (np.where(alllonsraw == lonmaxNETCDF))[0][0]
-
- #subsetting the data
- latsraw = alllatsraw[latminIndex: latmaxIndex]
- lonsraw = alllonsraw[lonminIndex:lonmaxIndex]
-
- LON, LAT = np.meshgrid(lonsraw, latsraw)
- #clean up
- latsraw =[]
- lonsraw = []
- nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
- tmp.close
-
- for files in filelist:
- try:
- thisFile = Dataset(files, format='NETCDF4')
- #clip the dataset according to user lat,lon coordinates
- tempRaw = thisFile.variables[mergVarName][:,latminIndex:latmaxIndex,lonminIndex:lonmaxIndex].astype('int16')
- tempMask = ma.masked_array(tempRaw, mask=(tempRaw > T_BB_MAX), fill_value=0)
-
- #get the actual values that the mask returned
- tempMaskedValue = ma.zeros((tempRaw.shape)).astype('int16')
- for index, value in maenumerate(tempMask):
- time_index, lat_index, lon_index = index
- tempMaskedValue[time_index,lat_index, lon_index]=value
-
- xtimes = thisFile.variables[mergTimeVarName]
- #convert this time to a python datastring
- time2store, _ = getModelTimes(xtimes, mergTimeVarName)
- #extend instead of append because getModelTimes returns a list already and we don't
- #want a list of list
- timelist.extend(time2store)
- mergImgs.extend(tempMaskedValue)
- thisFile.close
- thisFile = None
-
- except:
- print "bad file! ", files
-
- mergImgs = ma.array(mergImgs)
-
- return mergImgs, timelist
+
+
+def readMergData(dirname, filelist=None):
+ '''
+ Purpose::
+ Read MERG data into RCMES format
+
+ Input::
+ dirname: a string representing the directory to the MERG files in NETCDF format
+        filelist (optional): a list of strings representing the filenames between the start and end dates provided
+
+ Output::
+ A 3D masked array (t,lat,lon) with only the variables which meet the minimum temperature
+ criteria for each frame
+
+ Assumptions::
+ The MERG data has been converted to NETCDF using LATS4D
+ The data has the same lat/lon format
+
+ TODO:: figure out how to use netCDF4 to do the clipping tmp = netCDF4.Dataset(filelist[0])
+
+ '''
+
+ global LAT
+ global LON
+
+ # these strings are specific to the MERG data
+ mergVarName = 'ch4'
+ mergTimeVarName = 'time'
+ mergLatVarName = 'latitude'
+ mergLonVarName = 'longitude'
+
+ filelistInstructions = dirname + '/*'
+ if filelist is None:
+ filelist = glob.glob(filelistInstructions)
+
+ # sat_img is the array that will contain all the masked frames
+ mergImgs = []
+ # timelist of python time strings
+ timelist = []
+ time2store = None
+ tempMaskedValueNp = []
+
+ filelist.sort()
+ nfiles = len(filelist)
+
+ # Crash nicely if there are no netcdf files
+ if nfiles == 0:
+ print("Error: no files in this directory! Exiting elegantly")
+ sys.exit()
+ else:
+ # Open the first file in the list to read in lats, lons and generate
+ # the grid for comparison
+ tmp = Dataset(filelist[0], 'r+', format='NETCDF4')
+
+ alllatsraw = tmp.variables[mergLatVarName][:]
+ alllonsraw = tmp.variables[mergLonVarName][:]
+ alllonsraw[alllonsraw > 180] = alllonsraw[alllonsraw >
+ 180] - 360. # convert to -180,180 if necessary
+
+ # get the lat/lon info data (different resolution)
+ latminNETCDF = find_nearest(alllatsraw, float(LATMIN))
+ latmaxNETCDF = find_nearest(alllatsraw, float(LATMAX))
+ lonminNETCDF = find_nearest(alllonsraw, float(LONMIN))
+ lonmaxNETCDF = find_nearest(alllonsraw, float(LONMAX))
+ latminIndex = (np.where(alllatsraw == latminNETCDF))[0][0]
+ latmaxIndex = (np.where(alllatsraw == latmaxNETCDF))[0][0]
+ lonminIndex = (np.where(alllonsraw == lonminNETCDF))[0][0]
+ lonmaxIndex = (np.where(alllonsraw == lonmaxNETCDF))[0][0]
+
+ # subsetting the data
+ latsraw = alllatsraw[latminIndex: latmaxIndex]
+ lonsraw = alllonsraw[lonminIndex:lonmaxIndex]
+
+ LON, LAT = np.meshgrid(lonsraw, latsraw)
+ # clean up
+ latsraw = []
+ lonsraw = []
+ nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
+ tmp.close
+
+ for files in filelist:
+ try:
+ thisFile = Dataset(files, format='NETCDF4')
+ # clip the dataset according to user lat,lon coordinates
+ tempRaw = thisFile.variables[mergVarName][:,
+ latminIndex:latmaxIndex,
+ lonminIndex:lonmaxIndex].astype('int16')
+ tempMask = ma.masked_array(
+ tempRaw, mask=(
+ tempRaw > T_BB_MAX), fill_value=0)
+
+ # get the actual values that the mask returned
+ tempMaskedValue = ma.zeros((tempRaw.shape)).astype('int16')
+ for index, value in maenumerate(tempMask):
+ time_index, lat_index, lon_index = index
+ tempMaskedValue[time_index, lat_index, lon_index] = value
+
+ xtimes = thisFile.variables[mergTimeVarName]
+ # convert this time to a python datastring
+ time2store, _ = getModelTimes(xtimes, mergTimeVarName)
+ # extend instead of append because getModelTimes returns a list already and we don't
+ # want a list of list
+ timelist.extend(time2store)
+ mergImgs.extend(tempMaskedValue)
+ thisFile.close
+ thisFile = None
+
+ except BaseException:
+ print(("bad file! %s" % files))
+
+ mergImgs = ma.array(mergImgs)
+
+ return mergImgs, timelist
#******************************************************************
-def findCloudElements(mergImgs,timelist,TRMMdirName=None):
- '''
- Purpose::
- Determines the contiguous boxes for a given time of the satellite images i.e. each frame
- using scipy ndimage package
-
- Input::
- mergImgs: masked numpy array in (time,lat,lon),T_bb representing the satellite data. This is masked based on the
- maximum acceptable temperature, T_BB_MAX
- timelist: a list of python datatimes
- TRMMdirName (optional): string representing the path where to find the TRMM datafiles
-
- Output::
- CLOUD_ELEMENT_GRAPH: a Networkx directed graph where each node contains the information in cloudElementDict
- The nodes are determined according to the area of contiguous squares. The nodes are linked through weighted edges.
-
- cloudElementDict = {'uniqueID': unique tag for this CE,
- 'cloudElementTime': time of the CE,
- 'cloudElementLatLon': (lat,lon,value) of MERG data of CE,
- 'cloudElementCenter':list of floating-point [lat,lon] representing the CE's center
- 'cloudElementArea':floating-point representing the area of the CE,
- 'cloudElementEccentricity': floating-point representing the shape of the CE,
- 'cloudElementTmax':integer representing the maximum Tb in CE,
- 'cloudElementTmin': integer representing the minimum Tb in CE,
- 'cloudElementPrecipTotal':floating-point representing the sum of all rainfall in CE if TRMMdirName entered,
- 'cloudElementLatLonTRMM':(lat,lon,value) of TRMM data in CE if TRMMdirName entered,
- 'TRMMArea': floating-point representing the CE if TRMMdirName entered,
- 'CETRMMmax':floating-point representing the max rate in the CE if TRMMdirName entered,
- 'CETRMMmin':floating-point representing the min rate in the CE if TRMMdirName entered}
- Assumptions::
- Assumes we are dealing with MERG data which is 4kmx4km resolved, thus the smallest value
- required according to Vila et al. (2008) is 2400km^2
- therefore, 2400/16 = 150 contiguous squares
- '''
-
- frame = ma.empty((1,mergImgs.shape[1],mergImgs.shape[2]))
- CEcounter = 0
- frameCEcounter = 0
- frameNum = 0
- cloudElementEpsilon = 0.0
- cloudElementDict = {}
- cloudElementCenter = [] #list with two elements [lat,lon] for the center of a CE
- prevFrameCEs = [] #list for CEs in previous frame
- currFrameCEs = [] #list for CEs in current frame
- cloudElementLat = [] #list for a particular CE's lat values
- cloudElementLon = [] #list for a particular CE's lon values
- cloudElementLatLons = [] #list for a particular CE's (lat,lon) values
-
- prevLatValue = 0.0
- prevLonValue = 0.0
- TIR_min = 0.0
- TIR_max = 0.0
- temporalRes = 3 # TRMM data is 3 hourly
- precipTotal = 0.0
- CETRMMList =[]
- precip =[]
- TRMMCloudElementLatLons =[]
-
- minCELatLimit = 0.0
- minCELonLimit = 0.0
- maxCELatLimit = 0.0
- maxCELonLimit = 0.0
-
- nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
-
- #openfile for storing ALL cloudElement information
- cloudElementsFile = open((MAINDIRECTORY+'/textFiles/cloudElements.txt'),'wb')
- #openfile for storing cloudElement information meeting user criteria i.e. MCCs in this case
- cloudElementsUserFile = open((MAINDIRECTORY+'/textFiles/cloudElementsUserFile.txt'),'w')
-
- #NB in the TRMM files the info is hours since the time thus 00Z file has in 01, 02 and 03 times
- for t in xrange(mergImgs.shape[0]):
- #-------------------------------------------------
- # #textfile name for saving the data for arcgis
- # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + '.txt'
- # cloudElementsTextFile = open(thisFileName,'w')
- #-------------------------------------------------
-
- #determine contiguous locations with temeperature below the warmest temp i.e. cloudElements in each frame
- frame, CEcounter = ndimage.measurements.label(mergImgs[t,:,:], structure=STRUCTURING_ELEMENT)
- frameCEcounter=0
- frameNum += 1
-
- #for each of the areas identified, check to determine if it a valid CE via an area and T requirement
- for count in xrange(CEcounter):
- #[0] is time dimension. Determine the actual values from the data
- #loc is a masked array
- try:
- loc = ndimage.find_objects(frame==(count+1))[0]
- except Exception, e:
- print "Error is ", e
- continue
-
-
- cloudElement = mergImgs[t,:,:][loc]
- labels, lcounter = ndimage.label(cloudElement)
-
- #determine the true lats and lons for this particular CE
- cloudElementLat = LAT[loc[0],0]
- cloudElementLon = LON[0,loc[1]]
-
- #determine number of boxes in this cloudelement
- numOfBoxes = np.count_nonzero(cloudElement)
- cloudElementArea = numOfBoxes*XRES*YRES
-
- #If the area is greater than the area required, or if the area is smaller than the suggested area, check if it meets a convective fraction requirement
- #consider as CE
-
- if cloudElementArea >= AREA_MIN or (cloudElementArea < AREA_MIN and ((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels)))) < CONVECTIVE_FRACTION ):
-
- #get some time information and labeling info
- frameTime = str(timelist[t])
- frameCEcounter +=1
- CEuniqueID = 'F'+str(frameNum)+'CE'+str(frameCEcounter)
-
- #-------------------------------------------------
- #textfile name for accesing CE data using MATLAB code
- # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.txt'
- # cloudElementsTextFile = open(thisFileName,'w')
- #-------------------------------------------------
-
- # ------ NETCDF File stuff for brightness temp stuff ------------------------------------
- thisFileName = MAINDIRECTORY +'/MERGnetcdfCEs/cloudElements'+ (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.nc'
- currNetCDFCEData = Dataset(thisFileName, 'w', format='NETCDF4')
- currNetCDFCEData.description = 'Cloud Element '+CEuniqueID + ' temperature data'
- currNetCDFCEData.calendar = 'standard'
- currNetCDFCEData.conventions = 'COARDS'
- # dimensions
- currNetCDFCEData.createDimension('time', None)
- currNetCDFCEData.createDimension('lat', len(LAT[:,0]))
- currNetCDFCEData.createDimension('lon', len(LON[0,:]))
- # variables
- tempDims = ('time','lat', 'lon',)
- times = currNetCDFCEData.createVariable('time', 'f8', ('time',))
- times.units = 'hours since '+ str(timelist[t])[:-6]
- latitudes = currNetCDFCEData.createVariable('latitude', 'f8', ('lat',))
- longitudes = currNetCDFCEData.createVariable('longitude', 'f8', ('lon',))
- brightnesstemp = currNetCDFCEData.createVariable('brightnesstemp', 'i16',tempDims )
- brightnesstemp.units = 'Kelvin'
- # NETCDF data
- dates=[timelist[t]+timedelta(hours=0)]
- times[:] = date2num(dates,units=times.units)
- longitudes[:] = LON[0,:]
- longitudes.units = "degrees_east"
- longitudes.long_name = "Longitude"
-
- latitudes[:] = LAT[:,0]
- latitudes.units = "degrees_north"
- latitudes.long_name ="Latitude"
-
- #generate array of zeros for brightness temperature
- brightnesstemp1 = ma.zeros((1,len(latitudes), len(longitudes))).astype('int16')
- #-----------End most of NETCDF file stuff ------------------------------------
-
- #if other dataset (TRMM) assumed to be a precipitation dataset was entered
- if TRMMdirName:
- #------------------TRMM stuff -------------------------------------------------
- fileDate = ((str(timelist[t])).replace(" ", "")[:-8]).replace("-","")
- fileHr1 = (str(timelist[t])).replace(" ", "")[-8:-6]
-
- if int(fileHr1) % temporalRes == 0:
- fileHr = fileHr1
- else:
- fileHr = (int(fileHr1)/temporalRes) * temporalRes
- if fileHr < 10:
- fileHr = '0'+str(fileHr)
- else:
- str(fileHr)
-
- #open TRMM file for the resolution info and to create the appropriate sized grid
- TRMMfileName = TRMMdirName+'/3B42.'+ fileDate + "."+str(fileHr)+".7A.nc"
-
- TRMMData = Dataset(TRMMfileName,'r', format='NETCDF4')
- precipRate = TRMMData.variables['pcp'][:,:,:]
- latsrawTRMMData = TRMMData.variables['latitude'][:]
- lonsrawTRMMData = TRMMData.variables['longitude'][:]
- lonsrawTRMMData[lonsrawTRMMData > 180] = lonsrawTRMMData[lonsrawTRMMData>180] - 360.
- LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
-
- nygrdTRMM = len(LATTRMM[:,0]); nxgrdTRMM = len(LONTRMM[0,:])
- precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
- #---------regrid the TRMM data to the MERG dataset ----------------------------------
- #regrid using the do_regrid stuff from the Apache OCW
- regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
- #regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM, LONTRMM, LAT, LON, order=1, mdi= -999999999)
- regriddedTRMM = do_regrid(precipRateMasked[0,:,:], LATTRMM, LONTRMM, LAT, LON, order=1, mdi= -999999999)
- #----------------------------------------------------------------------------------
-
- # #get the lat/lon info from cloudElement
- #get the lat/lon info from the file
- latCEStart = LAT[0][0]
- latCEEnd = LAT[-1][0]
- lonCEStart = LON[0][0]
- lonCEEnd = LON[0][-1]
-
- #get the lat/lon info for TRMM data (different resolution)
- latStartT = find_nearest(latsrawTRMMData, latCEStart)
- latEndT = find_nearest(latsrawTRMMData, latCEEnd)
- lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
- lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
- latStartIndex = np.where(latsrawTRMMData == latStartT)
- latEndIndex = np.where(latsrawTRMMData == latEndT)
- lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
- lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
-
- #get the relevant TRMM info
- CEprecipRate = precipRate[:,(latStartIndex[0][0]-1):latEndIndex[0][0],(lonStartIndex[0][0]-1):lonEndIndex[0][0]]
- TRMMData.close()
-
- # ------ NETCDF File info for writing TRMM CE rainfall ------------------------------------
- thisFileName = MAINDIRECTORY+'/TRMMnetcdfCEs/TRMM' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.nc'
- currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
- currNetCDFTRMMData.description = 'Cloud Element '+CEuniqueID + ' precipitation data'
- currNetCDFTRMMData.calendar = 'standard'
- currNetCDFTRMMData.conventions = 'COARDS'
- # dimensions
- currNetCDFTRMMData.createDimension('time', None)
- currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
- currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
-
- # variables
- TRMMprecip = ('time','lat', 'lon',)
- times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
- times.units = 'hours since '+ str(timelist[t])[:-6]
- latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
- longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
- rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
- rainFallacc.units = 'mm'
-
- longitude[:] = LON[0,:]
- longitude.units = "degrees_east"
- longitude.long_name = "Longitude"
-
- latitude[:] = LAT[:,0]
- latitude.units = "degrees_north"
- latitude.long_name ="Latitude"
-
- finalCETRMMvalues = ma.zeros((brightnesstemp.shape))
- #-----------End most of NETCDF file stuff ------------------------------------
-
- #populate cloudElementLatLons by unpacking the original values from loc to get the actual value for lat and lon
- #TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
- # as cloudElement is masked
- for index,value in np.ndenumerate(cloudElement):
- if value != 0 :
- lat_index,lon_index = index
- lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index],value)
-
- #generate the comma separated file for GIS
- cloudElementLatLons.append(lat_lon_tuple)
-
- #temp data for CE NETCDF file
- brightnesstemp1[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = value
-
- if TRMMdirName:
- finalCETRMMvalues[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = regriddedTRMM[int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])]
- CETRMMList.append((cloudElementLat[lat_index], cloudElementLon[lon_index], finalCETRMMvalues[0,cloudElementLat[lat_index], cloudElementLon[lon_index]]))
-
-
- brightnesstemp[:] = brightnesstemp1[:]
- currNetCDFCEData.close()
-
- if TRMMdirName:
-
- #calculate the total precip associated with the feature
- for index, value in np.ndenumerate(finalCETRMMvalues):
- precipTotal += value
- precip.append(value)
-
- rainFallacc[:] = finalCETRMMvalues[:]
- currNetCDFTRMMData.close()
- TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
- TRMMArea = TRMMnumOfBoxes*XRES*YRES
- try:
- maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
- minCEprecipRate = np.min(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
- except:
- pass
-
- #sort cloudElementLatLons by lats
- cloudElementLatLons.sort(key=lambda tup: tup[0])
-
- #determine if the cloud element the shape
- cloudElementEpsilon = eccentricity (cloudElement)
- cloudElementsUserFile.write("\n\nTime is: %s" %(str(timelist[t])))
- cloudElementsUserFile.write("\nCEuniqueID is: %s" %CEuniqueID)
- latCenter, lonCenter = ndimage.measurements.center_of_mass(cloudElement, labels=labels)
-
- #latCenter and lonCenter are given according to the particular array defining this CE
- #so you need to convert this value to the overall domain truth
- latCenter = cloudElementLat[round(latCenter)]
- lonCenter = cloudElementLon[round(lonCenter)]
- cloudElementsUserFile.write("\nCenter (lat,lon) is: %.2f\t%.2f" %(latCenter, lonCenter))
- cloudElementCenter.append(latCenter)
- cloudElementCenter.append(lonCenter)
- cloudElementsUserFile.write("\nNumber of boxes are: %d" %numOfBoxes)
- cloudElementsUserFile.write("\nArea is: %.4f km^2" %(cloudElementArea))
- cloudElementsUserFile.write("\nAverage brightness temperature is: %.4f K" %ndimage.mean(cloudElement, labels=labels))
- cloudElementsUserFile.write("\nMin brightness temperature is: %.4f K" %ndimage.minimum(cloudElement, labels=labels))
- cloudElementsUserFile.write("\nMax brightness temperature is: %.4f K" %ndimage.maximum(cloudElement, labels=labels))
- cloudElementsUserFile.write("\nBrightness temperature variance is: %.4f K" %ndimage.variance(cloudElement, labels=labels))
- cloudElementsUserFile.write("\nConvective fraction is: %.4f " %(((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels))))*100.0))
- cloudElementsUserFile.write("\nEccentricity is: %.4f " %(cloudElementEpsilon))
- #populate the dictionary
- if TRMMdirName:
- cloudElementDict = {'uniqueID': CEuniqueID, 'cloudElementTime': timelist[t],'cloudElementLatLon': cloudElementLatLons, 'cloudElementCenter':cloudElementCenter, 'cloudElementArea':cloudElementArea, 'cloudElementEccentricity':cloudElementEpsilon, 'cloudElementTmax':TIR_max, 'cloudElementTmin': TIR_min, 'cloudElementPrecipTotal':precipTotal,'cloudElementLatLonTRMM':CETRMMList, 'TRMMArea': TRMMArea,'CETRMMmax':maxCEprecipRate, 'CETRMMmin':minCEprecipRate}
- else:
- cloudElementDict = {'uniqueID': CEuniqueID, 'cloudElementTime': timelist[t],'cloudElementLatLon': cloudElementLatLons, 'cloudElementCenter':cloudElementCenter, 'cloudElementArea':cloudElementArea, 'cloudElementEccentricity':cloudElementEpsilon, 'cloudElementTmax':TIR_max, 'cloudElementTmin': TIR_min,}
-
- #current frame list of CEs
- currFrameCEs.append(cloudElementDict)
-
- #draw the graph node
- CLOUD_ELEMENT_GRAPH.add_node(CEuniqueID, cloudElementDict)
-
- if frameNum != 1:
- for cloudElementDict in prevFrameCEs:
- thisCElen = len(cloudElementLatLons)
- percentageOverlap, areaOverlap = cloudElementOverlap(cloudElementLatLons, cloudElementDict['cloudElementLatLon'])
-
- #change weights to integers because the built in shortest path chokes on floating pts according to Networkx doc
- #according to Goyens et al, two CEs are considered related if there is atleast 95% overlap between them for consecutive imgs a max of 2 hrs apart
- if percentageOverlap >= 0.95:
- CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[0])
-
- elif percentageOverlap >= 0.90 and percentageOverlap < 0.95 :
- CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[1])
-
- elif areaOverlap >= MIN_OVERLAP:
- CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[2])
-
- else:
- #TODO: remove this else as we only wish for the CE details
- #ensure only the non-zero elements are considered
- #store intel in allCE file
- labels, _ = ndimage.label(cloudElement)
- cloudElementsFile.write("\n-----------------------------------------------")
- cloudElementsFile.write("\n\nTime is: %s" %(str(timelist[t])))
- # cloudElementLat = LAT[loc[0],0]
- # cloudElementLon = LON[0,loc[1]]
-
- #populate cloudElementLatLons by unpacking the original values from loc
- #TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
- # as cloudElement is masked
- for index,value in np.ndenumerate(cloudElement):
- if value != 0 :
- lat_index,lon_index = index
- lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index])
- cloudElementLatLons.append(lat_lon_tuple)
-
- cloudElementsFile.write("\nLocation of rejected CE (lat,lon) points are: %s" %cloudElementLatLons)
- #latCenter and lonCenter are given according to the particular array defining this CE
- #so you need to convert this value to the overall domain truth
- latCenter, lonCenter = ndimage.measurements.center_of_mass(cloudElement, labels=labels)
- latCenter = cloudElementLat[round(latCenter)]
- lonCenter = cloudElementLon[round(lonCenter)]
- cloudElementsFile.write("\nCenter (lat,lon) is: %.2f\t%.2f" %(latCenter, lonCenter))
- cloudElementsFile.write("\nNumber of boxes are: %d" %numOfBoxes)
- cloudElementsFile.write("\nArea is: %.4f km^2" %(cloudElementArea))
- cloudElementsFile.write("\nAverage brightness temperature is: %.4f K" %ndimage.mean(cloudElement, labels=labels))
- cloudElementsFile.write("\nMin brightness temperature is: %.4f K" %ndimage.minimum(cloudElement, labels=labels))
- cloudElementsFile.write("\nMax brightness temperature is: %.4f K" %ndimage.maximum(cloudElement, labels=labels))
- cloudElementsFile.write("\nBrightness temperature variance is: %.4f K" %ndimage.variance(cloudElement, labels=labels))
- cloudElementsFile.write("\nConvective fraction is: %.4f " %(((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels))))*100.0))
- cloudElementsFile.write("\nEccentricity is: %.4f " %(cloudElementEpsilon))
- cloudElementsFile.write("\n-----------------------------------------------")
-
- #reset list for the next CE
- nodeExist = False
- cloudElementCenter=[]
- cloudElement = []
- cloudElementLat=[]
- cloudElementLon =[]
- cloudElementLatLons =[]
- brightnesstemp1 =[]
- brightnesstemp =[]
- finalCETRMMvalues =[]
- CEprecipRate =[]
- CETRMMList =[]
- precipTotal = 0.0
- precip=[]
- TRMMCloudElementLatLons=[]
-
- #reset for the next time
- prevFrameCEs =[]
- prevFrameCEs = currFrameCEs
- currFrameCEs =[]
-
- cloudElementsFile.close
- cloudElementsUserFile.close
- #if using ARCGIS data store code, uncomment this file close line
- #cloudElementsTextFile.close
-
- #clean up graph - remove parent and childless nodes
- outAndInDeg = CLOUD_ELEMENT_GRAPH.degree_iter()
- toRemove = [node[0] for node in outAndInDeg if node[1]<1]
- CLOUD_ELEMENT_GRAPH.remove_nodes_from(toRemove)
-
- print "number of nodes are: ", CLOUD_ELEMENT_GRAPH.number_of_nodes()
- print "number of edges are: ", CLOUD_ELEMENT_GRAPH.number_of_edges()
- print ("*"*80)
-
- #hierachial graph output
- graphTitle = "Cloud Elements observed over somewhere from 0000Z to 0000Z"
- #drawGraph(CLOUD_ELEMENT_GRAPH, graphTitle, edgeWeight)
-
- return CLOUD_ELEMENT_GRAPH
+
+
+def findCloudElements(mergImgs, timelist, TRMMdirName=None):
+ '''
+ Purpose::
+ Determines the contiguous boxes for a given time of the satellite images i.e. each frame
+ using scipy ndimage package
+
+ Input::
+ mergImgs: masked numpy array in (time,lat,lon),T_bb representing the satellite data. This is masked based on the
+ maximum acceptable temperature, T_BB_MAX
+ timelist: a list of python datetimes
+ TRMMdirName (optional): string representing the path where to find the TRMM datafiles
+
+ Output::
+ CLOUD_ELEMENT_GRAPH: a Networkx directed graph where each node contains the information in cloudElementDict
+ The nodes are determined according to the area of contiguous squares. The nodes are linked through weighted edges.
+
+ cloudElementDict = {'uniqueID': unique tag for this CE,
+ 'cloudElementTime': time of the CE,
+ 'cloudElementLatLon': (lat,lon,value) of MERG data of CE,
+ 'cloudElementCenter':list of floating-point [lat,lon] representing the CE's center
+ 'cloudElementArea':floating-point representing the area of the CE,
+ 'cloudElementEccentricity': floating-point representing the shape of the CE,
+ 'cloudElementTmax':integer representing the maximum Tb in CE,
+ 'cloudElementTmin': integer representing the minimum Tb in CE,
+ 'cloudElementPrecipTotal':floating-point representing the sum of all rainfall in CE if TRMMdirName entered,
+ 'cloudElementLatLonTRMM':(lat,lon,value) of TRMM data in CE if TRMMdirName entered,
+ 'TRMMArea': floating-point representing the CE if TRMMdirName entered,
+ 'CETRMMmax':floating-point representing the max rate in the CE if TRMMdirName entered,
+ 'CETRMMmin':floating-point representing the min rate in the CE if TRMMdirName entered}
+ Assumptions::
+ Assumes we are dealing with MERG data which is 4kmx4km resolved, thus the smallest value
+ required according to Vila et al. (2008) is 2400km^2
+ therefore, 2400/16 = 150 contiguous squares
+ '''
+
+ frame = ma.empty((1, mergImgs.shape[1], mergImgs.shape[2]))
+ CEcounter = 0
+ frameCEcounter = 0
+ frameNum = 0
+ cloudElementEpsilon = 0.0
+ cloudElementDict = {}
+ cloudElementCenter = [] # list with two elements [lat,lon] for the center of a CE
+ prevFrameCEs = [] # list for CEs in previous frame
+ currFrameCEs = [] # list for CEs in current frame
+ cloudElementLat = [] # list for a particular CE's lat values
+ cloudElementLon = [] # list for a particular CE's lon values
+ cloudElementLatLons = [] # list for a particular CE's (lat,lon) values
+
+ prevLatValue = 0.0
+ prevLonValue = 0.0
+ TIR_min = 0.0
+ TIR_max = 0.0
+ temporalRes = 3 # TRMM data is 3 hourly
+ precipTotal = 0.0
+ CETRMMList = []
+ precip = []
+ TRMMCloudElementLatLons = []
+
+ minCELatLimit = 0.0
+ minCELonLimit = 0.0
+ maxCELatLimit = 0.0
+ maxCELonLimit = 0.0
+
+ nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
+
+ # openfile for storing ALL cloudElement information
+ cloudElementsFile = open(
+ (MAINDIRECTORY + '/textFiles/cloudElements.txt'), 'wb')
+ # openfile for storing cloudElement information meeting user criteria i.e.
+ # MCCs in this case
+ cloudElementsUserFile = open(
+ (MAINDIRECTORY + '/textFiles/cloudElementsUserFile.txt'), 'w')
+
+ # NB in the TRMM files the info is hours since the time thus 00Z file has
+ # in 01, 02 and 03 times
+ for t in range(mergImgs.shape[0]):
+ #-------------------------------------------------
+ # #textfile name for saving the data for arcgis
+ # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + '.txt'
+ # cloudElementsTextFile = open(thisFileName,'w')
+ #-------------------------------------------------
+
+ # determine contiguous locations with temperature below the warmest
+ # temp i.e. cloudElements in each frame
+ frame, CEcounter = ndimage.measurements.label(
+ mergImgs[t, :, :], structure=STRUCTURING_ELEMENT)
+ frameCEcounter = 0
+ frameNum += 1
+
+ # for each of the areas identified, check to determine if it is a valid CE
+ # via an area and T requirement
+ for count in range(CEcounter):
+ #[0] is time dimension. Determine the actual values from the data
+ # loc is a masked array
+ try:
+ loc = ndimage.find_objects(frame == (count + 1))[0]
+ except Exception as e:
+ print(("Error is %s" % e))
+ continue
+
+ cloudElement = mergImgs[t, :, :][loc]
+ labels, lcounter = ndimage.label(cloudElement)
+
+ # determine the true lats and lons for this particular CE
+ cloudElementLat = LAT[loc[0], 0]
+ cloudElementLon = LON[0, loc[1]]
+
+ # determine number of boxes in this cloudelement
+ numOfBoxes = np.count_nonzero(cloudElement)
+ cloudElementArea = numOfBoxes * XRES * YRES
+
+ # If the area is greater than the area required, or if the area is smaller than the suggested area, check if it meets a convective fraction requirement
+ # consider as CE
+
+ if cloudElementArea >= AREA_MIN or (
+ cloudElementArea < AREA_MIN and (
+ (ndimage.minimum(
+ cloudElement,
+ labels=labels)) /
+ float(
+ (ndimage.maximum(
+ cloudElement,
+ labels=labels)))) < CONVECTIVE_FRACTION):
+
+ # get some time information and labeling info
+ frameTime = str(timelist[t])
+ frameCEcounter += 1
+ CEuniqueID = 'F' + str(frameNum) + 'CE' + str(frameCEcounter)
+
+ #-------------------------------------------------
+ # textfile name for accessing CE data using MATLAB code
+ # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.txt'
+ # cloudElementsTextFile = open(thisFileName,'w')
+ #-------------------------------------------------
+
+ # ------ NETCDF File stuff for brightness temp stuff ----------
+ thisFileName = MAINDIRECTORY + '/MERGnetcdfCEs/cloudElements' + \
+ (str(timelist[t])).replace(" ", "_") + CEuniqueID + '.nc'
+ currNetCDFCEData = Dataset(thisFileName, 'w', format='NETCDF4')
+ currNetCDFCEData.description = 'Cloud Element ' + CEuniqueID + ' temperature data'
+ currNetCDFCEData.calendar = 'standard'
+ currNetCDFCEData.conventions = 'COARDS'
+ # dimensions
+ currNetCDFCEData.createDimension('time', None)
+ currNetCDFCEData.createDimension('lat', len(LAT[:, 0]))
+ currNetCDFCEData.createDimension('lon', len(LON[0, :]))
+ # variables
+ tempDims = ('time', 'lat', 'lon',)
+ times = currNetCDFCEData.createVariable(
+ 'time', 'f8', ('time',))
+ times.units = 'hours since ' + str(timelist[t])[:-6]
+ latitudes = currNetCDFCEData.createVariable(
+ 'latitude', 'f8', ('lat',))
+ longitudes = currNetCDFCEData.createVariable(
+ 'longitude', 'f8', ('lon',))
+ brightnesstemp = currNetCDFCEData.createVariable(
+ 'brightnesstemp', 'i16', tempDims)
+ brightnesstemp.units = 'Kelvin'
+ # NETCDF data
+ dates = [timelist[t] + timedelta(hours=0)]
+ times[:] = date2num(dates, units=times.units)
+ longitudes[:] = LON[0, :]
+ longitudes.units = "degrees_east"
+ longitudes.long_name = "Longitude"
+
+ latitudes[:] = LAT[:, 0]
+ latitudes.units = "degrees_north"
+ latitudes.long_name = "Latitude"
+
+ # generate array of zeros for brightness temperature
+ brightnesstemp1 = ma.zeros(
+ (1, len(latitudes), len(longitudes))).astype('int16')
+ #-----------End most of NETCDF file stuff ---------------------
+
+ # if other dataset (TRMM) assumed to be a precipitation dataset
+ # was entered
+ if TRMMdirName:
+ #------------------TRMM stuff -----------------------------
+ fileDate = ((str(timelist[t])).replace(
+ " ", "")[:-8]).replace("-", "")
+ fileHr1 = (str(timelist[t])).replace(" ", "")[-8:-6]
+
+ if int(fileHr1) % temporalRes == 0:
+ fileHr = fileHr1
+ else:
+ fileHr = (int(fileHr1) / temporalRes) * temporalRes
+ if fileHr < 10:
+ fileHr = '0' + str(fileHr)
+ else:
+ str(fileHr)
+
+ # open TRMM file for the resolution info and to create the
+ # appropriate sized grid
+ TRMMfileName = TRMMdirName + '/3B42.' + \
+ fileDate + "." + str(fileHr) + ".7A.nc"
+
+ TRMMData = Dataset(TRMMfileName, 'r', format='NETCDF4')
+ precipRate = TRMMData.variables['pcp'][:, :, :]
+ latsrawTRMMData = TRMMData.variables['latitude'][:]
+ lonsrawTRMMData = TRMMData.variables['longitude'][:]
+ lonsrawTRMMData[lonsrawTRMMData >
+ 180] = lonsrawTRMMData[lonsrawTRMMData > 180] - 360.
+ LONTRMM, LATTRMM = np.meshgrid(
+ lonsrawTRMMData, latsrawTRMMData)
+
+ nygrdTRMM = len(LATTRMM[:, 0]); nxgrdTRMM = len(
+ LONTRMM[0, :])
+ precipRateMasked = ma.masked_array(
+ precipRate, mask=(precipRate < 0.0))
+ #---------regrid the TRMM data to the MERG dataset --------
+ # regrid using the do_regrid stuff from the Apache OCW
+ regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
+ #regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM, LONTRMM, LAT, LON, order=1, mdi= -999999999)
+ regriddedTRMM = do_regrid(
+ precipRateMasked[0, :, :], LATTRMM, LONTRMM, LAT, LON, order=1, mdi=-999999999)
+ #----------------------------------------------------------
+
+ # #get the lat/lon info from cloudElement
+ # get the lat/lon info from the file
+ latCEStart = LAT[0][0]
+ latCEEnd = LAT[-1][0]
+ lonCEStart = LON[0][0]
+ lonCEEnd = LON[0][-1]
+
+ # get the lat/lon info for TRMM data (different resolution)
+ latStartT = find_nearest(latsrawTRMMData, latCEStart)
+ latEndT = find_nearest(latsrawTRMMData, latCEEnd)
+ lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
+ lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
+ latStartIndex = np.where(latsrawTRMMData == latStartT)
+ latEndIndex = np.where(latsrawTRMMData == latEndT)
+ lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
+ lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
+
+ # get the relevant TRMM info
+ CEprecipRate = precipRate[:,
+ (latStartIndex[0][0] - 1):latEndIndex[0][0],
+ (lonStartIndex[0][0] - 1):lonEndIndex[0][0]]
+ TRMMData.close()
+
+ # ------ NETCDF File info for writing TRMM CE rainfall ----
+ thisFileName = MAINDIRECTORY + '/TRMMnetcdfCEs/TRMM' + \
+ (str(timelist[t])).replace(" ", "_") + CEuniqueID + '.nc'
+ currNetCDFTRMMData = Dataset(
+ thisFileName, 'w', format='NETCDF4')
+ currNetCDFTRMMData.description = 'Cloud Element ' + \
+ CEuniqueID + ' precipitation data'
+ currNetCDFTRMMData.calendar = 'standard'
+ currNetCDFTRMMData.conventions = 'COARDS'
+ # dimensions
+ currNetCDFTRMMData.createDimension('time', None)
+ currNetCDFTRMMData.createDimension('lat', len(LAT[:, 0]))
+ currNetCDFTRMMData.createDimension('lon', len(LON[0, :]))
+
+ # variables
+ TRMMprecip = ('time', 'lat', 'lon',)
+ times = currNetCDFTRMMData.createVariable(
+ 'time', 'f8', ('time',))
+ times.units = 'hours since ' + str(timelist[t])[:-6]
+ latitude = currNetCDFTRMMData.createVariable(
+ 'latitude', 'f8', ('lat',))
+ longitude = currNetCDFTRMMData.createVariable(
+ 'longitude', 'f8', ('lon',))
+ rainFallacc = currNetCDFTRMMData.createVariable(
+ 'precipitation_Accumulation', 'f8', TRMMprecip)
+ rainFallacc.units = 'mm'
+
+ longitude[:] = LON[0, :]
+ longitude.units = "degrees_east"
+ longitude.long_name = "Longitude"
+
+ latitude[:] = LAT[:, 0]
+ latitude.units = "degrees_north"
+ latitude.long_name = "Latitude"
+
+ finalCETRMMvalues = ma.zeros((brightnesstemp.shape))
+ #-----------End most of NETCDF file stuff -----------------
+
+ # populate cloudElementLatLons by unpacking the original values from loc to get the actual value for lat and lon
+ # TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
+ # as cloudElement is masked
+ for index, value in np.ndenumerate(cloudElement):
+ if value != 0:
+ lat_index, lon_index = index
+ lat_lon_tuple = (
+ cloudElementLat[lat_index],
+ cloudElementLon[lon_index],
+ value)
+
+ # generate the comma separated file for GIS
+ cloudElementLatLons.append(lat_lon_tuple)
+
+ # temp data for CE NETCDF file
+ brightnesstemp1[0, int(np.where(LAT[:, 0] == cloudElementLat[lat_index])[0]), int(
+ np.where(LON[0, :] == cloudElementLon[lon_index])[0])] = value
+
+ if TRMMdirName:
+ finalCETRMMvalues[0, int(np.where(LAT[:, 0] == cloudElementLat[lat_index])[0]), int(np.where(LON[0, :] == cloudElementLon[lon_index])[
+ 0])] = regriddedTRMM[int(np.where(LAT[:, 0] == cloudElementLat[lat_index])[0]), int(np.where(LON[0, :] == cloudElementLon[lon_index])[0])]
+ CETRMMList.append((cloudElementLat[lat_index],
+ cloudElementLon[lon_index],
+ finalCETRMMvalues[0,
+ cloudElementLat[lat_index],
+ cloudElementLon[lon_index]]))
+
+ brightnesstemp[:] = brightnesstemp1[:]
+ currNetCDFCEData.close()
+
+ if TRMMdirName:
+
+ # calculate the total precip associated with the feature
+ for index, value in np.ndenumerate(finalCETRMMvalues):
+ precipTotal += value
+ precip.append(value)
+
+ rainFallacc[:] = finalCETRMMvalues[:]
+ currNetCDFTRMMData.close()
+ TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
+ TRMMArea = TRMMnumOfBoxes * XRES * YRES
+ try:
+ maxCEprecipRate = np.max(
+ finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+ minCEprecipRate = np.min(
+ finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+ except BaseException:
+ pass
+
+ # sort cloudElementLatLons by lats
+ cloudElementLatLons.sort(key=lambda tup: tup[0])
+
+ # determine the shape (eccentricity) of the cloud element
+ cloudElementEpsilon = eccentricity(cloudElement)
+ cloudElementsUserFile.write(
+ "\n\nTime is: %s" % (str(timelist[t])))
+ cloudElementsUserFile.write("\nCEuniqueID is: %s" % CEuniqueID)
+ latCenter, lonCenter = ndimage.measurements.center_of_mass(
+ cloudElement, labels=labels)
+
+ # latCenter and lonCenter are given according to the particular array defining this CE
+ # so you need to convert this value to the overall domain truth
+ latCenter = cloudElementLat[round(latCenter)]
+ lonCenter = cloudElementLon[round(lonCenter)]
+ cloudElementsUserFile.write(
+ "\nCenter (lat,lon) is: %.2f\t%.2f" %
+ (latCenter, lonCenter))
+ cloudElementCenter.append(latCenter)
+ cloudElementCenter.append(lonCenter)
+ cloudElementsUserFile.write(
+ "\nNumber of boxes are: %d" % numOfBoxes)
+ cloudElementsUserFile.write(
+ "\nArea is: %.4f km^2" %
+ (cloudElementArea))
+ cloudElementsUserFile.write(
+ "\nAverage brightness temperature is: %.4f K" %
+ ndimage.mean(
+ cloudElement, labels=labels))
+ cloudElementsUserFile.write(
+ "\nMin brightness temperature is: %.4f K" %
+ ndimage.minimum(
+ cloudElement, labels=labels))
+ cloudElementsUserFile.write(
+ "\nMax brightness temperature is: %.4f K" %
+ ndimage.maximum(
+ cloudElement, labels=labels))
+ cloudElementsUserFile.write(
+ "\nBrightness temperature variance is: %.4f K" %
+ ndimage.variance(
+ cloudElement, labels=labels))
+ cloudElementsUserFile.write(
+ "\nConvective fraction is: %.4f " %
+ (((ndimage.minimum(
+ cloudElement,
+ labels=labels)) /
+ float(
+ (ndimage.maximum(
+ cloudElement,
+ labels=labels)))) *
+ 100.0))
+ cloudElementsUserFile.write(
+ "\nEccentricity is: %.4f " %
+ (cloudElementEpsilon))
+ # populate the dictionary
+ if TRMMdirName:
+ cloudElementDict = {
+ 'uniqueID': CEuniqueID,
+ 'cloudElementTime': timelist[t],
+ 'cloudElementLatLon': cloudElementLatLons,
+ 'cloudElementCenter': cloudElementCenter,
+ 'cloudElementArea': cloudElementArea,
+ 'cloudElementEccentricity': cloudElementEpsilon,
+ 'cloudElementTmax': TIR_max,
+ 'cloudElementTmin': TIR_min,
+ 'cloudElementPrecipTotal': precipTotal,
+ 'cloudElementLatLonTRMM': CETRMMList,
+ 'TRMMArea': TRMMArea,
+ 'CETRMMmax': maxCEprecipRate,
+ 'CETRMMmin': minCEprecipRate}
+ else:
+ cloudElementDict = {
+ 'uniqueID': CEuniqueID,
+ 'cloudElementTime': timelist[t],
+ 'cloudElementLatLon': cloudElementLatLons,
+ 'cloudElementCenter': cloudElementCenter,
+ 'cloudElementArea': cloudElementArea,
+ 'cloudElementEccentricity': cloudElementEpsilon,
+ 'cloudElementTmax': TIR_max,
+ 'cloudElementTmin': TIR_min,
+ }
+
+ # current frame list of CEs
+ currFrameCEs.append(cloudElementDict)
+
+ # draw the graph node
+ CLOUD_ELEMENT_GRAPH.add_node(CEuniqueID, cloudElementDict)
+
+ if frameNum != 1:
+ for cloudElementDict in prevFrameCEs:
+ thisCElen = len(cloudElementLatLons)
+ percentageOverlap, areaOverlap = cloudElementOverlap(
+ cloudElementLatLons, cloudElementDict['cloudElementLatLon'])
+
+ # change weights to integers because the built in shortest path chokes on floating pts according to Networkx doc
+ # according to Goyens et al, two CEs are considered
+ # related if there is at least 95% overlap between them
+ # for consecutive imgs a max of 2 hrs apart
+ if percentageOverlap >= 0.95:
+ CLOUD_ELEMENT_GRAPH.add_edge(
+ cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[0])
+
+ elif percentageOverlap >= 0.90 and percentageOverlap < 0.95:
+ CLOUD_ELEMENT_GRAPH.add_edge(
+ cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[1])
+
+ elif areaOverlap >= MIN_OVERLAP:
+ CLOUD_ELEMENT_GRAPH.add_edge(
+ cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[2])
+
+ else:
+ # TODO: remove this else as we only wish for the CE details
+ # ensure only the non-zero elements are considered
+ # store intel in allCE file
+ labels, _ = ndimage.label(cloudElement)
+ cloudElementsFile.write(
+ "\n-----------------------------------------------")
+ cloudElementsFile.write(
+ "\n\nTime is: %s" % (str(timelist[t])))
+ # cloudElementLat = LAT[loc[0],0]
+ # cloudElementLon = LON[0,loc[1]]
+
+ # populate cloudElementLatLons by unpacking the original values from loc
+ # TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
+ # as cloudElement is masked
+ for index, value in np.ndenumerate(cloudElement):
+ if value != 0:
+ lat_index, lon_index = index
+ lat_lon_tuple = (
+ cloudElementLat[lat_index],
+ cloudElementLon[lon_index])
+ cloudElementLatLons.append(lat_lon_tuple)
+
+ cloudElementsFile.write(
+ "\nLocation of rejected CE (lat,lon) points are: %s" %
+ cloudElementLatLons)
+ # latCenter and lonCenter are given according to the particular array defining this CE
+ # so you need to convert this value to the overall domain
+ # truth
+ latCenter, lonCenter = ndimage.measurements.center_of_mass(
+ cloudElement, labels=labels)
+ latCenter = cloudElementLat[round(latCenter)]
+ lonCenter = cloudElementLon[round(lonCenter)]
+ cloudElementsFile.write(
+ "\nCenter (lat,lon) is: %.2f\t%.2f" %
+ (latCenter, lonCenter))
+ cloudElementsFile.write(
+ "\nNumber of boxes are: %d" % numOfBoxes)
+ cloudElementsFile.write(
+ "\nArea is: %.4f km^2" %
+ (cloudElementArea))
+ cloudElementsFile.write(
+ "\nAverage brightness temperature is: %.4f K" %
+ ndimage.mean(
+ cloudElement, labels=labels))
+ cloudElementsFile.write(
+ "\nMin brightness temperature is: %.4f K" %
+ ndimage.minimum(
+ cloudElement, labels=labels))
+ cloudElementsFile.write(
+ "\nMax brightness temperature is: %.4f K" %
+ ndimage.maximum(
+ cloudElement, labels=labels))
+ cloudElementsFile.write(
+ "\nBrightness temperature variance is: %.4f K" %
+ ndimage.variance(
+ cloudElement, labels=labels))
+ cloudElementsFile.write(
+ "\nConvective fraction is: %.4f " %
+ (((ndimage.minimum(
+ cloudElement,
+ labels=labels)) /
+ float(
+ (ndimage.maximum(
+ cloudElement,
+ labels=labels)))) *
+ 100.0))
+ cloudElementsFile.write(
+ "\nEccentricity is: %.4f " %
+ (cloudElementEpsilon))
+ cloudElementsFile.write(
+ "\n-----------------------------------------------")
+
+ # reset list for the next CE
+ nodeExist = False
+ cloudElementCenter = []
+ cloudElement = []
+ cloudElementLat = []
+ cloudElementLon = []
+ cloudElementLatLons = []
+ brightnesstemp1 = []
+ brightnesstemp = []
+ finalCETRMMvalues = []
+ CEprecipRate = []
+ CETRMMList = []
+ precipTotal = 0.0
+ precip = []
+ TRMMCloudElementLatLons = []
+
+ # reset for the next time
+ prevFrameCEs = []
+ prevFrameCEs = currFrameCEs
+ currFrameCEs = []
+
+ cloudElementsFile.close
+ cloudElementsUserFile.close
+ # if using ARCGIS data store code, uncomment this file close line
+ # cloudElementsTextFile.close
+
+ # clean up graph - remove parent and childless nodes
+ outAndInDeg = CLOUD_ELEMENT_GRAPH.degree_iter()
+ toRemove = [node[0] for node in outAndInDeg if node[1] < 1]
+ CLOUD_ELEMENT_GRAPH.remove_nodes_from(toRemove)
+
+ print(("number of nodes are: %s" % CLOUD_ELEMENT_GRAPH.number_of_nodes()))
+ print(("number of edges are: %s" % CLOUD_ELEMENT_GRAPH.number_of_edges()))
+ print(("*" * 80))
+
+ # hierarchical graph output
+ graphTitle = "Cloud Elements observed over somewhere from 0000Z to 0000Z"
+ #drawGraph(CLOUD_ELEMENT_GRAPH, graphTitle, edgeWeight)
+
+ return CLOUD_ELEMENT_GRAPH
#******************************************************************
+
+
def findPrecipRate(TRMMdirName, timelist):
- '''
- Purpose::
- Determines the precipitation rates for MCSs found if TRMMdirName was not entered in findCloudElements this can be used
-
- Input::
- TRMMdirName: a string representing the directory for the original TRMM netCDF files
- timelist: a list of python datatimes
-
- Output:: a list of dictionary of the TRMM data
- NB: also creates netCDF with TRMM data for each CE (for post processing) index
- in MAINDIRECTORY/TRMMnetcdfCEs
-
- Assumptions:: Assumes that findCloudElements was run without the TRMMdirName value
-
- '''
- allCEnodesTRMMdata =[]
- TRMMdataDict={}
- precipTotal = 0.0
-
- os.chdir((MAINDIRECTORY+'/MERGnetcdfCEs/'))
- imgFilename = ''
- temporalRes = 3 #3 hours for TRMM
-
- #sort files
- files = filter(os.path.isfile, glob.glob("*.nc"))
- files.sort(key=lambda x: os.path.getmtime(x))
-
- for afile in files:
- fullFname = os.path.splitext(afile)[0]
- noFrameExtension = (fullFname.replace("_","")).split('F')[0]
- CEuniqueID = 'F' +(fullFname.replace("_","")).split('F')[1]
- fileDateTimeChar = (noFrameExtension.replace(":","")).split('s')[1]
- fileDateTime = fileDateTimeChar.replace("-","")
- fileDate = fileDateTime[:-6]
- fileHr1=fileDateTime[-6:-4]
-
- cloudElementData = Dataset(afile,'r', format='NETCDF4')
- brightnesstemp1 = cloudElementData.variables['brightnesstemp'][:,:,:]
- latsrawCloudElements = cloudElementData.variables['latitude'][:]
- lonsrawCloudElements = cloudElementData.variables['longitude'][:]
-
- brightnesstemp = np.squeeze(brightnesstemp1, axis=0)
-
- if int(fileHr1) % temporalRes == 0:
- fileHr = fileHr1
- else:
- fileHr = (int(fileHr1)/temporalRes) * temporalRes
-
- if fileHr < 10:
- fileHr = '0'+str(fileHr)
- else:
- str(fileHr)
-
- TRMMfileName = TRMMdirName+"/3B42."+ str(fileDate) + "."+str(fileHr)+".7A.nc"
- TRMMData = Dataset(TRMMfileName,'r', format='NETCDF4')
- precipRate = TRMMData.variables['pcp'][:,:,:]
- latsrawTRMMData = TRMMData.variables['latitude'][:]
- lonsrawTRMMData = TRMMData.variables['longitude'][:]
- lonsrawTRMMData[lonsrawTRMMData > 180] = lonsrawTRMMData[lonsrawTRMMData>180] - 360.
- LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
-
- #nygrdTRMM = len(LATTRMM[:,0]); nxgrd = len(LONTRMM[0,:])
- nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
-
- precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
- #---------regrid the TRMM data to the MERG dataset ----------------------------------
- #regrid using the do_regrid stuff from the Apache OCW
- regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
- regriddedTRMM = do_regrid(precipRateMasked[0,:,:], LATTRMM, LONTRMM, LAT, LON, order=1, mdi= -999999999)
- #regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM, LONTRMM, LAT, LON, order=1, mdi= -999999999)
- #----------------------------------------------------------------------------------
-
- # #get the lat/lon info from
- latCEStart = LAT[0][0]
- latCEEnd = LAT[-1][0]
- lonCEStart = LON[0][0]
- lonCEEnd = LON[0][-1]
-
- #get the lat/lon info for TRMM data (different resolution)
- latStartT = find_nearest(latsrawTRMMData, latCEStart)
- latEndT = find_nearest(latsrawTRMMData, latCEEnd)
- lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
- lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
- latStartIndex = np.where(latsrawTRMMData == latStartT)
- latEndIndex = np.where(latsrawTRMMData == latEndT)
- lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
- lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
-
- #get the relevant TRMM info
- CEprecipRate = precipRate[:,(latStartIndex[0][0]-1):latEndIndex[0][0],(lonStartIndex[0][0]-1):lonEndIndex[0][0]]
- TRMMData.close()
-
-
- # ------ NETCDF File stuff ------------------------------------
- thisFileName = MAINDIRECTORY+'/TRMMnetcdfCEs/'+ fileDateTime + CEuniqueID +'.nc'
- currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
- currNetCDFTRMMData.description = 'Cloud Element '+CEuniqueID + ' rainfall data'
- currNetCDFTRMMData.calendar = 'standard'
- currNetCDFTRMMData.conventions = 'COARDS'
- # dimensions
- currNetCDFTRMMData.createDimension('time', None)
- currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
- currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
- # variables
- TRMMprecip = ('time','lat', 'lon',)
- times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
- times.units = 'hours since '+ fileDateTime[:-6]
- latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
- longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
- rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
- rainFallacc.units = 'mm'
-
- longitude[:] = LON[0,:]
- longitude.units = "degrees_east"
- longitude.long_name = "Longitude"
-
- latitude[:] = LAT[:,0]
- latitude.units = "degrees_north"
- latitude.long_name ="Latitude"
-
- finalCETRMMvalues = ma.zeros((brightnesstemp1.shape))
- #-----------End most of NETCDF file stuff ------------------------------------
- for index,value in np.ndenumerate(brightnesstemp):
- lat_index, lon_index = index
- currTimeValue = 0
- if value > 0:
-
- finalCETRMMvalues[0,lat_index,lon_index] = regriddedTRMM[int(np.where(LAT[:,0]==LAT[lat_index,0])[0]), int(np.where(LON[0,:]==LON[0,lon_index])[0])]
-
-
- rainFallacc[:] = finalCETRMMvalues
- currNetCDFTRMMData.close()
-
- for index, value in np.ndenumerate(finalCETRMMvalues):
- precipTotal += value
-
- TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
- TRMMArea = TRMMnumOfBoxes*XRES*YRES
-
- try:
- minCEprecipRate = np.min(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
- except:
- minCEprecipRate = 0.0
-
- try:
- maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
- except:
- maxCEprecipRate = 0.0
-
- #add info to CLOUDELEMENTSGRAPH
- #TODO try block
- for eachdict in CLOUD_ELEMENT_GRAPH.nodes(CEuniqueID):
- if eachdict[1]['uniqueID'] == CEuniqueID:
- if not 'cloudElementPrecipTotal' in eachdict[1].keys():
- eachdict[1]['cloudElementPrecipTotal'] = precipTotal
- if not 'cloudElementLatLonTRMM' in eachdict[1].keys():
- eachdict[1]['cloudElementLatLonTRMM'] = finalCETRMMvalues
- if not 'TRMMArea' in eachdict[1].keys():
- eachdict[1]['TRMMArea'] = TRMMArea
- if not 'CETRMMmin' in eachdict[1].keys():
- eachdict[1]['CETRMMmin'] = minCEprecipRate
- if not 'CETRMMmax' in eachdict[1].keys():
- eachdict[1]['CETRMMmax'] = maxCEprecipRate
-
- #clean up
- precipTotal = 0.0
- latsrawTRMMData =[]
- lonsrawTRMMData = []
- latsrawCloudElements=[]
- lonsrawCloudElements=[]
- finalCETRMMvalues =[]
- CEprecipRate =[]
- brightnesstemp =[]
- TRMMdataDict ={}
-
- return allCEnodesTRMMdata
-#******************************************************************
+ '''
+ Purpose::
+ Determines the precipitation rates for MCSs found if TRMMdirName was not entered in findCloudElements this can be used
+
+ Input::
+ TRMMdirName: a string representing the directory for the original TRMM netCDF files
+ timelist: a list of python datetimes
+
+ Output:: a list of dictionary of the TRMM data
+ NB: also creates netCDF with TRMM data for each CE (for post processing) index
+ in MAINDIRECTORY/TRMMnetcdfCEs
+
+ Assumptions:: Assumes that findCloudElements was run without the TRMMdirName value
+
+ '''
+ allCEnodesTRMMdata = []
+ TRMMdataDict = {}
+ precipTotal = 0.0
+
+ os.chdir((MAINDIRECTORY + '/MERGnetcdfCEs/'))
+ imgFilename = ''
+ temporalRes = 3 # 3 hours for TRMM
+
+ # sort files
+ files = list(filter(os.path.isfile, glob.glob("*.nc")))
+ files.sort(key=lambda x: os.path.getmtime(x))
+
+ for afile in files:
+ fullFname = os.path.splitext(afile)[0]
+ noFrameExtension = (fullFname.replace("_", "")).split('F')[0]
+ CEuniqueID = 'F' + (fullFname.replace("_", "")).split('F')[1]
+ fileDateTimeChar = (noFrameExtension.replace(":", "")).split('s')[1]
+ fileDateTime = fileDateTimeChar.replace("-", "")
+ fileDate = fileDateTime[:-6]
+ fileHr1 = fileDateTime[-6:-4]
+
+ cloudElementData = Dataset(afile, 'r', format='NETCDF4')
+ brightnesstemp1 = cloudElementData.variables['brightnesstemp'][:, :, :]
+ latsrawCloudElements = cloudElementData.variables['latitude'][:]
+ lonsrawCloudElements = cloudElementData.variables['longitude'][:]
+
+ brightnesstemp = np.squeeze(brightnesstemp1, axis=0)
+
+ if int(fileHr1) % temporalRes == 0:
+ fileHr = fileHr1
+ else:
+ fileHr = (int(fileHr1) / temporalRes) * temporalRes
+
+ if fileHr < 10:
+ fileHr = '0' + str(fileHr)
+ else:
+ str(fileHr)
+
+ TRMMfileName = TRMMdirName + "/3B42." + \
+ str(fileDate) + "." + str(fileHr) + ".7A.nc"
+ TRMMData = Dataset(TRMMfileName, 'r', format='NETCDF4')
+ precipRate = TRMMData.variables['pcp'][:, :, :]
+ latsrawTRMMData = TRMMData.variables['latitude'][:]
+ lonsrawTRMMData = TRMMData.variables['longitude'][:]
+ lonsrawTRMMData[lonsrawTRMMData >
+ 180] = lonsrawTRMMData[lonsrawTRMMData > 180] - 360.
+ LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
+
+ #nygrdTRMM = len(LATTRMM[:,0]); nxgrd = len(LONTRMM[0,:])
+ nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
+
+ precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
+ #---------regrid the TRMM data to the MERG dataset --------------------
+ # regrid using the do_regrid stuff from the Apache OCW
+ regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
+ regriddedTRMM = do_regrid(
+ precipRateMasked[0, :, :], LATTRMM, LONTRMM, LAT, LON, order=1, mdi=-999999999)
+ #regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM, LONTRMM, LAT, LON, order=1, mdi= -999999999)
+ #----------------------------------------------------------------------
+
+ # #get the lat/lon info from
+ latCEStart = LAT[0][0]
+ latCEEnd = LAT[-1][0]
+ lonCEStart = LON[0][0]
+ lonCEEnd = LON[0][-1]
+
+ # get the lat/lon info for TRMM data (different resolution)
+ latStartT = find_nearest(latsrawTRMMData, latCEStart)
+ latEndT = find_nearest(latsrawTRMMData, latCEEnd)
+ lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
+ lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
+ latStartIndex = np.where(latsrawTRMMData == latStartT)
+ latEndIndex = np.where(latsrawTRMMData == latEndT)
+ lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
+ lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
+
+ # get the relevant TRMM info
+ CEprecipRate = precipRate[:,
+ (latStartIndex[0][0] - 1):latEndIndex[0][0],
+ (lonStartIndex[0][0] - 1):lonEndIndex[0][0]]
+ TRMMData.close()
+
+ # ------ NETCDF File stuff ------------------------------------
+ thisFileName = MAINDIRECTORY + '/TRMMnetcdfCEs/' + \
+ fileDateTime + CEuniqueID + '.nc'
+ currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
+ currNetCDFTRMMData.description = 'Cloud Element ' + CEuniqueID + ' rainfall data'
+ currNetCDFTRMMData.calendar = 'standard'
+ currNetCDFTRMMData.conventions = 'COARDS'
+ # dimensions
+ currNetCDFTRMMData.createDimension('time', None)
+ currNetCDFTRMMData.createDimension('lat', len(LAT[:, 0]))
+ currNetCDFTRMMData.createDimension('lon', len(LON[0, :]))
+ # variables
+ TRMMprecip = ('time', 'lat', 'lon',)
+ times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
+ times.units = 'hours since ' + fileDateTime[:-6]
+ latitude = currNetCDFTRMMData.createVariable(
+ 'latitude', 'f8', ('lat',))
+ longitude = currNetCDFTRMMData.createVariable(
+ 'longitude', 'f8', ('lon',))
+ rainFallacc = currNetCDFTRMMData.createVariable(
+ 'precipitation_Accumulation', 'f8', TRMMprecip)
+ rainFallacc.units = 'mm'
+
+ longitude[:] = LON[0, :]
+ longitude.units = "degrees_east"
+ longitude.long_name = "Longitude"
+
+ latitude[:] = LAT[:, 0]
+ latitude.units = "degrees_north"
+ latitude.long_name = "Latitude"
+
+ finalCETRMMvalues = ma.zeros((brightnesstemp1.shape))
+ #-----------End most of NETCDF file stuff -----------------------------
+ for index, value in np.ndenumerate(brightnesstemp):
+ lat_index, lon_index = index
+ currTimeValue = 0
+ if value > 0:
+
+ finalCETRMMvalues[0, lat_index, lon_index] = regriddedTRMM[int(np.where(
+ LAT[:, 0] == LAT[lat_index, 0])[0]), int(np.where(LON[0, :] == LON[0, lon_index])[0])]
+
+ rainFallacc[:] = finalCETRMMvalues
+ currNetCDFTRMMData.close()
+
+ for index, value in np.ndenumerate(finalCETRMMvalues):
+ precipTotal += value
+
+ TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
+ TRMMArea = TRMMnumOfBoxes * XRES * YRES
+
+ try:
+ minCEprecipRate = np.min(
+ finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+ except BaseException:
+ minCEprecipRate = 0.0
+
+ try:
+ maxCEprecipRate = np.max(
+ finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+ except BaseException:
+ maxCEprecipRate = 0.0
+
+ # add info to CLOUDELEMENTSGRAPH
+ # TODO try block
+ for eachdict in CLOUD_ELEMENT_GRAPH.nodes(CEuniqueID):
+ if eachdict[1]['uniqueID'] == CEuniqueID:
+ if 'cloudElementPrecipTotal' not in list(eachdict[1].keys()):
+ eachdict[1]['cloudElementPrecipTotal'] = precipTotal
+ if 'cloudElementLatLonTRMM' not in list(eachdict[1].keys()):
+ eachdict[1]['cloudElementLatLonTRMM'] = finalCETRMMvalues
+ if 'TRMMArea' not in list(eachdict[1].keys()):
+ eachdict[1]['TRMMArea'] = TRMMArea
+ if 'CETRMMmin' not in list(eachdict[1].keys()):
+ eachdict[1]['CETRMMmin'] = minCEprecipRate
+ if 'CETRMMmax' not in list(eachdict[1].keys()):
+ eachdict[1]['CETRMMmax'] = maxCEprecipRate
+
+ # clean up
+ precipTotal = 0.0
+ latsrawTRMMData = []
+ lonsrawTRMMData = []
+ latsrawCloudElements = []
+ lonsrawCloudElements = []
+ finalCETRMMvalues = []
+ CEprecipRate = []
+ brightnesstemp = []
+ TRMMdataDict = {}
+
+ return allCEnodesTRMMdata
+#******************************************************************
+
+
def findCloudClusters(CEGraph):
    '''
    Purpose::
        Determines the cloud clusters properties from the subgraphs in
        the graph i.e. prunes the graph according to the minimum depth

    Input::
        CEGraph: a Networkx directed graph of the CEs with weighted edges
        according to the area overlap between nodes (CEs) of consecutive frames

    Output::
        PRUNED_GRAPH: a Networkx directed graph with the CCs / MCSs

    Side effects::
        Mutates the module-level PRUNED_GRAPH and writes a summary of each
        subtree to <MAINDIRECTORY>/textFiles/cloudClusters.txt
    '''
    # list of shortest paths already recorded, used to skip seen nodes
    seenNode = []

    # Open in text mode ('w'), not binary ('wb'): the write() below passes a
    # str, which raises TypeError on a binary file in Python 3. The context
    # manager also guarantees the file is closed (the original referenced
    # .close without calling it, leaking the handle).
    with open(MAINDIRECTORY + '/textFiles/cloudClusters.txt', 'w') as cloudClustersFile:
        for eachNode in CEGraph:
            # Skip nodes already contained in a recorded shortest path.
            # NOTE(review): the original test compared nodes against the
            # integer keys of dict(enumerate(zip(*seenNode))) and therefore
            # never matched; this implements the stated intent ("check if
            # the node has been seen before").
            if not any(eachNode in path for path in seenNode):
                # look for all trees associated with this node as the root
                thisPathDistanceAndLength = nx.single_source_dijkstra(
                    CEGraph, eachNode)
                # determine the actual shortest path and minimum depth/length
                maxDepthAndMinPath = findMaxDepthAndMinPath(
                    thisPathDistanceAndLength)
                if maxDepthAndMinPath:
                    maxPathLength = maxDepthAndMinPath[0]
                    shortestPath = maxDepthAndMinPath[1]

                    # add nodes and paths to PRUNED_GRAPH
                    for i in range(len(shortestPath)):
                        if not PRUNED_GRAPH.has_node(shortestPath[i]):
                            PRUNED_GRAPH.add_node(shortestPath[i])

                        # add the connecting edge (with its overlap weight)
                        # if it is not already present
                        if i < (len(shortestPath) - 1) and not PRUNED_GRAPH.has_edge(
                                shortestPath[i], shortestPath[i + 1]):
                            prunedGraphEdgeweight = CEGraph.get_edge_data(
                                shortestPath[i], shortestPath[i + 1])['weight']
                            PRUNED_GRAPH.add_edge(
                                shortestPath[i],
                                shortestPath[i + 1],
                                weight=prunedGraphEdgeweight)

                    # note information in a file for consideration later i.e.
                    # checking to see if it works
                    cloudClustersFile.write(
                        "\nSubtree pathlength is %d and path is %s" %
                        (maxPathLength, shortestPath))
                    # update seenNode info
                    seenNode.append(shortestPath)

    print("pruned graph")
    # print the actual counts (the previous 2to3 conversion printed a
    # (format-string, value) tuple instead of the values)
    print("number of nodes are: ", PRUNED_GRAPH.number_of_nodes())
    print("number of edges are: ", PRUNED_GRAPH.number_of_edges())
    print("*" * 80)

    graphTitle = "Cloud Clusters observed over somewhere during sometime"
    #drawGraph(PRUNED_GRAPH, graphTitle, edgeWeight)

    return PRUNED_GRAPH
#******************************************************************
-def findMCC (prunedGraph):
- '''
- Purpose::
- Determines if subtree is a MCC according to Laurent et al 1998 criteria
-
- Input::
- prunedGraph: a Networkx Graph representing the CCs
-
- Output::
- finalMCCList: a list of list of tuples representing a MCC
-
- Assumptions:
- frames are ordered and are equally distributed in time e.g. hrly satellite images
-
- '''
- MCCList = []
- MCSList = []
- definiteMCC = []
- definiteMCS = []
- eachList =[]
- eachMCCList =[]
- maturing = False
- decaying = False
- fNode = ''
- lNode = ''
- removeList =[]
- imgCount = 0
- imgTitle =''
-
- maxShieldNode = ''
- orderedPath =[]
- treeTraversalList =[]
- definiteMCCFlag = False
- unDirGraph = nx.Graph()
- aSubGraph = nx.DiGraph()
- definiteMCSFlag = False
-
-
- #connected_components is not available for DiGraph, so generate graph as undirected
- unDirGraph = PRUNED_GRAPH.to_undirected()
- subGraph = nx.connected_component_subgraphs(unDirGraph)
-
- #for each path in the subgraphs determined
- for path in subGraph:
- #definite is a subTree provided the duration is longer than 3 hours
-
- if len(path.nodes()) > MIN_MCS_DURATION:
- orderedPath = path.nodes()
- orderedPath.sort(key=lambda item:(len(item.split('C')[0]), item.split('C')[0]))
- #definiteMCS.append(orderedPath)
-
- #build back DiGraph for checking purposes/paper purposes
- aSubGraph.add_nodes_from(path.nodes())
- for eachNode in path.nodes():
- if prunedGraph.predecessors(eachNode):
- for node in prunedGraph.predecessors(eachNode):
- aSubGraph.add_edge(node,eachNode,weight=edgeWeight[0])
-
- if prunedGraph.successors(eachNode):
- for node in prunedGraph.successors(eachNode):
- aSubGraph.add_edge(eachNode,node,weight=edgeWeight[0])
- imgTitle = 'CC'+str(imgCount+1)
- #drawGraph(aSubGraph, imgTitle, edgeWeight) #for eachNode in path:
- imgCount +=1
- #----------end build back ---------------------------------------------
-
- mergeList, splitList = hasMergesOrSplits(path)
- #add node behavior regarding neutral, merge, split or both
- for node in path:
- if node in mergeList and node in splitList:
- addNodeBehaviorIdentifier(node,'B')
- elif node in mergeList and not node in splitList:
- addNodeBehaviorIdentifier(node,'M')
- elif node in splitList and not node in mergeList:
- addNodeBehaviorIdentifier(node,'S')
- else:
- addNodeBehaviorIdentifier(node,'N')
-
-
- #Do the first part of checking for the MCC feature
- #find the path
- treeTraversalList = traverseTree(aSubGraph, orderedPath[0],[],[])
- #print "treeTraversalList is ", treeTraversalList
- #check the nodes to determine if a MCC on just the area criteria (consecutive nodes meeting the area and temp requirements)
- MCCList = checkedNodesMCC(prunedGraph, treeTraversalList)
- for aDict in MCCList:
- for eachNode in aDict["fullMCSMCC"]:
- addNodeMCSIdentifier(eachNode[0],eachNode[1])
-
- #do check for if MCCs overlap
- if MCCList:
- if len(MCCList) > 1:
- for count in range(len(MCCList)): #for eachDict in MCCList:
- #if there are more than two lists
- if count >= 1:
- #and the first node in this list
- eachList = list(x[0] for x in MCCList[count]["possMCCList"])
- eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
- if eachList:
- fNode = eachList[0]
- #get the lastNode in the previous possMCC list
- eachList = list(x[0] for x in MCCList[(count-1)]["possMCCList"])
- eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
- if eachList:
- lNode = eachList[-1]
- if lNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
- for aNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
- if aNode in eachList and aNode == lNode:
- #if edge_data is equal or less than to the exisitng edge in the tree append one to the other
- if CLOUD_ELEMENT_GRAPH.get_edge_data(aNode,fNode)['weight'] <= CLOUD_ELEMENT_GRAPH.get_edge_data(lNode,fNode)['weight']:
- MCCList[count-1]["possMCCList"].extend(MCCList[count]["possMCCList"])
- MCCList[count-1]["fullMCSMCC"].extend(MCCList[count]["fullMCSMCC"])
- MCCList[count-1]["durationAandB"] += MCCList[count]["durationAandB"]
- MCCList[count-1]["CounterCriteriaA"] += MCCList[count]["CounterCriteriaA"]
- MCCList[count-1]["highestMCCnode"] = MCCList[count]["highestMCCnode"]
- MCCList[count-1]["frameNum"] = MCCList[count]["frameNum"]
- removeList.append(count)
- #update the MCCList
- if removeList:
- for i in removeList:
- if (len(MCCList)-1) > i:
- del MCCList[i]
- removeList =[]
-
- #check if the nodes also meet the duration criteria and the shape crieria
- for eachDict in MCCList:
- #order the fullMCSMCC list, then run maximum extent and eccentricity criteria
- if (eachDict["durationAandB"] * TRES) >= MINIMUM_DURATION and (eachDict["durationAandB"] * TRES) <= MAXIMUM_DURATION:
- eachList = list(x[0] for x in eachDict["fullMCSMCC"])
- eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
- eachMCCList = list(x[0] for x in eachDict["possMCCList"])
- eachMCCList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
-
- #update the nodemcsidentifer behavior
- #find the first element eachMCCList in eachList, and ensure everything ahead of it is indicated as 'I',
- #find last element in eachMCCList in eachList and ensure everything after it is indicated as 'D'
- #ensure that everything between is listed as 'M'
- for eachNode in eachList[:
<TRUNCATED>
[5/5] climate git commit: CLIMATE-912 Upgrade mccSearch code from
Python2 > 3
Posted by le...@apache.org.
CLIMATE-912 Upgrade mccSearch code from Python2 > 3
Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/9a30e195
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/9a30e195
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/9a30e195
Branch: refs/heads/master
Commit: 9a30e195b66bc16c626e6572a781ad8d2b92da85
Parents: 51d9dce
Author: Lewis John McGibbney <le...@gmail.com>
Authored: Mon Sep 11 07:46:13 2017 -0700
Committer: Lewis John McGibbney <le...@gmail.com>
Committed: Mon Sep 11 07:46:13 2017 -0700
----------------------------------------------------------------------
mccsearch/code/mainProg.py | 16 ++++++++--------
mccsearch/code/mainProgTemplate.py | 16 ++++++++--------
2 files changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/climate/blob/9a30e195/mccsearch/code/mainProg.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mainProg.py b/mccsearch/code/mainProg.py
index d341aa6..33210d5 100644
--- a/mccsearch/code/mainProg.py
+++ b/mccsearch/code/mainProg.py
@@ -57,7 +57,7 @@ def main():
# let's go!
print("\n -------------- Read MERG Data ----------")
mergImgs, timeList = mccSearch.readMergData(CEoriDirName)
- print(("-" * 80))
+ print("-" * 80)
print('in main', len(mergImgs))
# print 'timeList', timeList
@@ -68,20 +68,20 @@ def main():
# CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
# allCETRMMList=mccSearch.findPrecipRate(TRMMdirName,timeList)
# ----------------------------------------------------------------------------------------------
- print(("-" * 80))
+ print("-" * 80)
print("number of nodes in CEGraph is: ", CEGraph.number_of_nodes())
- print(("-" * 80))
+ print("-" * 80)
print("\n -------------- TESTING findCloudClusters ----------")
prunedGraph = mccSearch.findCloudClusters(CEGraph)
- print(("-" * 80))
+ print("-" * 80)
print("number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes())
- print(("-" * 80))
+ print("-" * 80)
print("\n -------------- TESTING findMCCs ----------")
MCCList, MCSList = mccSearch.findMCC(prunedGraph)
- print(("-" * 80))
+ print("-" * 80)
print("MCC List has been acquired ", len(MCCList))
print("MCS List has been acquired ", len(MCSList))
- print(("-" * 80))
+ print("-" * 80)
# now ready to perform various calculations/metrics
print("\n -------------- TESTING METRICS ----------")
@@ -104,7 +104,7 @@ def main():
# mccSearch.displayPrecip(MCCList)
# mccSearch.plotHistogram(MCCList)
#
- print(("-" * 80))
+ print("-" * 80)
main()
http://git-wip-us.apache.org/repos/asf/climate/blob/9a30e195/mccsearch/code/mainProgTemplate.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mainProgTemplate.py b/mccsearch/code/mainProgTemplate.py
index 96da757..b59b5b8 100644
--- a/mccsearch/code/mainProgTemplate.py
+++ b/mccsearch/code/mainProgTemplate.py
@@ -61,7 +61,7 @@ def main():
# let's go!
print("\n -------------- Read MERG Data ----------")
mergImgs, timeList = mccSearch.readMergData(CEoriDirName)
- print(("-" * 80))
+ print("-" * 80)
print('in main', len(mergImgs))
# print 'timeList', timeList
@@ -72,21 +72,21 @@ def main():
# CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
# allCETRMMList=mccSearch.findPrecipRate(TRMMdirName,timeList)
# ----------------------------------------------------------------------------------------------
- print(("-" * 80))
+ print("-" * 80)
print("number of nodes in CEGraph is: ", CEGraph.number_of_nodes())
- print(("-" * 80))
+ print("-" * 80)
print("\n -------------- TESTING findCloudClusters ----------")
prunedGraph = mccSearch.findCloudClusters(CEGraph)
- print(("-" * 80))
+ print("-" * 80)
print("number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes())
- print(("-" * 80))
+ print("-" * 80)
# sys.exit()
print("\n -------------- TESTING findMCCs ----------")
MCCList, MCSList = mccSearch.findMCC(prunedGraph)
- print(("-" * 80))
+ print("-" * 80)
print("MCC List has been acquired ", len(MCCList))
print("MCS List has been acquired ", len(MCSList))
- print(("-" * 80))
+ print("-" * 80)
# now ready to perform various calculations/metrics
print("\n -------------- TESTING METRICS ----------")
@@ -109,7 +109,7 @@ def main():
# mccSearch.displayPrecip(MCCList)
# mccSearch.plotHistogram(MCCList)
#
- print(("-" * 80))
+ print("-" * 80)
main()