Posted to commits@climate.apache.org by le...@apache.org on 2017/08/05 16:28:01 UTC

[1/3] climate git commit: CLIMATE-920 Make examples Python 3 compatible

Repository: climate
Updated Branches:
  refs/heads/master ec3fdb4f7 -> 9bb217008


http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/ocw/data_source/esgf.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/esgf.py b/ocw/data_source/esgf.py
index 67c307f..0dcc2e0 100644
--- a/ocw/data_source/esgf.py
+++ b/ocw/data_source/esgf.py
@@ -18,7 +18,14 @@
 #
 
 import os
-import urllib2
+import sys
+if sys.version_info[0] >= 3:
+    from urllib.error import HTTPError
+else:
+    # Not Python 3 - today that almost certainly means Python 2,
+    # though note that this check may need revisiting if and when
+    # Python 4 arrives one day
+    from urllib2 import HTTPError
 
 from ocw.esgf.constants import DEFAULT_ESGF_SEARCH
 from ocw.esgf.download import download
@@ -137,7 +144,7 @@ def _download_files(file_urls, username, password, download_directory='/tmp'):
     ''''''
     try:
         logon(username, password)
-    except urllib2.HTTPError:
+    except HTTPError:
         raise ValueError('esgf._download_files: Invalid login credentials')
 
     for url in file_urls:
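
The pattern above - branch on sys.version_info and import the same name from its Python 3 or its Python 2 home - is the one this commit applies throughout. A standalone sketch of the same shim using the equally common try/except ImportError spelling; the check_credentials wrapper is a hypothetical illustration of how _download_files uses the import, not code from the commit:

    try:
        from urllib.error import HTTPError   # Python 3
    except ImportError:
        from urllib2 import HTTPError        # Python 2

    def check_credentials(logon, username, password):
        # Translate an HTTP-level authentication failure into a
        # ValueError, as esgf._download_files does above.
        try:
            logon(username, password)
        except HTTPError:
            raise ValueError('Invalid login credentials')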

http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/ocw/esgf/download.py
----------------------------------------------------------------------
diff --git a/ocw/esgf/download.py b/ocw/esgf/download.py
index 23c107b..690915c 100644
--- a/ocw/esgf/download.py
+++ b/ocw/esgf/download.py
@@ -17,24 +17,36 @@
 # under the License.
 #
 '''
-RCMES module to download a file from ESGF.
+OCW module to download a file from ESGF.
 
 '''
 
-import urllib2
-import httplib
+import sys
+if sys.version_info[0] >= 3:
+    from http.client import HTTPSConnection
+    from urllib.request import build_opener
+    from urllib.request import HTTPCookieProcessor
+    from urllib.request import HTTPSHandler
+else:
+    # Not Python 3 - today that almost certainly means Python 2,
+    # though note that this check may need revisiting if and when
+    # Python 4 arrives one day
+    from httplib import HTTPSConnection
+    from urllib2 import build_opener
+    from urllib2 import HTTPCookieProcessor
+    from urllib2 import HTTPSHandler
 from os.path import expanduser, join
 
 from ocw.esgf.constants import ESGF_CREDENTIALS
 
 
-class HTTPSClientAuthHandler(urllib2.HTTPSHandler):
+class HTTPSClientAuthHandler(HTTPSHandler):
     '''
     HTTP handler that transmits an X509 certificate as part of the request
     '''
 
     def __init__(self, key, cert):
-        urllib2.HTTPSHandler.__init__(self)
+        HTTPSHandler.__init__(self)
         self.key = key
         self.cert = cert
 
@@ -42,7 +54,7 @@ class HTTPSClientAuthHandler(urllib2.HTTPSHandler):
         return self.do_open(self.getConnection, req)
 
     def getConnection(self, host, timeout=300):
-        return httplib.HTTPSConnection(host, key_file=self.key, cert_file=self.cert)
+        return HTTPSConnection(host, key_file=self.key, cert_file=self.cert)
 
 
 def download(url, toDirectory="/tmp"):
@@ -55,8 +67,8 @@ def download(url, toDirectory="/tmp"):
 
     # setup HTTP handler
     certFile = expanduser(ESGF_CREDENTIALS)
-    opener = urllib2.build_opener(HTTPSClientAuthHandler(certFile, certFile))
-    opener.add_handler(urllib2.HTTPCookieProcessor())
+    opener = build_opener(HTTPSClientAuthHandler(certFile, certFile))
+    opener.add_handler(HTTPCookieProcessor())
 
     # download file
     localFilePath = join(toDirectory, url.split('/')[-1])
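
With the imports above resolved per interpreter, the rest of download.py runs unchanged on both. A Python 3-only sketch of how the pieces fit together; the certificate path and URL are hypothetical placeholders, and note that HTTPSConnection's key_file/cert_file arguments are deprecated since Python 3.6 in favour of an ssl.SSLContext:

    from http.client import HTTPSConnection
    from urllib.request import HTTPSHandler, HTTPCookieProcessor, build_opener

    class HTTPSClientAuthHandler(HTTPSHandler):
        # Transmits an X509 client certificate as part of the request.
        def __init__(self, key, cert):
            HTTPSHandler.__init__(self)
            self.key = key
            self.cert = cert

        def https_open(self, req):
            return self.do_open(self.get_connection, req)

        def get_connection(self, host, timeout=300):
            return HTTPSConnection(host, key_file=self.key, cert_file=self.cert)

    cert_file = '/path/to/esgf-credentials.pem'   # hypothetical path
    opener = build_opener(HTTPSClientAuthHandler(cert_file, cert_file))
    opener.add_handler(HTTPCookieProcessor())
    # response = opener.open('https://esgf-node.example.org/some/file.nc')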


[2/3] climate git commit: CLIMATE-920 Make examples Python 3 compatible

Posted by le...@apache.org.
http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/mccsearch/code/mccSearch.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mccSearch.py b/mccsearch/code/mccSearch.py
index 15ce0c6..6f1fa26 100644
--- a/mccsearch/code/mccSearch.py
+++ b/mccsearch/code/mccSearch.py
@@ -56,7 +56,7 @@ TRES = 1 				#temporal resolution in hrs
 LAT_DISTANCE = 111.0 	#the avg distance in km for 1deg lat for the region being considered 
 LON_DISTANCE = 111.0	#the avg distance in km for 1deg lon for the region being considered
 STRUCTURING_ELEMENT = [[0,1,0],[1,1,1],[0,1,0]] #the matrix for determining the pattern for the contiguous boxes and must
-												#have same rank of the matrix it is being compared against 
+                                                #have same rank of the matrix it is being compared against 
 #criteria for determining cloud elements and edges
 T_BB_MAX = 243  #warmest temp to allow (-30C to -55C according to Morel and Sensi 2002)
 T_BB_MIN = 218  #cooler temp for the center of the system
@@ -84,2079 +84,2079 @@ PRUNED_GRAPH = nx.DiGraph()
 #************************ Begin Functions *************************
 #******************************************************************
 def readMergData(dirname, filelist = None):
-	'''
-	Purpose::
-		Read MERG data into RCMES format
-	
-	Input::
-		dirname: a string representing the directory to the MERG files in NETCDF format
-		filelist (optional): a list of strings representing the filenames between the start and end dates provided
-	
-	Output::
-		A 3D masked array (t,lat,lon) with only the variables which meet the minimum temperature 
-		criteria for each frame
-
-	Assumptions::
-		The MERG data has been converted to NETCDF using LATS4D
-		The data has the same lat/lon format
-
-	TODO:: figure out how to use netCDF4 to do the clipping tmp = netCDF4.Dataset(filelist[0])
-
-	'''
-
-	global LAT
-	global LON
-
-	# these strings are specific to the MERG data
-	mergVarName = 'ch4'
-	mergTimeVarName = 'time'
-	mergLatVarName = 'latitude'
-	mergLonVarName = 'longitude'
-	
-	filelistInstructions = dirname + '/*'
-	if filelist == None:
-		filelist = glob.glob(filelistInstructions)
-
-	
-	#sat_img is the array that will contain all the masked frames
-	mergImgs = []
-	#timelist of python time strings
-	timelist = [] 
-	time2store = None
-	tempMaskedValueNp =[]
-	
-
-	filelist.sort()
-	nfiles = len(filelist)
-
-	# Crash nicely if there are no netcdf files
-	if nfiles == 0:
-		print 'Error: no files in this directory! Exiting elegantly'
-		sys.exit()
-	else:
-		# Open the first file in the list to read in lats, lons and generate the  grid for comparison
-		tmp = Dataset(filelist[0], 'r+',format='NETCDF4')
-
-		alllatsraw = tmp.variables[mergLatVarName][:]
-		alllonsraw = tmp.variables[mergLonVarName][:]
-		alllonsraw[alllonsraw > 180] = alllonsraw[alllonsraw > 180] - 360.  # convert to -180,180 if necessary
-		
-		#get the lat/lon info data (different resolution)
-		latminNETCDF = find_nearest(alllatsraw, float(LATMIN))
-		latmaxNETCDF = find_nearest(alllatsraw, float(LATMAX))
-		lonminNETCDF = find_nearest(alllonsraw, float(LONMIN))
-		lonmaxNETCDF = find_nearest(alllonsraw, float(LONMAX))
-		latminIndex = (np.where(alllatsraw == latminNETCDF))[0][0]
-		latmaxIndex = (np.where(alllatsraw == latmaxNETCDF))[0][0]
-		lonminIndex = (np.where(alllonsraw == lonminNETCDF))[0][0]
-		lonmaxIndex = (np.where(alllonsraw == lonmaxNETCDF))[0][0]
-		
-		#subsetting the data
-		latsraw = alllatsraw[latminIndex: latmaxIndex]
-		lonsraw = alllonsraw[lonminIndex:lonmaxIndex]
-		
-		LON, LAT = np.meshgrid(lonsraw, latsraw)
-		#clean up
-		latsraw =[]
-		lonsraw = []
-		nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
-		tmp.close
-	
-	for files in filelist:
-		try:
-			thisFile = Dataset(files, format='NETCDF4')
-			#clip the dataset according to user lat,lon coordinates
-			tempRaw = thisFile.variables[mergVarName][:,latminIndex:latmaxIndex,lonminIndex:lonmaxIndex].astype('int16')
-			tempMask = ma.masked_array(tempRaw, mask=(tempRaw > T_BB_MAX), fill_value=0) 
-			
-			#get the actual values that the mask returned
-			tempMaskedValue = ma.zeros((tempRaw.shape)).astype('int16')
-			for index, value in maenumerate(tempMask): 
-				time_index, lat_index, lon_index = index			
-				tempMaskedValue[time_index,lat_index, lon_index]=value	
-			
-			xtimes = thisFile.variables[mergTimeVarName]
-			#convert this time to a python datastring
-			time2store, _ = getModelTimes(xtimes, mergTimeVarName)
-			#extend instead of append because getModelTimes returns a list already and we don't 
-			#want a list of lists
-			timelist.extend(time2store)
-			mergImgs.extend(tempMaskedValue) 
-			thisFile.close
-			thisFile = None
-			
-		except:
-			print "bad file! ", files
-
-	mergImgs = ma.array(mergImgs)
-
-	return mergImgs, timelist
+    '''
+    Purpose::
+        Read MERG data into RCMES format
+    
+    Input::
+        dirname: a string representing the directory to the MERG files in NETCDF format
+        filelist (optional): a list of strings representing the filenames between the start and end dates provided
+    
+    Output::
+        A 3D masked array (t,lat,lon) with only the variables which meet the minimum temperature 
+        criteria for each frame
+
+    Assumptions::
+        The MERG data has been converted to NETCDF using LATS4D
+        The data has the same lat/lon format
+
+    TODO:: figure out how to use netCDF4 to do the clipping tmp = netCDF4.Dataset(filelist[0])
+
+    '''
+
+    global LAT
+    global LON
+
+    # these strings are specific to the MERG data
+    mergVarName = 'ch4'
+    mergTimeVarName = 'time'
+    mergLatVarName = 'latitude'
+    mergLonVarName = 'longitude'
+    
+    filelistInstructions = dirname + '/*'
+    if filelist == None:
+        filelist = glob.glob(filelistInstructions)
+
+    
+    #sat_img is the array that will contain all the masked frames
+    mergImgs = []
+    #timelist of python time strings
+    timelist = [] 
+    time2store = None
+    tempMaskedValueNp =[]
+    
+
+    filelist.sort()
+    nfiles = len(filelist)
+
+    # Crash nicely if there are no netcdf files
+    if nfiles == 0:
+        print 'Error: no files in this directory! Exiting elegantly'
+        sys.exit()
+    else:
+        # Open the first file in the list to read in lats, lons and generate the  grid for comparison
+        tmp = Dataset(filelist[0], 'r+',format='NETCDF4')
+
+        alllatsraw = tmp.variables[mergLatVarName][:]
+        alllonsraw = tmp.variables[mergLonVarName][:]
+        alllonsraw[alllonsraw > 180] = alllonsraw[alllonsraw > 180] - 360.  # convert to -180,180 if necessary
+        
+        #get the lat/lon info data (different resolution)
+        latminNETCDF = find_nearest(alllatsraw, float(LATMIN))
+        latmaxNETCDF = find_nearest(alllatsraw, float(LATMAX))
+        lonminNETCDF = find_nearest(alllonsraw, float(LONMIN))
+        lonmaxNETCDF = find_nearest(alllonsraw, float(LONMAX))
+        latminIndex = (np.where(alllatsraw == latminNETCDF))[0][0]
+        latmaxIndex = (np.where(alllatsraw == latmaxNETCDF))[0][0]
+        lonminIndex = (np.where(alllonsraw == lonminNETCDF))[0][0]
+        lonmaxIndex = (np.where(alllonsraw == lonmaxNETCDF))[0][0]
+        
+        #subsetting the data
+        latsraw = alllatsraw[latminIndex: latmaxIndex]
+        lonsraw = alllonsraw[lonminIndex:lonmaxIndex]
+        
+        LON, LAT = np.meshgrid(lonsraw, latsraw)
+        #clean up
+        latsraw =[]
+        lonsraw = []
+        nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
+        tmp.close
+    
+    for files in filelist:
+        try:
+            thisFile = Dataset(files, format='NETCDF4')
+            #clip the dataset according to user lat,lon coordinates
+            tempRaw = thisFile.variables[mergVarName][:,latminIndex:latmaxIndex,lonminIndex:lonmaxIndex].astype('int16')
+            tempMask = ma.masked_array(tempRaw, mask=(tempRaw > T_BB_MAX), fill_value=0) 
+            
+            #get the actual values that the mask returned
+            tempMaskedValue = ma.zeros((tempRaw.shape)).astype('int16')
+            for index, value in maenumerate(tempMask): 
+                time_index, lat_index, lon_index = index			
+                tempMaskedValue[time_index,lat_index, lon_index]=value	
+            
+            xtimes = thisFile.variables[mergTimeVarName]
+            #convert this time to a python datastring
+            time2store, _ = getModelTimes(xtimes, mergTimeVarName)
+            #extend instead of append because getModelTimes returns a list already and we don't 
+            #want a list of lists
+            timelist.extend(time2store)
+            mergImgs.extend(tempMaskedValue) 
+            thisFile.close
+            thisFile = None
+            
+        except:
+            print "bad file! ", files
+
+    mergImgs = ma.array(mergImgs)
+
+    return mergImgs, timelist
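
The readMergData body above changes only in indentation (tabs to four spaces), so its Python 2 print statements and bare except remain. Below is a minimal Python 3 sketch of its core step, assuming the same netCDF4/numpy stack; read_masked_frame is an illustrative name, not a function in this module. One aside: the original's tmp.close and thisFile.close only reference the close method without calling it, whereas tmp.close() - or a with block as below - actually closes the file:

    import numpy.ma as ma
    from netCDF4 import Dataset

    T_BB_MAX = 243  # warmest brightness temperature to keep, in Kelvin

    def read_masked_frame(path, var_name='ch4'):
        # Open one MERG NetCDF file, mask everything warmer than
        # T_BB_MAX, and close the file deterministically.
        with Dataset(path, 'r', format='NETCDF4') as nc:
            raw = nc.variables[var_name][:].astype('int16')
        return ma.masked_array(raw, mask=(raw > T_BB_MAX), fill_value=0)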
 #******************************************************************
 def findCloudElements(mergImgs,timelist,TRMMdirName=None):
-	'''
-	Purpose::
-		Determines the contiguous boxes for a given time of the satellite images i.e. each frame
-		using scipy ndimage package
-	
-	Input::	
-		mergImgs: masked numpy array in (time,lat,lon),T_bb representing the satellite data. This is masked based on the
-		maximum acceptable temperature, T_BB_MAX
-		timelist: a list of python datetimes
-		TRMMdirName (optional): string representing the path where to find the TRMM datafiles
-		
-	Output::
-		CLOUD_ELEMENT_GRAPH: a Networkx directed graph where each node contains the information in cloudElementDict
-		The nodes are determined according to the area of contiguous squares. The nodes are linked through weighted edges.
-
-		cloudElementDict = {'uniqueID': unique tag for this CE, 
-							'cloudElementTime': time of the CE,
-							'cloudElementLatLon': (lat,lon,value) of MERG data of CE, 
-							'cloudElementCenter':list of floating-point [lat,lon] representing the CE's center 
-							'cloudElementArea':floating-point representing the area of the CE, 
-							'cloudElementEccentricity': floating-point representing the shape of the CE, 
-							'cloudElementTmax':integer representing the maximum Tb in CE, 
-							'cloudElementTmin': integer representing the minimum Tb in CE, 
-							'cloudElementPrecipTotal':floating-point representing the sum of all rainfall in CE if TRMMdirName entered,
-							'cloudElementLatLonTRMM':(lat,lon,value) of TRMM data in CE if TRMMdirName entered, 
-							'TRMMArea': floating-point representing the area of the CE with TRMM rainfall if TRMMdirName entered,
-							'CETRMMmax':floating-point representing the max rate in the CE if TRMMdirName entered, 
-							'CETRMMmin':floating-point representing the min rate in the CE if TRMMdirName entered}
-	Assumptions::
-		Assumes we are dealing with MERG data which is 4kmx4km resolved, thus the smallest value 
-		required according to Vila et al. (2008) is 2400km^2 
-		therefore, 2400/16 = 150 contiguous squares
-	'''
-
-	frame = ma.empty((1,mergImgs.shape[1],mergImgs.shape[2]))
-	CEcounter = 0
-	frameCEcounter = 0
-	frameNum = 0
-	cloudElementEpsilon = 0.0
-	cloudElementDict = {} 
-	cloudElementCenter = []		#list with two elements [lat,lon] for the center of a CE
-	prevFrameCEs = []			#list for CEs in previous frame
-	currFrameCEs = []			#list for CEs in current frame
-	cloudElementLat = []		#list for a particular CE's lat values
-	cloudElementLon = []		#list for a particular CE's lon values
-	cloudElementLatLons = []	#list for a particular CE's (lat,lon) values
-	
-	prevLatValue = 0.0
-	prevLonValue = 0.0
-	TIR_min = 0.0
-	TIR_max = 0.0
-	temporalRes = 3 # TRMM data is 3 hourly
-	precipTotal = 0.0
-	CETRMMList =[]
-	precip =[]
-	TRMMCloudElementLatLons =[]
-
-	minCELatLimit = 0.0
-	minCELonLimit = 0.0
-	maxCELatLimit = 0.0
-	maxCELonLimit = 0.0
-	
-	nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
-	
-	#openfile for storing ALL cloudElement information 
-	cloudElementsFile = open((MAINDIRECTORY+'/textFiles/cloudElements.txt'),'wb')
-	#openfile for storing cloudElement information meeting user criteria i.e. MCCs in this case
-	cloudElementsUserFile = open((MAINDIRECTORY+'/textFiles/cloudElementsUserFile.txt'),'w')
-	
-	#NB in the TRMM files the info is hours since the file time, thus the 00Z file contains the 01, 02 and 03 times
-	for t in xrange(mergImgs.shape[0]):
-		#-------------------------------------------------
-		# #textfile name for saving the data for arcgis
-		# thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + '.txt'
-		# cloudElementsTextFile = open(thisFileName,'w')
-		#-------------------------------------------------
-
-		#determine contiguous locations with temperature below the warmest temp i.e. cloudElements in each frame
-	   	frame, CEcounter = ndimage.measurements.label(mergImgs[t,:,:], structure=STRUCTURING_ELEMENT)
-	   	frameCEcounter=0
-		frameNum += 1
-
-		#for each of the areas identified, check to determine if it is a valid CE via an area and T requirement
-	   	for count in xrange(CEcounter):
-	   		#[0] is time dimension. Determine the actual values from the data
-	   		#loc is a masked array
-	   		try:
-	   			loc = ndimage.find_objects(frame==(count+1))[0]
-	   		except Exception, e:
-	   			print "Error is ", e
-	   			continue
-
-
-	   		cloudElement = mergImgs[t,:,:][loc]
-	   		labels, lcounter = ndimage.label(cloudElement)
-	   		
-	   		#determine the true lats and lons for this particular CE
-   			cloudElementLat = LAT[loc[0],0]
-   			cloudElementLon = LON[0,loc[1]] 
-   			
-	   		#determine number of boxes in this cloudelement
-	   		numOfBoxes = np.count_nonzero(cloudElement)
-	   		cloudElementArea = numOfBoxes*XRES*YRES
-
-	   		#If the area is greater than the area required, or if the area is smaller than the suggested area, check if it meets a convective fraction requirement
-	   		#consider as CE
-
-	   		if cloudElementArea >= AREA_MIN or (cloudElementArea < AREA_MIN and ((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels)))) < CONVECTIVE_FRACTION ):
-
-	   			#get some time information and labeling info
-	   			frameTime = str(timelist[t])
-	   			frameCEcounter +=1
-	   			CEuniqueID = 'F'+str(frameNum)+'CE'+str(frameCEcounter) 
-
-	   			#-------------------------------------------------
-				#textfile name for accessing CE data using MATLAB code
-				# thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.txt'
-				# cloudElementsTextFile = open(thisFileName,'w')
-				#-------------------------------------------------
-
-				# ------ NETCDF File stuff for brightness temp stuff ------------------------------------
-				thisFileName = MAINDIRECTORY +'/MERGnetcdfCEs/cloudElements'+ (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.nc'
-				currNetCDFCEData = Dataset(thisFileName, 'w', format='NETCDF4')
-				currNetCDFCEData.description = 'Cloud Element '+CEuniqueID + ' temperature data'
-				currNetCDFCEData.calendar = 'standard'
-				currNetCDFCEData.conventions = 'COARDS'
-				# dimensions
-				currNetCDFCEData.createDimension('time', None)
-				currNetCDFCEData.createDimension('lat', len(LAT[:,0]))
-				currNetCDFCEData.createDimension('lon', len(LON[0,:]))
-				# variables
-				tempDims = ('time','lat', 'lon',)
-				times = currNetCDFCEData.createVariable('time', 'f8', ('time',))
-				times.units = 'hours since '+ str(timelist[t])[:-6]
-				latitudes = currNetCDFCEData.createVariable('latitude', 'f8', ('lat',))
-				longitudes = currNetCDFCEData.createVariable('longitude', 'f8', ('lon',))
-				brightnesstemp = currNetCDFCEData.createVariable('brightnesstemp', 'i16',tempDims )
-				brightnesstemp.units = 'Kelvin'
-				# NETCDF data
-				dates=[timelist[t]+timedelta(hours=0)]
-				times[:] =  date2num(dates,units=times.units)
-				longitudes[:] = LON[0,:]
-				longitudes.units = "degrees_east" 
-				longitudes.long_name = "Longitude" 
-
-				latitudes[:] =  LAT[:,0]
-				latitudes.units = "degrees_north"
-				latitudes.long_name ="Latitude"
-				
-				#generate array of zeros for brightness temperature
-				brightnesstemp1 = ma.zeros((1,len(latitudes), len(longitudes))).astype('int16')
-				#-----------End most of NETCDF file stuff ------------------------------------
-
-				#if other dataset (TRMM) assumed to be a precipitation dataset was entered
-				if TRMMdirName:
-					#------------------TRMM stuff -------------------------------------------------
-					fileDate = ((str(timelist[t])).replace(" ", "")[:-8]).replace("-","")
-					fileHr1 = (str(timelist[t])).replace(" ", "")[-8:-6]
-					
-					if int(fileHr1) % temporalRes == 0:
-						fileHr = fileHr1
-					else:
-						fileHr = (int(fileHr1)/temporalRes) * temporalRes
-					if fileHr < 10:
-						fileHr = '0'+str(fileHr)
-					else:
-						str(fileHr)
-
-					#open TRMM file for the resolution info and to create the appropriate sized grid
-					TRMMfileName = TRMMdirName+'/3B42.'+ fileDate + "."+str(fileHr)+".7A.nc"
-					
-					TRMMData = Dataset(TRMMfileName,'r', format='NETCDF4')
-					precipRate = TRMMData.variables['pcp'][:,:,:]
-					latsrawTRMMData = TRMMData.variables['latitude'][:]
-					lonsrawTRMMData = TRMMData.variables['longitude'][:]
-					lonsrawTRMMData[lonsrawTRMMData > 180] = lonsrawTRMMData[lonsrawTRMMData>180] - 360.
-					LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
-
-					nygrdTRMM = len(LATTRMM[:,0]); nxgrdTRMM = len(LONTRMM[0,:])
-					precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
-					#---------regrid the TRMM data to the MERG dataset ----------------------------------
-					#regrid using the do_regrid stuff from the Apache OCW 
-					regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
-					#regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
-					regriddedTRMM = do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
-					#----------------------------------------------------------------------------------
-		
-					# #get the lat/lon info from cloudElement
-					#get the lat/lon info from the file
-					latCEStart = LAT[0][0]
-					latCEEnd = LAT[-1][0]
-					lonCEStart = LON[0][0]
-					lonCEEnd = LON[0][-1]
-					
-					#get the lat/lon info for TRMM data (different resolution)
-					latStartT = find_nearest(latsrawTRMMData, latCEStart)
-					latEndT = find_nearest(latsrawTRMMData, latCEEnd)
-					lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
-					lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
-					latStartIndex = np.where(latsrawTRMMData == latStartT)
-					latEndIndex = np.where(latsrawTRMMData == latEndT)
-					lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
-					lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
-
-					#get the relevant TRMM info 
-					CEprecipRate = precipRate[:,(latStartIndex[0][0]-1):latEndIndex[0][0],(lonStartIndex[0][0]-1):lonEndIndex[0][0]]
-					TRMMData.close()
-					
-					# ------ NETCDF File info for writing TRMM CE rainfall ------------------------------------
-					thisFileName = MAINDIRECTORY+'/TRMMnetcdfCEs/TRMM' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.nc'
-					currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
-					currNetCDFTRMMData.description = 'Cloud Element '+CEuniqueID + ' precipitation data'
-					currNetCDFTRMMData.calendar = 'standard'
-					currNetCDFTRMMData.conventions = 'COARDS'
-					# dimensions
-					currNetCDFTRMMData.createDimension('time', None)
-					currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
-					currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
-					
-					# variables
-					TRMMprecip = ('time','lat', 'lon',)
-					times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
-					times.units = 'hours since '+ str(timelist[t])[:-6]
-					latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
-					longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
-					rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
-					rainFallacc.units = 'mm'
-
-					longitude[:] = LON[0,:]
-					longitude.units = "degrees_east" 
-					longitude.long_name = "Longitude" 
-
-					latitude[:] =  LAT[:,0]
-					latitude.units = "degrees_north"
-					latitude.long_name ="Latitude"
-
-					finalCETRMMvalues = ma.zeros((brightnesstemp.shape))
-					#-----------End most of NETCDF file stuff ------------------------------------
-
-	   			#populate cloudElementLatLons by unpacking the original values from loc to get the actual value for lat and lon
-				#TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
-				# 			as cloudElement is masked
-				for index,value in np.ndenumerate(cloudElement):
-					if value != 0 : 
-						lat_index,lon_index = index
-						lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index],value)
-
-						#generate the comma separated file for GIS
-						cloudElementLatLons.append(lat_lon_tuple)
-
-						#temp data for CE NETCDF file
-						brightnesstemp1[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = value
-						
-						if TRMMdirName:
-							finalCETRMMvalues[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = regriddedTRMM[int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])]
-							CETRMMList.append((cloudElementLat[lat_index], cloudElementLon[lon_index], finalCETRMMvalues[0,cloudElementLat[lat_index], cloudElementLon[lon_index]]))
-
-
-				brightnesstemp[:] = brightnesstemp1[:]
-				currNetCDFCEData.close()
-
-				if TRMMdirName:
-
-					#calculate the total precip associated with the feature
-					for index, value in np.ndenumerate(finalCETRMMvalues):
-						precipTotal += value 
-						precip.append(value)
-			
-					rainFallacc[:] = finalCETRMMvalues[:]
-					currNetCDFTRMMData.close()
-					TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
-					TRMMArea = TRMMnumOfBoxes*XRES*YRES
-					try:
-						maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
-						minCEprecipRate = np.min(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
-					except:
-						pass
-
-				#sort cloudElementLatLons by lats
-				cloudElementLatLons.sort(key=lambda tup: tup[0])	
-
-				#determine the shape of the cloud element
-				cloudElementEpsilon = eccentricity (cloudElement)
-				cloudElementsUserFile.write("\n\nTime is: %s" %(str(timelist[t])))
-				cloudElementsUserFile.write("\nCEuniqueID is: %s" %CEuniqueID)
-				latCenter, lonCenter = ndimage.measurements.center_of_mass(cloudElement, labels=labels)
-				
-				#latCenter and lonCenter are given according to the particular array defining this CE
-				#so you need to convert this value to the overall domain truth
-				latCenter = cloudElementLat[round(latCenter)]
-				lonCenter = cloudElementLon[round(lonCenter)]
-				cloudElementsUserFile.write("\nCenter (lat,lon) is: %.2f\t%.2f" %(latCenter, lonCenter))
-				cloudElementCenter.append(latCenter)
-				cloudElementCenter.append(lonCenter)
-				cloudElementsUserFile.write("\nNumber of boxes are: %d" %numOfBoxes)
-				cloudElementsUserFile.write("\nArea is: %.4f km^2" %(cloudElementArea))
-				cloudElementsUserFile.write("\nAverage brightness temperature is: %.4f K" %ndimage.mean(cloudElement, labels=labels))
-				cloudElementsUserFile.write("\nMin brightness temperature is: %.4f K" %ndimage.minimum(cloudElement, labels=labels))
-				cloudElementsUserFile.write("\nMax brightness temperature is: %.4f K" %ndimage.maximum(cloudElement, labels=labels))
-				cloudElementsUserFile.write("\nBrightness temperature variance is: %.4f K" %ndimage.variance(cloudElement, labels=labels))
-				cloudElementsUserFile.write("\nConvective fraction is: %.4f " %(((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels))))*100.0))
-				cloudElementsUserFile.write("\nEccentricity is: %.4f " %(cloudElementEpsilon))
-				#populate the dictionary
-				if TRMMdirName:
-					cloudElementDict = {'uniqueID': CEuniqueID, 'cloudElementTime': timelist[t],'cloudElementLatLon': cloudElementLatLons, 'cloudElementCenter':cloudElementCenter, 'cloudElementArea':cloudElementArea, 'cloudElementEccentricity':cloudElementEpsilon, 'cloudElementTmax':TIR_max, 'cloudElementTmin': TIR_min, 'cloudElementPrecipTotal':precipTotal,'cloudElementLatLonTRMM':CETRMMList, 'TRMMArea': TRMMArea,'CETRMMmax':maxCEprecipRate, 'CETRMMmin':minCEprecipRate}
-				else:
-					cloudElementDict = {'uniqueID': CEuniqueID, 'cloudElementTime': timelist[t],'cloudElementLatLon': cloudElementLatLons, 'cloudElementCenter':cloudElementCenter, 'cloudElementArea':cloudElementArea, 'cloudElementEccentricity':cloudElementEpsilon, 'cloudElementTmax':TIR_max, 'cloudElementTmin': TIR_min,}
-				
-				#current frame list of CEs
-				currFrameCEs.append(cloudElementDict)
-				
-				#draw the graph node
-				CLOUD_ELEMENT_GRAPH.add_node(CEuniqueID, cloudElementDict)
-				
-				if frameNum != 1:
-					for cloudElementDict in prevFrameCEs:
-						thisCElen = len(cloudElementLatLons)
-						percentageOverlap, areaOverlap = cloudElementOverlap(cloudElementLatLons, cloudElementDict['cloudElementLatLon'])
-						
-						#change weights to integers because the built in shortest path chokes on floating pts according to Networkx doc
-						#according to Goyens et al, two CEs are considered related if there is at least 95% overlap between them for consecutive imgs a max of 2 hrs apart
-						if percentageOverlap >= 0.95: 
-							CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[0])
-							
-						elif percentageOverlap >= 0.90 and percentageOverlap < 0.95 :
-							CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[1])
-
-						elif areaOverlap >= MIN_OVERLAP:
-							CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[2])
-
-				else:
-					#TODO: remove this else as we only wish for the CE details
-					#ensure only the non-zero elements are considered
-					#store intel in allCE file
-					labels, _ = ndimage.label(cloudElement)
-					cloudElementsFile.write("\n-----------------------------------------------")
-					cloudElementsFile.write("\n\nTime is: %s" %(str(timelist[t])))
-					# cloudElementLat = LAT[loc[0],0]
-					# cloudElementLon = LON[0,loc[1]] 
-					
-					#populate cloudElementLatLons by unpacking the original values from loc
-					#TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
-					# 			as cloudElement is masked
-					for index,value in np.ndenumerate(cloudElement):
-						if value != 0 : 
-							lat_index,lon_index = index
-							lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index])
-							cloudElementLatLons.append(lat_lon_tuple)
-	
-					cloudElementsFile.write("\nLocation of rejected CE (lat,lon) points are: %s" %cloudElementLatLons)
-					#latCenter and lonCenter are given according to the particular array defining this CE
-		   			#so you need to convert this value to the overall domain truth
-					latCenter, lonCenter = ndimage.measurements.center_of_mass(cloudElement, labels=labels)
-					latCenter = cloudElementLat[round(latCenter)]
-					lonCenter = cloudElementLon[round(lonCenter)]
-					cloudElementsFile.write("\nCenter (lat,lon) is: %.2f\t%.2f" %(latCenter, lonCenter))
-					cloudElementsFile.write("\nNumber of boxes are: %d" %numOfBoxes)
-					cloudElementsFile.write("\nArea is: %.4f km^2" %(cloudElementArea))
-					cloudElementsFile.write("\nAverage brightness temperature is: %.4f K" %ndimage.mean(cloudElement, labels=labels))
-					cloudElementsFile.write("\nMin brightness temperature is: %.4f K" %ndimage.minimum(cloudElement, labels=labels))
-					cloudElementsFile.write("\nMax brightness temperature is: %.4f K" %ndimage.maximum(cloudElement, labels=labels))
-					cloudElementsFile.write("\nBrightness temperature variance is: %.4f K" %ndimage.variance(cloudElement, labels=labels))
-					cloudElementsFile.write("\nConvective fraction is: %.4f " %(((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels))))*100.0))
-					cloudElementsFile.write("\nEccentricity is: %.4f " %(cloudElementEpsilon))
-					cloudElementsFile.write("\n-----------------------------------------------")
-					
-			#reset list for the next CE
-			nodeExist = False
-			cloudElementCenter=[]
-			cloudElement = []
-			cloudElementLat=[]
-			cloudElementLon =[]
-			cloudElementLatLons =[]
-			brightnesstemp1 =[]
-			brightnesstemp =[]
-			finalCETRMMvalues =[]
-			CEprecipRate =[]
-			CETRMMList =[]
-			precipTotal = 0.0
-			precip=[]
-			TRMMCloudElementLatLons=[]
-			
-		#reset for the next time
-		prevFrameCEs =[]
-		prevFrameCEs = currFrameCEs
-		currFrameCEs =[]
-						
-	cloudElementsFile.close
-	cloudElementsUserFile.close
-	#if using ARCGIS data store code, uncomment this file close line
-	#cloudElementsTextFile.close
-
-	#clean up graph - remove parent and childless nodes
-	outAndInDeg = CLOUD_ELEMENT_GRAPH.degree_iter()
-	toRemove = [node[0] for node in outAndInDeg if node[1]<1]
-	CLOUD_ELEMENT_GRAPH.remove_nodes_from(toRemove)
-	
-	print "number of nodes are: ", CLOUD_ELEMENT_GRAPH.number_of_nodes()
-	print "number of edges are: ", CLOUD_ELEMENT_GRAPH.number_of_edges()
-	print ("*"*80)
-
-	#hierarchical graph output
-	graphTitle = "Cloud Elements observed over somewhere from 0000Z to 0000Z" 
-	#drawGraph(CLOUD_ELEMENT_GRAPH, graphTitle, edgeWeight)
-
-	return CLOUD_ELEMENT_GRAPH	
+    '''
+    Purpose::
+        Determines the contiguous boxes for a given time of the satellite images i.e. each frame
+        using scipy ndimage package
+    
+    Input::	
+        mergImgs: masked numpy array in (time,lat,lon),T_bb representing the satellite data. This is masked based on the
+        maximum acceptable temperature, T_BB_MAX
+        timelist: a list of python datetimes
+        TRMMdirName (optional): string representing the path where to find the TRMM datafiles
+        
+    Output::
+        CLOUD_ELEMENT_GRAPH: a Networkx directed graph where each node contains the information in cloudElementDict
+        The nodes are determined according to the area of contiguous squares. The nodes are linked through weighted edges.
+
+        cloudElementDict = {'uniqueID': unique tag for this CE, 
+                            'cloudElementTime': time of the CE,
+                            'cloudElementLatLon': (lat,lon,value) of MERG data of CE, 
+                            'cloudElementCenter':list of floating-point [lat,lon] representing the CE's center 
+                            'cloudElementArea':floating-point representing the area of the CE, 
+                            'cloudElementEccentricity': floating-point representing the shape of the CE, 
+                            'cloudElementTmax':integer representing the maximum Tb in CE, 
+                            'cloudElementTmin': integer representing the minimum Tb in CE, 
+                            'cloudElementPrecipTotal':floating-point representing the sum of all rainfall in CE if TRMMdirName entered,
+                            'cloudElementLatLonTRMM':(lat,lon,value) of TRMM data in CE if TRMMdirName entered, 
+                            'TRMMArea': floating-point representing the area of the CE with TRMM rainfall if TRMMdirName entered,
+                            'CETRMMmax':floating-point representing the max rate in the CE if TRMMdirName entered, 
+                            'CETRMMmin':floating-point representing the min rate in the CE if TRMMdirName entered}
+    Assumptions::
+        Assumes we are dealing with MERG data which is 4kmx4km resolved, thus the smallest value 
+        required according to Vila et al. (2008) is 2400km^2 
+        therefore, 2400/16 = 150 contiguous squares
+    '''
+
+    frame = ma.empty((1,mergImgs.shape[1],mergImgs.shape[2]))
+    CEcounter = 0
+    frameCEcounter = 0
+    frameNum = 0
+    cloudElementEpsilon = 0.0
+    cloudElementDict = {} 
+    cloudElementCenter = []		#list with two elements [lat,lon] for the center of a CE
+    prevFrameCEs = []			#list for CEs in previous frame
+    currFrameCEs = []			#list for CEs in current frame
+    cloudElementLat = []		#list for a particular CE's lat values
+    cloudElementLon = []		#list for a particular CE's lon values
+    cloudElementLatLons = []	#list for a particular CE's (lat,lon) values
+    
+    prevLatValue = 0.0
+    prevLonValue = 0.0
+    TIR_min = 0.0
+    TIR_max = 0.0
+    temporalRes = 3 # TRMM data is 3 hourly
+    precipTotal = 0.0
+    CETRMMList =[]
+    precip =[]
+    TRMMCloudElementLatLons =[]
+
+    minCELatLimit = 0.0
+    minCELonLimit = 0.0
+    maxCELatLimit = 0.0
+    maxCELonLimit = 0.0
+    
+    nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
+    
+    #openfile for storing ALL cloudElement information 
+    cloudElementsFile = open((MAINDIRECTORY+'/textFiles/cloudElements.txt'),'wb')
+    #openfile for storing cloudElement information meeting user criteria i.e. MCCs in this case
+    cloudElementsUserFile = open((MAINDIRECTORY+'/textFiles/cloudElementsUserFile.txt'),'w')
+    
+    #NB in the TRMM files the info is hours since the file time, thus the 00Z file contains the 01, 02 and 03 times
+    for t in xrange(mergImgs.shape[0]):
+        #-------------------------------------------------
+        # #textfile name for saving the data for arcgis
+        # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + '.txt'
+        # cloudElementsTextFile = open(thisFileName,'w')
+        #-------------------------------------------------
+
+        #determine contiguous locations with temperature below the warmest temp i.e. cloudElements in each frame
+        frame, CEcounter = ndimage.measurements.label(mergImgs[t,:,:], structure=STRUCTURING_ELEMENT)
+        frameCEcounter=0
+        frameNum += 1
+
+        #for each of the areas identified, check to determine if it is a valid CE via an area and T requirement
+        for count in xrange(CEcounter):
+            #[0] is time dimension. Determine the actual values from the data
+            #loc is a masked array
+            try:
+                loc = ndimage.find_objects(frame==(count+1))[0]
+            except Exception, e:
+                print "Error is ", e
+                continue
+
+
+            cloudElement = mergImgs[t,:,:][loc]
+            labels, lcounter = ndimage.label(cloudElement)
+            
+            #determine the true lats and lons for this particular CE
+            cloudElementLat = LAT[loc[0],0]
+            cloudElementLon = LON[0,loc[1]] 
+            
+            #determine number of boxes in this cloudelement
+            numOfBoxes = np.count_nonzero(cloudElement)
+            cloudElementArea = numOfBoxes*XRES*YRES
+
+            #If the area is greater than the area required, or if the area is smaller than the suggested area, check if it meets a convective fraction requirement
+            #consider as CE
+
+            if cloudElementArea >= AREA_MIN or (cloudElementArea < AREA_MIN and ((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels)))) < CONVECTIVE_FRACTION ):
+
+                #get some time information and labeling info
+                frameTime = str(timelist[t])
+                frameCEcounter +=1
+                CEuniqueID = 'F'+str(frameNum)+'CE'+str(frameCEcounter) 
+
+                #-------------------------------------------------
+                #textfile name for accessing CE data using MATLAB code
+                # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.txt'
+                # cloudElementsTextFile = open(thisFileName,'w')
+                #-------------------------------------------------
+
+                # ------ NETCDF File stuff for brightness temp stuff ------------------------------------
+                thisFileName = MAINDIRECTORY +'/MERGnetcdfCEs/cloudElements'+ (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.nc'
+                currNetCDFCEData = Dataset(thisFileName, 'w', format='NETCDF4')
+                currNetCDFCEData.description = 'Cloud Element '+CEuniqueID + ' temperature data'
+                currNetCDFCEData.calendar = 'standard'
+                currNetCDFCEData.conventions = 'COARDS'
+                # dimensions
+                currNetCDFCEData.createDimension('time', None)
+                currNetCDFCEData.createDimension('lat', len(LAT[:,0]))
+                currNetCDFCEData.createDimension('lon', len(LON[0,:]))
+                # variables
+                tempDims = ('time','lat', 'lon',)
+                times = currNetCDFCEData.createVariable('time', 'f8', ('time',))
+                times.units = 'hours since '+ str(timelist[t])[:-6]
+                latitudes = currNetCDFCEData.createVariable('latitude', 'f8', ('lat',))
+                longitudes = currNetCDFCEData.createVariable('longitude', 'f8', ('lon',))
+                brightnesstemp = currNetCDFCEData.createVariable('brightnesstemp', 'i16',tempDims )
+                brightnesstemp.units = 'Kelvin'
+                # NETCDF data
+                dates=[timelist[t]+timedelta(hours=0)]
+                times[:] =  date2num(dates,units=times.units)
+                longitudes[:] = LON[0,:]
+                longitudes.units = "degrees_east" 
+                longitudes.long_name = "Longitude" 
+
+                latitudes[:] =  LAT[:,0]
+                latitudes.units = "degrees_north"
+                latitudes.long_name ="Latitude"
+                
+                #generate array of zeros for brightness temperature
+                brightnesstemp1 = ma.zeros((1,len(latitudes), len(longitudes))).astype('int16')
+                #-----------End most of NETCDF file stuff ------------------------------------
+
+                #if other dataset (TRMM) assumed to be a precipitation dataset was entered
+                if TRMMdirName:
+                    #------------------TRMM stuff -------------------------------------------------
+                    fileDate = ((str(timelist[t])).replace(" ", "")[:-8]).replace("-","")
+                    fileHr1 = (str(timelist[t])).replace(" ", "")[-8:-6]
+                    
+                    if int(fileHr1) % temporalRes == 0:
+                        fileHr = fileHr1
+                    else:
+                        fileHr = (int(fileHr1)/temporalRes) * temporalRes
+                    if fileHr < 10:
+                        fileHr = '0'+str(fileHr)
+                    else:
+                        str(fileHr)
+
+                    #open TRMM file for the resolution info and to create the appropriate sized grid
+                    TRMMfileName = TRMMdirName+'/3B42.'+ fileDate + "."+str(fileHr)+".7A.nc"
+                    
+                    TRMMData = Dataset(TRMMfileName,'r', format='NETCDF4')
+                    precipRate = TRMMData.variables['pcp'][:,:,:]
+                    latsrawTRMMData = TRMMData.variables['latitude'][:]
+                    lonsrawTRMMData = TRMMData.variables['longitude'][:]
+                    lonsrawTRMMData[lonsrawTRMMData > 180] = lonsrawTRMMData[lonsrawTRMMData>180] - 360.
+                    LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
+
+                    nygrdTRMM = len(LATTRMM[:,0]); nxgrdTRMM = len(LONTRMM[0,:])
+                    precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
+                    #---------regrid the TRMM data to the MERG dataset ----------------------------------
+                    #regrid using the do_regrid stuff from the Apache OCW 
+                    regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
+                    #regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
+                    regriddedTRMM = do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
+                    #----------------------------------------------------------------------------------
+        
+                    # #get the lat/lon info from cloudElement
+                    #get the lat/lon info from the file
+                    latCEStart = LAT[0][0]
+                    latCEEnd = LAT[-1][0]
+                    lonCEStart = LON[0][0]
+                    lonCEEnd = LON[0][-1]
+                    
+                    #get the lat/lon info for TRMM data (different resolution)
+                    latStartT = find_nearest(latsrawTRMMData, latCEStart)
+                    latEndT = find_nearest(latsrawTRMMData, latCEEnd)
+                    lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
+                    lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
+                    latStartIndex = np.where(latsrawTRMMData == latStartT)
+                    latEndIndex = np.where(latsrawTRMMData == latEndT)
+                    lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
+                    lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
+
+                    #get the relevant TRMM info 
+                    CEprecipRate = precipRate[:,(latStartIndex[0][0]-1):latEndIndex[0][0],(lonStartIndex[0][0]-1):lonEndIndex[0][0]]
+                    TRMMData.close()
+                    
+                    # ------ NETCDF File info for writing TRMM CE rainfall ------------------------------------
+                    thisFileName = MAINDIRECTORY+'/TRMMnetcdfCEs/TRMM' + (str(timelist[t])).replace(" ", "_") + CEuniqueID +'.nc'
+                    currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
+                    currNetCDFTRMMData.description = 'Cloud Element '+CEuniqueID + ' precipitation data'
+                    currNetCDFTRMMData.calendar = 'standard'
+                    currNetCDFTRMMData.conventions = 'COARDS'
+                    # dimensions
+                    currNetCDFTRMMData.createDimension('time', None)
+                    currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
+                    currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
+                    
+                    # variables
+                    TRMMprecip = ('time','lat', 'lon',)
+                    times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
+                    times.units = 'hours since '+ str(timelist[t])[:-6]
+                    latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
+                    longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
+                    rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
+                    rainFallacc.units = 'mm'
+
+                    longitude[:] = LON[0,:]
+                    longitude.units = "degrees_east" 
+                    longitude.long_name = "Longitude" 
+
+                    latitude[:] =  LAT[:,0]
+                    latitude.units = "degrees_north"
+                    latitude.long_name ="Latitude"
+
+                    finalCETRMMvalues = ma.zeros((brightnesstemp.shape))
+                    #-----------End most of NETCDF file stuff ------------------------------------
+
+                #populate cloudElementLatLons by unpacking the original values from loc to get the actual value for lat and lon
+                #TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
+                # 			as cloudElement is masked
+                for index,value in np.ndenumerate(cloudElement):
+                    if value != 0 : 
+                        lat_index,lon_index = index
+                        lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index],value)
+
+                        #generate the comma separated file for GIS
+                        cloudElementLatLons.append(lat_lon_tuple)
+
+                        #temp data for CE NETCDF file
+                        brightnesstemp1[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = value
+                        
+                        if TRMMdirName:
+                            finalCETRMMvalues[0,int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])] = regriddedTRMM[int(np.where(LAT[:,0]==cloudElementLat[lat_index])[0]),int(np.where(LON[0,:]==cloudElementLon[lon_index])[0])]
+                            CETRMMList.append((cloudElementLat[lat_index], cloudElementLon[lon_index], finalCETRMMvalues[0,cloudElementLat[lat_index], cloudElementLon[lon_index]]))
+
+
+                brightnesstemp[:] = brightnesstemp1[:]
+                currNetCDFCEData.close()
+
+                if TRMMdirName:
+
+                    #calculate the total precip associated with the feature
+                    for index, value in np.ndenumerate(finalCETRMMvalues):
+                        precipTotal += value 
+                        precip.append(value)
+            
+                    rainFallacc[:] = finalCETRMMvalues[:]
+                    currNetCDFTRMMData.close()
+                    TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
+                    TRMMArea = TRMMnumOfBoxes*XRES*YRES
+                    try:
+                        maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+                        minCEprecipRate = np.min(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+                    except:
+                        pass
+
+                #sort cloudElementLatLons by lats
+                cloudElementLatLons.sort(key=lambda tup: tup[0])	
+
+                #determine the shape of the cloud element
+                cloudElementEpsilon = eccentricity (cloudElement)
+                cloudElementsUserFile.write("\n\nTime is: %s" %(str(timelist[t])))
+                cloudElementsUserFile.write("\nCEuniqueID is: %s" %CEuniqueID)
+                latCenter, lonCenter = ndimage.measurements.center_of_mass(cloudElement, labels=labels)
+                
+                #latCenter and lonCenter are given according to the particular array defining this CE
+                #so you need to convert this value to the overall domain truth
+                latCenter = cloudElementLat[round(latCenter)]
+                lonCenter = cloudElementLon[round(lonCenter)]
+                cloudElementsUserFile.write("\nCenter (lat,lon) is: %.2f\t%.2f" %(latCenter, lonCenter))
+                cloudElementCenter.append(latCenter)
+                cloudElementCenter.append(lonCenter)
+                cloudElementsUserFile.write("\nNumber of boxes are: %d" %numOfBoxes)
+                cloudElementsUserFile.write("\nArea is: %.4f km^2" %(cloudElementArea))
+                cloudElementsUserFile.write("\nAverage brightness temperature is: %.4f K" %ndimage.mean(cloudElement, labels=labels))
+                cloudElementsUserFile.write("\nMin brightness temperature is: %.4f K" %ndimage.minimum(cloudElement, labels=labels))
+                cloudElementsUserFile.write("\nMax brightness temperature is: %.4f K" %ndimage.maximum(cloudElement, labels=labels))
+                cloudElementsUserFile.write("\nBrightness temperature variance is: %.4f K" %ndimage.variance(cloudElement, labels=labels))
+                cloudElementsUserFile.write("\nConvective fraction is: %.4f " %(((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels))))*100.0))
+                cloudElementsUserFile.write("\nEccentricity is: %.4f " %(cloudElementEpsilon))
+                #populate the dictionary
+                if TRMMdirName:
+                    cloudElementDict = {'uniqueID': CEuniqueID, 'cloudElementTime': timelist[t],'cloudElementLatLon': cloudElementLatLons, 'cloudElementCenter':cloudElementCenter, 'cloudElementArea':cloudElementArea, 'cloudElementEccentricity':cloudElementEpsilon, 'cloudElementTmax':TIR_max, 'cloudElementTmin': TIR_min, 'cloudElementPrecipTotal':precipTotal,'cloudElementLatLonTRMM':CETRMMList, 'TRMMArea': TRMMArea,'CETRMMmax':maxCEprecipRate, 'CETRMMmin':minCEprecipRate}
+                else:
+                    cloudElementDict = {'uniqueID': CEuniqueID, 'cloudElementTime': timelist[t],'cloudElementLatLon': cloudElementLatLons, 'cloudElementCenter':cloudElementCenter, 'cloudElementArea':cloudElementArea, 'cloudElementEccentricity':cloudElementEpsilon, 'cloudElementTmax':TIR_max, 'cloudElementTmin': TIR_min,}
+                
+                #current frame list of CEs
+                currFrameCEs.append(cloudElementDict)
+                
+                #draw the graph node
+                CLOUD_ELEMENT_GRAPH.add_node(CEuniqueID, cloudElementDict)
+                
+                if frameNum != 1:
+                    for cloudElementDict in prevFrameCEs:
+                        thisCElen = len(cloudElementLatLons)
+                        percentageOverlap, areaOverlap = cloudElementOverlap(cloudElementLatLons, cloudElementDict['cloudElementLatLon'])
+                        
+                        #change weights to integers because the built in shortest path chokes on floating pts according to Networkx doc
+                        #according to Goyens et al, two CEs are considered related if there is at least 95% overlap between them for consecutive imgs a max of 2 hrs apart
+                        if percentageOverlap >= 0.95: 
+                            CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[0])
+                            
+                        elif percentageOverlap >= 0.90 and percentageOverlap < 0.95 :
+                            CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[1])
+
+                        elif areaOverlap >= MIN_OVERLAP:
+                            CLOUD_ELEMENT_GRAPH.add_edge(cloudElementDict['uniqueID'], CEuniqueID, weight=edgeWeight[2])
+
+                else:
+                    #TODO: remove this else as we only wish for the CE details
+                    #ensure only the non-zero elements are considered
+                    #store intel in allCE file
+                    labels, _ = ndimage.label(cloudElement)
+                    cloudElementsFile.write("\n-----------------------------------------------")
+                    cloudElementsFile.write("\n\nTime is: %s" %(str(timelist[t])))
+                    # cloudElementLat = LAT[loc[0],0]
+                    # cloudElementLon = LON[0,loc[1]] 
+                    
+                    #populate cloudElementLatLons by unpacking the original values from loc
+                    #TODO: KDW - too dirty... play with itertools.izip or zip and the enumerate with this
+                    # 			as cloudElement is masked
+                    for index,value in np.ndenumerate(cloudElement):
+                        if value != 0 : 
+                            lat_index,lon_index = index
+                            lat_lon_tuple = (cloudElementLat[lat_index], cloudElementLon[lon_index])
+                            cloudElementLatLons.append(lat_lon_tuple)
+    
+                    cloudElementsFile.write("\nLocation of rejected CE (lat,lon) points are: %s" %cloudElementLatLons)
+                    #latCenter and lonCenter are given according to the particular array defining this CE
+                    #so you need to convert this value to the overall domain truth
+                    latCenter, lonCenter = ndimage.measurements.center_of_mass(cloudElement, labels=labels)
+                    latCenter = cloudElementLat[round(latCenter)]
+                    lonCenter = cloudElementLon[round(lonCenter)]
+                    cloudElementsFile.write("\nCenter (lat,lon) is: %.2f\t%.2f" %(latCenter, lonCenter))
+                    cloudElementsFile.write("\nNumber of boxes are: %d" %numOfBoxes)
+                    cloudElementsFile.write("\nArea is: %.4f km^2" %(cloudElementArea))
+                    cloudElementsFile.write("\nAverage brightness temperature is: %.4f K" %ndimage.mean(cloudElement, labels=labels))
+                    cloudElementsFile.write("\nMin brightness temperature is: %.4f K" %ndimage.minimum(cloudElement, labels=labels))
+                    cloudElementsFile.write("\nMax brightness temperature is: %.4f K" %ndimage.maximum(cloudElement, labels=labels))
+                    cloudElementsFile.write("\nBrightness temperature variance is: %.4f K" %ndimage.variance(cloudElement, labels=labels))
+                    cloudElementsFile.write("\nConvective fraction is: %.4f " %(((ndimage.minimum(cloudElement, labels=labels))/float((ndimage.maximum(cloudElement, labels=labels))))*100.0))
+                    cloudElementsFile.write("\nEccentricity is: %.4f " %(cloudElementEpsilon))
+                    cloudElementsFile.write("\n-----------------------------------------------")
+                    
+            #reset list for the next CE
+            nodeExist = False
+            cloudElementCenter=[]
+            cloudElement = []
+            cloudElementLat=[]
+            cloudElementLon =[]
+            cloudElementLatLons =[]
+            brightnesstemp1 =[]
+            brightnesstemp =[]
+            finalCETRMMvalues =[]
+            CEprecipRate =[]
+            CETRMMList =[]
+            precipTotal = 0.0
+            precip=[]
+            TRMMCloudElementLatLons=[]
+            
+        #reset for the next time
+        prevFrameCEs =[]
+        prevFrameCEs = currFrameCEs
+        currFrameCEs =[]
+                        
+    cloudElementsFile.close()
+    cloudElementsUserFile.close()
+    #if using ARCGIS data store code, uncomment this file close line
+    #cloudElementsTextFile.close()
+
+    #clean up graph - remove parentless and childless nodes
+    #NB: degree_iter() is NetworkX 1.x API; NetworkX 2.x provides the same (node, degree) pairs via degree()
+    outAndInDeg = CLOUD_ELEMENT_GRAPH.degree_iter()
+    toRemove = [node[0] for node in outAndInDeg if node[1]<1]
+    CLOUD_ELEMENT_GRAPH.remove_nodes_from(toRemove)
+
+    print("number of nodes are: ", CLOUD_ELEMENT_GRAPH.number_of_nodes())
+    print("number of edges are: ", CLOUD_ELEMENT_GRAPH.number_of_edges())
+    print("*"*80)
+
+    #hierarchical graph output
+    graphTitle = "Cloud Elements observed over somewhere from 0000Z to 0000Z" 
+    #drawGraph(CLOUD_ELEMENT_GRAPH, graphTitle, edgeWeight)
+
+    return CLOUD_ELEMENT_GRAPH	
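
A minimal sketch of the overlap criterion referenced above: given the (lat, lon) point
lists of two CEs from consecutive frames, the fraction of the earlier CE's points that
reappear in the later CE decides the edge weight. Names here are hypothetical; the
project's own cloudElementOverlap returns the percentage and area of overlap.

    def overlap_fraction(prev_latlons, curr_latlons):
        # fraction of the earlier CE's grid points shared with the later CE,
        # plus the raw count of shared points (a proxy for the area overlap)
        prev = set(prev_latlons)
        shared = prev & set(curr_latlons)
        return len(shared) / float(len(prev)), len(shared)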
 #******************************************************************
 def findPrecipRate(TRMMdirName, timelist):
-	''' 
-	Purpose:: 
-		Determines the precipitation rates for MCSs found if TRMMdirName was not entered in findCloudElements this can be used
+    ''' 
+    Purpose:: 
+        Determines the precipitation rates for the MCSs found. If TRMMdirName was not entered in findCloudElements, this can be used.
 
-	Input:: 
-		TRMMdirName: a string representing the directory for the original TRMM netCDF files
-		timelist: a list of python datatimes
+    Input:: 
+        TRMMdirName: a string representing the directory for the original TRMM netCDF files
+        timelist: a list of python datetimes
 
-	Output:: a list of dictionary of the TRMM data 
-		NB: also creates netCDF with TRMM data for each CE (for post processing) index
-			in MAINDIRECTORY/TRMMnetcdfCEs
+    Output:: a list of dictionaries of the TRMM data 
+        NB: also creates netCDF with TRMM data for each CE (for post processing) index
+            in MAINDIRECTORY/TRMMnetcdfCEs
    
-	Assumptions:: Assumes that findCloudElements was run without the TRMMdirName value 
+    Assumptions:: Assumes that findCloudElements was run without the TRMMdirName value 
  
-	'''
-	allCEnodesTRMMdata =[]
-	TRMMdataDict={}
-	precipTotal = 0.0
-
-	os.chdir((MAINDIRECTORY+'/MERGnetcdfCEs/'))
-	imgFilename = ''
-	temporalRes = 3 #3 hours for TRMM
-	
-	#sort files
-	files = filter(os.path.isfile, glob.glob("*.nc"))
-	files.sort(key=lambda x: os.path.getmtime(x))
-	
-	for afile in files:
-		fullFname = os.path.splitext(afile)[0]
-		noFrameExtension = (fullFname.replace("_","")).split('F')[0]
-		CEuniqueID = 'F' +(fullFname.replace("_","")).split('F')[1]
-		fileDateTimeChar = (noFrameExtension.replace(":","")).split('s')[1]
-		fileDateTime = fileDateTimeChar.replace("-","")
-		fileDate = fileDateTime[:-6]
-		fileHr1=fileDateTime[-6:-4]
-
-		cloudElementData = Dataset(afile,'r', format='NETCDF4')
-		brightnesstemp1 = cloudElementData.variables['brightnesstemp'][:,:,:] 
-		latsrawCloudElements = cloudElementData.variables['latitude'][:]
-		lonsrawCloudElements = cloudElementData.variables['longitude'][:]
-		
-		brightnesstemp = np.squeeze(brightnesstemp1, axis=0)
-		
-		if int(fileHr1) % temporalRes == 0:
-			fileHr = fileHr1
-		else:
-			fileHr = (int(fileHr1)/temporalRes) * temporalRes
-		
-		if fileHr < 10:
-			fileHr = '0'+str(fileHr)
-		else:
-			str(fileHr)
-
-		TRMMfileName = TRMMdirName+"/3B42."+ str(fileDate) + "."+str(fileHr)+".7A.nc"
-		TRMMData = Dataset(TRMMfileName,'r', format='NETCDF4')
-		precipRate = TRMMData.variables['pcp'][:,:,:]
-		latsrawTRMMData = TRMMData.variables['latitude'][:]
-		lonsrawTRMMData = TRMMData.variables['longitude'][:]
-		lonsrawTRMMData[lonsrawTRMMData > 180] = lonsrawTRMMData[lonsrawTRMMData>180] - 360.
-		LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
-
-		#nygrdTRMM = len(LATTRMM[:,0]); nxgrd = len(LONTRMM[0,:])
-		nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
-
-		precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
-		#---------regrid the TRMM data to the MERG dataset ----------------------------------
-		#regrid using the do_regrid stuff from the Apache OCW 
-		regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
-		regriddedTRMM = do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
-		#regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
-		#----------------------------------------------------------------------------------
-
-		# #get the lat/lon info from
-		latCEStart = LAT[0][0]
-		latCEEnd = LAT[-1][0]
-		lonCEStart = LON[0][0]
-		lonCEEnd = LON[0][-1]
-
-		#get the lat/lon info for TRMM data (different resolution)
-		latStartT = find_nearest(latsrawTRMMData, latCEStart)
-		latEndT = find_nearest(latsrawTRMMData, latCEEnd)
-		lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
-		lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
-		latStartIndex = np.where(latsrawTRMMData == latStartT)
-		latEndIndex = np.where(latsrawTRMMData == latEndT)
-		lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
-		lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
-
-		#get the relevant TRMM info 
-		CEprecipRate = precipRate[:,(latStartIndex[0][0]-1):latEndIndex[0][0],(lonStartIndex[0][0]-1):lonEndIndex[0][0]]
-		TRMMData.close()
-			
-		
-		# ------ NETCDF File stuff ------------------------------------
-		thisFileName = MAINDIRECTORY+'/TRMMnetcdfCEs/'+ fileDateTime + CEuniqueID +'.nc'
-		currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
-		currNetCDFTRMMData.description = 'Cloud Element '+CEuniqueID + ' rainfall data'
-		currNetCDFTRMMData.calendar = 'standard'
-		currNetCDFTRMMData.conventions = 'COARDS'
-		# dimensions
-		currNetCDFTRMMData.createDimension('time', None)
-		currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
-		currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
-		# variables
-		TRMMprecip = ('time','lat', 'lon',)
-		times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
-		times.units = 'hours since '+ fileDateTime[:-6] 
-		latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
-		longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
-		rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
-		rainFallacc.units = 'mm'
-
-		longitude[:] = LON[0,:]
-		longitude.units = "degrees_east" 
-		longitude.long_name = "Longitude" 
-
-		latitude[:] =  LAT[:,0]
-		latitude.units = "degrees_north"
-		latitude.long_name ="Latitude"
-
-		finalCETRMMvalues = ma.zeros((brightnesstemp1.shape))
-		#-----------End most of NETCDF file stuff ------------------------------------	
-		for index,value in np.ndenumerate(brightnesstemp):
-			lat_index, lon_index = index
-			currTimeValue = 0
-			if value > 0:
-
-				finalCETRMMvalues[0,lat_index,lon_index] = regriddedTRMM[int(np.where(LAT[:,0]==LAT[lat_index,0])[0]), int(np.where(LON[0,:]==LON[0,lon_index])[0])]
-				
-
-		rainFallacc[:] = finalCETRMMvalues
-		currNetCDFTRMMData.close()
-
-		for index, value in np.ndenumerate(finalCETRMMvalues):
-			precipTotal += value 
-
-		TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
-		TRMMArea = TRMMnumOfBoxes*XRES*YRES	
-
-		try:
-			minCEprecipRate = np.min(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
-		except:
-			minCEprecipRate = 0.0
-
-		try:
-			maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
-		except:
-			maxCEprecipRate = 0.0
-
-		#add info to CLOUDELEMENTSGRAPH
-		#TODO try block
-		for eachdict in CLOUD_ELEMENT_GRAPH.nodes(CEuniqueID):
-			if eachdict[1]['uniqueID'] == CEuniqueID:
-				if not 'cloudElementPrecipTotal' in eachdict[1].keys():
-					eachdict[1]['cloudElementPrecipTotal'] = precipTotal
-				if not 'cloudElementLatLonTRMM' in eachdict[1].keys():
-					eachdict[1]['cloudElementLatLonTRMM'] = finalCETRMMvalues
-				if not 'TRMMArea' in eachdict[1].keys():
-					eachdict[1]['TRMMArea'] = TRMMArea
-				if not 'CETRMMmin' in eachdict[1].keys():
-					eachdict[1]['CETRMMmin'] = minCEprecipRate
-				if not 'CETRMMmax' in eachdict[1].keys():
-					eachdict[1]['CETRMMmax'] = maxCEprecipRate
-
-		#clean up
-		precipTotal = 0.0
-		latsrawTRMMData =[]
-		lonsrawTRMMData = []
-		latsrawCloudElements=[]
-		lonsrawCloudElements=[]
-		finalCETRMMvalues =[]
-		CEprecipRate =[]
-		brightnesstemp =[]
-		TRMMdataDict ={}
-
-	return allCEnodesTRMMdata
+    '''
+    allCEnodesTRMMdata =[]
+    TRMMdataDict={}
+    precipTotal = 0.0
+
+    os.chdir((MAINDIRECTORY+'/MERGnetcdfCEs/'))
+    imgFilename = ''
+    temporalRes = 3 #3 hours for TRMM
+    
+    #sort files by modification time
+    #wrap filter() in list() so .sort() works under Python 3, where filter returns an iterator
+    files = list(filter(os.path.isfile, glob.glob("*.nc")))
+    files.sort(key=lambda x: os.path.getmtime(x))
+    
+    for afile in files:
+        fullFname = os.path.splitext(afile)[0]
+        noFrameExtension = (fullFname.replace("_","")).split('F')[0]
+        CEuniqueID = 'F' +(fullFname.replace("_","")).split('F')[1]
+        fileDateTimeChar = (noFrameExtension.replace(":","")).split('s')[1]
+        fileDateTime = fileDateTimeChar.replace("-","")
+        fileDate = fileDateTime[:-6]
+        fileHr1=fileDateTime[-6:-4]
+
+        cloudElementData = Dataset(afile,'r', format='NETCDF4')
+        brightnesstemp1 = cloudElementData.variables['brightnesstemp'][:,:,:] 
+        latsrawCloudElements = cloudElementData.variables['latitude'][:]
+        lonsrawCloudElements = cloudElementData.variables['longitude'][:]
+        
+        brightnesstemp = np.squeeze(brightnesstemp1, axis=0)
+        
+        #round down to the nearest TRMM (3 hrly) time; use integer division (//)
+        #so the result is the same under Python 2 and Python 3
+        if int(fileHr1) % temporalRes == 0:
+            fileHr = int(fileHr1)
+        else:
+            fileHr = (int(fileHr1) // temporalRes) * temporalRes
+
+        #zero-pad the hour for the TRMM filename
+        fileHr = '%02d' % fileHr
+
+        TRMMfileName = TRMMdirName+"/3B42."+ str(fileDate) + "."+str(fileHr)+".7A.nc"
+        TRMMData = Dataset(TRMMfileName,'r', format='NETCDF4')
+        precipRate = TRMMData.variables['pcp'][:,:,:]
+        latsrawTRMMData = TRMMData.variables['latitude'][:]
+        lonsrawTRMMData = TRMMData.variables['longitude'][:]
+        lonsrawTRMMData[lonsrawTRMMData > 180] = lonsrawTRMMData[lonsrawTRMMData>180] - 360.
+        LONTRMM, LATTRMM = np.meshgrid(lonsrawTRMMData, latsrawTRMMData)
+
+        #nygrdTRMM = len(LATTRMM[:,0]); nxgrd = len(LONTRMM[0,:])
+        nygrd = len(LAT[:, 0]); nxgrd = len(LON[0, :])
+
+        precipRateMasked = ma.masked_array(precipRate, mask=(precipRate < 0.0))
+        #---------regrid the TRMM data to the MERG dataset ----------------------------------
+        #regrid using the do_regrid stuff from the Apache OCW 
+        regriddedTRMM = ma.zeros((0, nygrd, nxgrd))
+        regriddedTRMM = do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
+        #regriddedTRMM = process.do_regrid(precipRateMasked[0,:,:], LATTRMM,  LONTRMM, LAT, LON, order=1, mdi= -999999999)
+        #----------------------------------------------------------------------------------
+
+        # #get the lat/lon info from
+        latCEStart = LAT[0][0]
+        latCEEnd = LAT[-1][0]
+        lonCEStart = LON[0][0]
+        lonCEEnd = LON[0][-1]
+
+        #get the lat/lon info for TRMM data (different resolution)
+        latStartT = find_nearest(latsrawTRMMData, latCEStart)
+        latEndT = find_nearest(latsrawTRMMData, latCEEnd)
+        lonStartT = find_nearest(lonsrawTRMMData, lonCEStart)
+        lonEndT = find_nearest(lonsrawTRMMData, lonCEEnd)
+        latStartIndex = np.where(latsrawTRMMData == latStartT)
+        latEndIndex = np.where(latsrawTRMMData == latEndT)
+        lonStartIndex = np.where(lonsrawTRMMData == lonStartT)
+        lonEndIndex = np.where(lonsrawTRMMData == lonEndT)
+
+        #get the relevant TRMM info 
+        CEprecipRate = precipRate[:,(latStartIndex[0][0]-1):latEndIndex[0][0],(lonStartIndex[0][0]-1):lonEndIndex[0][0]]
+        TRMMData.close()
+            
+        
+        # ------ NETCDF File stuff ------------------------------------
+        thisFileName = MAINDIRECTORY+'/TRMMnetcdfCEs/'+ fileDateTime + CEuniqueID +'.nc'
+        currNetCDFTRMMData = Dataset(thisFileName, 'w', format='NETCDF4')
+        currNetCDFTRMMData.description = 'Cloud Element '+CEuniqueID + ' rainfall data'
+        currNetCDFTRMMData.calendar = 'standard'
+        currNetCDFTRMMData.conventions = 'COARDS'
+        # dimensions
+        currNetCDFTRMMData.createDimension('time', None)
+        currNetCDFTRMMData.createDimension('lat', len(LAT[:,0]))
+        currNetCDFTRMMData.createDimension('lon', len(LON[0,:]))
+        # variables
+        TRMMprecip = ('time','lat', 'lon',)
+        times = currNetCDFTRMMData.createVariable('time', 'f8', ('time',))
+        times.units = 'hours since '+ fileDateTime[:-6] 
+        latitude = currNetCDFTRMMData.createVariable('latitude', 'f8', ('lat',))
+        longitude = currNetCDFTRMMData.createVariable('longitude', 'f8', ('lon',))
+        rainFallacc = currNetCDFTRMMData.createVariable('precipitation_Accumulation', 'f8',TRMMprecip )
+        rainFallacc.units = 'mm'
+
+        longitude[:] = LON[0,:]
+        longitude.units = "degrees_east" 
+        longitude.long_name = "Longitude" 
+
+        latitude[:] =  LAT[:,0]
+        latitude.units = "degrees_north"
+        latitude.long_name ="Latitude"
+
+        finalCETRMMvalues = ma.zeros((brightnesstemp1.shape))
+        #-----------End most of NETCDF file stuff ------------------------------------	
+        for index,value in np.ndenumerate(brightnesstemp):
+            lat_index, lon_index = index
+            currTimeValue = 0
+            if value > 0:
+
+                finalCETRMMvalues[0,lat_index,lon_index] = regriddedTRMM[int(np.where(LAT[:,0]==LAT[lat_index,0])[0]), int(np.where(LON[0,:]==LON[0,lon_index])[0])]
+                
+
+        rainFallacc[:] = finalCETRMMvalues
+        currNetCDFTRMMData.close()
+
+        for index, value in np.ndenumerate(finalCETRMMvalues):
+            precipTotal += value 
+
+        TRMMnumOfBoxes = np.count_nonzero(finalCETRMMvalues)
+        TRMMArea = TRMMnumOfBoxes*XRES*YRES	
+
+        try:
+            minCEprecipRate = np.min(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+        except ValueError:
+            #np.min raises ValueError on an empty selection
+            minCEprecipRate = 0.0
+
+        try:
+            maxCEprecipRate = np.max(finalCETRMMvalues[np.nonzero(finalCETRMMvalues)])
+        except ValueError:
+            maxCEprecipRate = 0.0
+
+        #add info to CLOUD_ELEMENT_GRAPH
+        #TODO try block
+        #NB: the truthy argument makes NetworkX 1.x nodes() return (node, data) tuples
+        for eachdict in CLOUD_ELEMENT_GRAPH.nodes(CEuniqueID):
+            if eachdict[1]['uniqueID'] == CEuniqueID:
+                if 'cloudElementPrecipTotal' not in eachdict[1]:
+                    eachdict[1]['cloudElementPrecipTotal'] = precipTotal
+                if 'cloudElementLatLonTRMM' not in eachdict[1]:
+                    eachdict[1]['cloudElementLatLonTRMM'] = finalCETRMMvalues
+                if 'TRMMArea' not in eachdict[1]:
+                    eachdict[1]['TRMMArea'] = TRMMArea
+                if 'CETRMMmin' not in eachdict[1]:
+                    eachdict[1]['CETRMMmin'] = minCEprecipRate
+                if 'CETRMMmax' not in eachdict[1]:
+                    eachdict[1]['CETRMMmax'] = maxCEprecipRate
+
+        #clean up
+        precipTotal = 0.0
+        latsrawTRMMData =[]
+        lonsrawTRMMData = []
+        latsrawCloudElements=[]
+        lonsrawCloudElements=[]
+        finalCETRMMvalues =[]
+        CEprecipRate =[]
+        brightnesstemp =[]
+        TRMMdataDict ={}
+
+    return allCEnodesTRMMdata
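
The nearest-coordinate lookup above (find_nearest followed by np.where) can be collapsed
into a single argmin over absolute differences; a minimal sketch with hypothetical names,
assuming a 1-D coordinate array:

    import numpy as np

    def nearest_index(coords, value):
        # index of the coordinate closest to value
        return int(np.abs(np.asarray(coords) - value).argmin())

    # e.g. latStartIndex = nearest_index(latsrawTRMMData, latCEStart)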
 #******************************************************************	
 def findCloudClusters(CEGraph):
-	'''
-	Purpose:: 
-		Determines the cloud clusters properties from the subgraphs in 
-		the graph i.e. prunes the graph according to the minimum depth
-
-	Input:: 
-		CEGraph: a Networkx directed graph of the CEs with weighted edges
-		according the area overlap between nodes (CEs) of consectuive frames
-	
-	Output:: 
-		PRUNED_GRAPH: a Networkx directed graph of with CCs/ MCSs
-
-	'''
-
-	seenNode = []
-	allMCSLists =[]
-	pathDictList =[]
-	pathList=[]
-
-	cloudClustersFile = open((MAINDIRECTORY+'/textFiles/cloudClusters.txt'),'wb')
-	
-	for eachNode in CEGraph:
-		#check if the node has been seen before
-		if eachNode not in dict(enumerate(zip(*seenNode))):
-			#look for all trees associated with node as the root
-			thisPathDistanceAndLength = nx.single_source_dijkstra(CEGraph, eachNode)
-			#determine the actual shortestPath and minimum depth/length
-			maxDepthAndMinPath = findMaxDepthAndMinPath(thisPathDistanceAndLength)
-			if maxDepthAndMinPath:
-				maxPathLength = maxDepthAndMinPath[0] 
-				shortestPath = maxDepthAndMinPath[1]
-				
-				#add nodes and paths to PRUNED_GRAPH
-				for i in xrange(len(shortestPath)):
-					if PRUNED_GRAPH.has_node(shortestPath[i]) is False:
-						PRUNED_GRAPH.add_node(shortestPath[i])
-						
-					#add edge if necessary
-					if i < (len(shortestPath)-1) and PRUNED_GRAPH.has_edge(shortestPath[i], shortestPath[i+1]) is False:
-						prunedGraphEdgeweight = CEGraph.get_edge_data(shortestPath[i], shortestPath[i+1])['weight']
-						PRUNED_GRAPH.add_edge(shortestPath[i], shortestPath[i+1], weight=prunedGraphEdgeweight)
-
-				#note information in a file for consideration later i.e. checking to see if it works
-				cloudClustersFile.write("\nSubtree pathlength is %d and path is %s" %(maxPathLength, shortestPath))
-				#update seenNode info
-				seenNode.append(shortestPath)	
-
-	print "pruned graph"
-	print "number of nodes are: ", PRUNED_GRAPH.number_of_nodes()
-	print "number of edges are: ", PRUNED_GRAPH.number_of_edges()
-	print ("*"*80)		
-					
-	graphTitle = "Cloud Clusters observed over somewhere during sometime"
-	#drawGraph(PRUNED_GRAPH, graphTitle, edgeWeight)
-	cloudClustersFile.close
-	
-	return PRUNED_GRAPH  
+    '''
+    Purpose:: 
+        Determines the cloud cluster properties from the subgraphs in 
+        the graph i.e. prunes the graph according to the minimum depth
+
+    Input:: 
+        CEGraph: a Networkx directed graph of the CEs with weighted edges
+        according to the area overlap between nodes (CEs) of consecutive frames
+    
+    Output:: 
+        PRUNED_GRAPH: a Networkx directed graph of the CCs/MCSs
+
+    '''
+
+    seenNode = []
+    allMCSLists =[]
+    pathDictList =[]
+    pathList=[]
+
+    #open in text mode ('w') so writing str works under Python 3
+    cloudClustersFile = open((MAINDIRECTORY+'/textFiles/cloudClusters.txt'),'w')
+    
+    for eachNode in CEGraph:
+        #check if the node has been seen before
+        if eachNode not in dict(enumerate(zip(*seenNode))):
+            #look for all trees associated with node as the root
+            thisPathDistanceAndLength = nx.single_source_dijkstra(CEGraph, eachNode)
+            #determine the actual shortestPath and minimum depth/length
+            maxDepthAndMinPath = findMaxDepthAndMinPath(thisPathDistanceAndLength)
+            if maxDepthAndMinPath:
+                maxPathLength = maxDepthAndMinPath[0] 
+                shortestPath = maxDepthAndMinPath[1]
+                
+                #add nodes and paths to PRUNED_GRAPH
+                for i in range(len(shortestPath)):
+                    if PRUNED_GRAPH.has_node(shortestPath[i]) is False:
+                        PRUNED_GRAPH.add_node(shortestPath[i])
+                        
+                    #add edge if necessary
+                    if i < (len(shortestPath)-1) and PRUNED_GRAPH.has_edge(shortestPath[i], shortestPath[i+1]) is False:
+                        prunedGraphEdgeweight = CEGraph.get_edge_data(shortestPath[i], shortestPath[i+1])['weight']
+                        PRUNED_GRAPH.add_edge(shortestPath[i], shortestPath[i+1], weight=prunedGraphEdgeweight)
+
+                #note information in a file for consideration later i.e. checking to see if it works
+                cloudClustersFile.write("\nSubtree pathlength is %d and path is %s" %(maxPathLength, shortestPath))
+                #update seenNode info
+                seenNode.append(shortestPath)	
+
+    print "pruned graph"
+    print "number of nodes are: ", PRUNED_GRAPH.number_of_nodes()
+    print "number of edges are: ", PRUNED_GRAPH.number_of_edges()
+    print ("*"*80)		
+                    
+    graphTitle = "Cloud Clusters observed over somewhere during sometime"
+    #drawGraph(PRUNED_GRAPH, graphTitle, edgeWeight)
+    cloudClustersFile.close()
+    
+    return PRUNED_GRAPH  
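
A minimal sketch of the pruning idea above, assuming a NetworkX DiGraph with integer edge
weights (node names hypothetical; findMaxDepthAndMinPath is the project's own selection
of the deepest tree):

    import networkx as nx

    G = nx.DiGraph()
    G.add_weighted_edges_from([('F1CE1', 'F2CE1', 1), ('F2CE1', 'F3CE1', 2)])

    # with no target, single_source_dijkstra returns (distances, paths),
    # one entry per node reachable from the source
    distances, paths = nx.single_source_dijkstra(G, 'F1CE1')
    deepest = max(paths.values(), key=len)  # the longest rooted path is kept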
 #******************************************************************
 def findMCC (prunedGraph):
-	'''
-	Purpose:: 
-		Determines if subtree is a MCC according to Laurent et al 1998 criteria
-
-	Input:: 
-		prunedGraph: a Networkx Graph representing the CCs 
-
-	Output:: 
-		finalMCCList: a list of list of tuples representing a MCC
-			 
-	Assumptions: 
-		frames are ordered and are equally distributed in time e.g. hrly satellite images
+    '''
+    Purpose:: 
+        Determines if a subtree is an MCC according to the Laurent et al 1998 criteria
+
+    Input:: 
+        prunedGraph: a Networkx Graph representing the CCs 
+
+    Output:: 
+        finalMCCList: a list of lists of tuples representing an MCC
+             
+    Assumptions: 
+        frames are ordered and are equally distributed in time e.g. hrly satellite images
  
-	'''
-	MCCList = []
-	MCSList = []
-	definiteMCC = []
-	definiteMCS = []
-	eachList =[]
-	eachMCCList =[]
-	maturing = False
-	decaying = False
-	fNode = ''
-	lNode = ''
-	removeList =[]
-	imgCount = 0
-	imgTitle =''
-	
-	maxShieldNode = ''
-	orderedPath =[]
-	treeTraversalList =[]
-	definiteMCCFlag = False
-	unDirGraph = nx.Graph()
-	aSubGraph = nx.DiGraph()
-	definiteMCSFlag = False
-
-	
-	#connected_components is not available for DiGraph, so generate graph as undirected 
-	unDirGraph = PRUNED_GRAPH.to_undirected()
-	subGraph = nx.connected_component_subgraphs(unDirGraph)
-
-	#for each path in the subgraphs determined
-	for path in subGraph:
-		#definite is a subTree provided the duration is longer than 3 hours
-
-		if len(path.nodes()) > MIN_MCS_DURATION:
-			orderedPath = path.nodes()
-			orderedPath.sort(key=lambda item:(len(item.split('C')[0]), item.split('C')[0]))
-			#definiteMCS.append(orderedPath)
-
-			#build back DiGraph for checking purposes/paper purposes
-			aSubGraph.add_nodes_from(path.nodes())	
-			for eachNode in path.nodes():
-				if prunedGraph.predecessors(eachNode):
-					for node in prunedGraph.predecessors(eachNode):
-						aSubGraph.add_edge(node,eachNode,weight=edgeWeight[0])
-
-				if prunedGraph.successors(eachNode):
-					for node in prunedGraph.successors(eachNode):
-						aSubGraph.add_edge(eachNode,node,weight=edgeWeight[0])
-			imgTitle = 'CC'+str(imgCount+1)
-			#drawGraph(aSubGraph, imgTitle, edgeWeight) #for eachNode in path:
-			imgCount +=1
-			#----------end build back ---------------------------------------------
-
-			mergeList, splitList = hasMergesOrSplits(path)	
-			#add node behavior regarding neutral, merge, split or both
-			for node in path:
-				if node in mergeList and node in splitList:
-					addNodeBehaviorIdentifier(node,'B')
-				elif node in mergeList and not node in splitList:
-					addNodeBehaviorIdentifier(node,'M')
-				elif node in splitList and not node in mergeList:
-					addNodeBehaviorIdentifier(node,'S')
-				else:
-					addNodeBehaviorIdentifier(node,'N')
-			
-
-			#Do the first part of checking for the MCC feature
-			#find the path
-			treeTraversalList = traverseTree(aSubGraph, orderedPath[0],[],[])
-			#print "treeTraversalList is ", treeTraversalList
-			#check the nodes to determine if a MCC on just the area criteria (consecutive nodes meeting the area and temp requirements)
-			MCCList = checkedNodesMCC(prunedGraph, treeTraversalList)
-			for aDict in MCCList:
-				for eachNode in aDict["fullMCSMCC"]:
-					addNodeMCSIdentifier(eachNode[0],eachNode[1])
-				
-			#do check for if MCCs overlap
-			if MCCList:
-				if len(MCCList) > 1:
-					for count in range(len(MCCList)): #for eachDict in MCCList:
-						#if there are more than two lists
-						if count >= 1:
-							#and the first node in this list
-							eachList = list(x[0] for x in MCCList[count]["possMCCList"])
-							eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
-							if eachList:
-								fNode = eachList[0]
-								#get the lastNode in the previous possMCC list
-								eachList = list(x[0] for x in MCCList[(count-1)]["possMCCList"])
-								eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
-								if eachList:
-									lNode = eachList[-1]
-									if lNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
-										for aNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
-											if aNode in eachList and aNode == lNode:
-												#if edge_data is equal or less than to the exisitng edge in the tree append one to the other
-												if CLOUD_ELEMENT_GRAPH.get_edge_data(aNode,fNode)['weight'] <= CLOUD_ELEMENT_GRAPH.get_edge_data(lNode,fNode)['weight']:
-													MCCList[count-1]["possMCCList"].extend(MCCList[count]["possMCCList"]) 
-													MCCList[count-1]["fullMCSMCC"].extend(MCCList[count]["fullMCSMCC"])
-													MCCList[count-1]["durationAandB"] +=  MCCList[count]["durationAandB"]
-													MCCList[count-1]["CounterCriteriaA"] += MCCList[count]["CounterCriteriaA"]
-													MCCList[count-1]["highestMCCnode"] = MCCList[count]["highestMCCnode"]
-													MCCList[count-1]["frameNum"] = MCCList[count]["frameNum"] 
-													removeList.append(count)
-				#update the MCCList
-				if removeList:
-					for i in removeList:
-						if (len(MCCList)-1) > i:
-							del MCCList[i]
-							removeList =[]
-				
-			#check if the nodes also meet the duration criteria and the shape crieria
-			for eachDict in MCCList:
-				#order the fullMCSMCC list, then run maximum extent and eccentricity criteria 
-				if (eachDict["durationAandB"] * TRES) >= MINIMUM_DURATION and (eachDict["durationAandB"] * TRES) <= MAXIMUM_DURATION:
-					eachList = list(x[0] for x in eachDict["fullMCSMCC"])
-					eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
-					eachMCCList = list(x[0] for x in eachDict["possMCCList"])
-					eachMCCList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
-					
-					#update the nodemcsidentifer behavior
-					#find the first element eachMCCList in eachList, and ensure everything ahead of it is indicated as 'I', 
-					#find last element in eachMCCList in eachList and ensure everything after it is indicated as 'D'
-					#ensure that everything between is listed as 'M'
-					for eachNode in eachList[:(eachList.index(eachMCCList[0]))]: 
-						addNodeMCSIdentifier(eachNode,'I')
-
-					addNodeMCSIdentifier(eachMCCList[0],'M')
-
-					for eachNode in eachList[(eachList.index(eachMCCList[-1])+1):]:
-						addNodeMCSIdentifier(eachNode, 'D')
-
-					#update definiteMCS list
-					for eachNode in orderedPath[(orderedPath.index(eachMCCList[-1])+1):]:
-						addNodeMCSIdentifier(eachNode, 'D')
-
-					#run maximum extent and eccentricity criteria
-					maxExtentNode, definiteMCCFlag = maxExtentAndEccentricity(eachList)
-					#print "maxExtentNode, definiteMCCFlag ", maxExtentNode, definiteMCCFlag
-					if definiteMCCFlag == True:
-						definiteMCC.append(eachList)
-
-
-			definiteMCS.append(orderedPath)
-			
-			#reset for next subGraph	
-			aSubGraph.clear()
-			orderedPath=[]
-			MCCList =[]
-			MCSList =[]
-			definiteMCSFlag = False
-		
-	return definiteMCC, definiteMCS
+    '''
+    MCCList = []
+    MCSList = []
+    definiteMCC = []
+    definiteMCS = []
+    eachList =[]
+    eachMCCList =[]
+    maturing = False
+    decaying = False
+    fNode = ''
+    lNode = ''
+    removeList =[]
+    imgCount = 0
+    imgTitle =''
+    
+    maxShieldNode = ''
+    orderedPath =[]
+    treeTraversalList =[]
+    definiteMCCFlag = False
+    unDirGraph = nx.Graph()
+    aSubGraph = nx.DiGraph()
+    definiteMCSFlag = False
+
+    
+    #connected_components is not available for DiGraph, so generate the graph as undirected 
+    #NB: connected_component_subgraphs was removed in NetworkX 2.4; there use
+    #(unDirGraph.subgraph(c) for c in nx.connected_components(unDirGraph)) instead
+    unDirGraph = PRUNED_GRAPH.to_undirected()
+    subGraph = nx.connected_component_subgraphs(unDirGraph)
+
+    #for each path in the subgraphs determined
+    for path in subGraph:
+        #a path qualifies as a definite MCS subtree provided its duration is longer than 3 hours
+
+        if len(path.nodes()) > MIN_MCS_DURATION:
+            orderedPath = path.nodes()
+            orderedPath.sort(key=lambda item:(len(item.split('C')[0]), item.split('C')[0]))
+            #definiteMCS.append(orderedPath)
+
+            #build back DiGraph for checking purposes/paper purposes
+            aSubGraph.add_nodes_from(path.nodes())	
+            for eachNode in path.nodes():
+                if prunedGraph.predecessors(eachNode):
+                    for node in prunedGraph.predecessors(eachNode):
+                        aSubGraph.add_edge(node,eachNode,weight=edgeWeight[0])
+
+                if prunedGraph.successors(eachNode):
+                    for node in prunedGraph.successors(eachNode):
+                        aSubGraph.add_edge(eachNode,node,weight=edgeWeight[0])
+            imgTitle = 'CC'+str(imgCount+1)
+            #drawGraph(aSubGraph, imgTitle, edgeWeight) #for eachNode in path:
+            imgCount +=1
+            #----------end build back ---------------------------------------------
+
+            mergeList, splitList = hasMergesOrSplits(path)	
+            #add node behavior regarding neutral, merge, split or both
+            for node in path:
+                if node in mergeList and node in splitList:
+                    addNodeBehaviorIdentifier(node,'B')
+                elif node in mergeList and not node in splitList:
+                    addNodeBehaviorIdentifier(node,'M')
+                elif node in splitList and not node in mergeList:
+                    addNodeBehaviorIdentifier(node,'S')
+                else:
+                    addNodeBehaviorIdentifier(node,'N')
+            
+
+            #Do the first part of checking for the MCC feature
+            #find the path
+            treeTraversalList = traverseTree(aSubGraph, orderedPath[0],[],[])
+            #print "treeTraversalList is ", treeTraversalList
+            #check the nodes to determine if a MCC on just the area criteria (consecutive nodes meeting the area and temp requirements)
+            MCCList = checkedNodesMCC(prunedGraph, treeTraversalList)
+            for aDict in MCCList:
+                for eachNode in aDict["fullMCSMCC"]:
+                    addNodeMCSIdentifier(eachNode[0],eachNode[1])
+                
+            #check whether MCCs overlap
+            if MCCList:
+                if len(MCCList) > 1:
+                    for count in range(len(MCCList)): #for eachDict in MCCList:
+                        #from the second list onwards, compare against the previous one
+                        if count >= 1:
+                            #and the first node in this list
+                            eachList = list(x[0] for x in MCCList[count]["possMCCList"])
+                            eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+                            if eachList:
+                                fNode = eachList[0]
+                                #get the lastNode in the previous possMCC list
+                                eachList = list(x[0] for x in MCCList[(count-1)]["possMCCList"])
+                                eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+                                if eachList:
+                                    lNode = eachList[-1]
+                                    if lNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
+                                        for aNode in CLOUD_ELEMENT_GRAPH.predecessors(fNode):
+                                            if aNode in eachList and aNode == lNode:
+                                                #if edge_data is less than or equal to the existing edge in the tree, append one to the other
+                                                if CLOUD_ELEMENT_GRAPH.get_edge_data(aNode,fNode)['weight'] <= CLOUD_ELEMENT_GRAPH.get_edge_data(lNode,fNode)['weight']:
+                                                    MCCList[count-1]["possMCCList"].extend(MCCList[count]["possMCCList"]) 
+                                                    MCCList[count-1]["fullMCSMCC"].extend(MCCList[count]["fullMCSMCC"])
+                                                    MCCList[count-1]["durationAandB"] +=  MCCList[count]["durationAandB"]
+                                                    MCCList[count-1]["CounterCriteriaA"] += MCCList[count]["CounterCriteriaA"]
+                                                    MCCList[count-1]["highestMCCnode"] = MCCList[count]["highestMCCnode"]
+                                                    MCCList[count-1]["frameNum"] = MCCList[count]["frameNum"] 
+                                                    removeList.append(count)
+                #update the MCCList
+                if removeList:
+                    for i in removeList:
+                        if (len(MCCList)-1) > i:
+                            del MCCList[i]
+                            removeList =[]
+                
+            #check if the nodes also meet the duration criteria and the shape criteria
+            for eachDict in MCCList:
+                #order the fullMCSMCC list, then run maximum extent and eccentricity criteria 
+                if (eachDict["durationAandB"] * TRES) >= MINIMUM_DURATION and (eachDict["durationAandB"] * TRES) <= MAXIMUM_DURATION:
+                    eachList = list(x[0] for x in eachDict["fullMCSMCC"])
+                    eachList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+                    eachMCCList = list(x[0] for x in eachDict["possMCCList"])
+                    eachMCCList.sort(key=lambda nodeID:(len(nodeID.split('C')[0]), nodeID.split('C')[0]))
+                    
+                    #update the nodeMCSIdentifier behavior
+                    #find the first element of eachMCCList in eachList, and ensure everything ahead of it is indicated as 'I', 
+                    #find the last element of eachMCCList in eachList and ensure everything after it is indicated as 'D'
+                    #ensure that everything in between is listed as 'M'
+                    for eachNode in eachList[:(eachList.index(eachMCCList[0]))]: 
+                        addNodeMCSIdentifier(eachNode,'I')
+
+                    addNodeMCSIdentifier(eachMCCList[0],'M')
+
+                    for eachNode in eachList[(eachList.index(eachMCCList[-1])+1):]:
+                        addNodeMCSIdentifier(eachNode, 'D')
+
+                    #update definiteMCS list
+                    for eachNode in orderedPath[(orderedPath.index(eachMCCList[-1])+1):]:
+                        addNodeMCSIdentifier(eachNode, 'D')
+
+                    #run maximum extent and eccentricity criteria
+                    maxExtentNode, definiteMCCFlag = maxExtentAndEccentricity(eachList)
+                    #print "maxExtentNode, definiteMCCFlag ", maxExtentNode, definiteMCCFlag
+                    if def

<TRUNCATED>

[3/3] climate git commit: CLIMATE-920 Make examples Python 3 compatible

Posted by le...@apache.org.
CLIMATE-920 Make examples Python 3 compatible


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/9bb21700
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/9bb21700
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/9bb21700

Branch: refs/heads/master
Commit: 9bb2170084ec61cee178c3742cb5a1be709966fe
Parents: ec3fdb4
Author: Lewis John McGibbney <le...@gmail.com>
Authored: Sat Jul 29 13:36:05 2017 -0700
Committer: Lewis John McGibbney <le...@gmail.com>
Committed: Sat Jul 29 13:36:05 2017 -0700

----------------------------------------------------------------------
 examples/esgf_integration_example.py   |   17 +-
 examples/podaac_integration_example.py |    7 +-
 examples/simple_model_to_model_bias.py |   14 +-
 examples/taylor_diagram_example.py     |   13 +-
 examples/time_series_with_regions.py   |   21 +-
 mccsearch/code/mccSearch.py            | 6292 +++++++++++++--------------
 ocw/data_source/esgf.py                |   11 +-
 ocw/esgf/download.py                   |   28 +-
 8 files changed, 3227 insertions(+), 3176 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/examples/esgf_integration_example.py
----------------------------------------------------------------------
diff --git a/examples/esgf_integration_example.py b/examples/esgf_integration_example.py
index 7a02632..8247435 100644
--- a/examples/esgf_integration_example.py
+++ b/examples/esgf_integration_example.py
@@ -18,14 +18,19 @@
 import ocw.data_source.esgf as esgf
 from getpass import getpass
 import ssl
+import sys
 
 if hasattr(ssl, '_create_unverified_context'):
     ssl._create_default_https_context = ssl._create_unverified_context
 
-dataset_id = 'obs4MIPs.CNES.AVISO.zos.mon.v20110829|esgf-data.jpl.nasa.gov'
+dataset_id = 'obs4mips.CNES.AVISO.zos.mon.v20110829|esgf-data.jpl.nasa.gov'
 variable = 'zosStderr'
 
-username = raw_input('Enter your ESGF OpenID:\n')
+if sys.version_info[0] >= 3:
+    username = input('Enter your ESGF OpenID:\n')
+else:
+    username = raw_input('Enter your ESGF OpenID:\n')
+
 password = getpass(prompt='Enter your ESGF Password:\n')
 
 # Multiple datasets are returned in a list if the ESGF dataset is
@@ -39,7 +44,7 @@ datasets = esgf.load_dataset(dataset_id,
 # we only need to look at the 0-th value in the returned list.
 ds = datasets[0]
 
-print '\n--------\n'
-print 'Variable: ', ds.variable
-print 'Shape: ', ds.values.shape
-print 'A Value: ', ds.values[100][100][100]
+print('\n--------\n')
+print('Variable: ', ds.variable)
+print('Shape: ', ds.values.shape)
+print('A Value: ', ds.values[100][100][100])
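
For reference, the same raw_input/input switch can be written without an explicit version
check via the six compatibility library (an assumption; six is not a dependency of this
commit):

    # six.moves.input maps to raw_input on Python 2 and input on Python 3
    from six.moves import input

    username = input('Enter your ESGF OpenID:\n')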

http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/examples/podaac_integration_example.py
----------------------------------------------------------------------
diff --git a/examples/podaac_integration_example.py b/examples/podaac_integration_example.py
index 7b8bb10..61663d7 100644
--- a/examples/podaac_integration_example.py
+++ b/examples/podaac_integration_example.py
@@ -24,8 +24,9 @@ datasetId = 'PODAAC-CCF30-01XXX'
 variable = 'uwnd'
 name = 'PO.DAAC_test_dataset'
 OUTPUT_PLOT = "ccmp_temporal_std"
-""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-print("Extracting Level4 granule %s and converting it into a OCW dataset object." % datasetId)
+""" Step 1: Download remote PO.DAAC Dataset and read it into an OCW Dataset Object"""
+print("Available Level4 PO.DAAC Granules: %s" % podaac.list_available_extract_granule_dataset_ids())
+print("Extracting variable '%s' from  Level4 granule '%s' and converting it into a OCW dataset object." % (variable, datasetId))
 ccmp_dataset = podaac.extract_l4_granule(
     variable=variable, dataset_id=datasetId, name=name)
 print("CCMP_Dataset.values shape: (times, lats, lons) - %s \n" %
@@ -67,7 +68,7 @@ print("Generating a contour map using ocw.plotter.draw_contour_map()")
 
 fname = OUTPUT_PLOT
 gridshape = (4, 5)  # 20 Years worth of plots. 20 rows in 1 column
-plot_title = "CCMP Temporal Standard Deviation"
+plot_title = "Cross-Calibrated Multi-Platform Temporal Standard Deviation"
 sub_titles = range(2002, 2010, 1)
 
 plotter.draw_contour_map(results, lats, lons, fname,

http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/examples/simple_model_to_model_bias.py
----------------------------------------------------------------------
diff --git a/examples/simple_model_to_model_bias.py b/examples/simple_model_to_model_bias.py
index ffa5cda..8e834b6 100644
--- a/examples/simple_model_to_model_bias.py
+++ b/examples/simple_model_to_model_bias.py
@@ -17,7 +17,15 @@
 
 import datetime
 from os import path
-import urllib
+import sys
+
+if sys.version_info[0] >= 3:
+    from urllib.request import urlretrieve
+else:
+    # Not Python 3 - today, it is most likely to be Python 2
+    # But note that this might need an update when Python 4
+    # might be around one day
+    from urllib import urlretrieve
 
 import numpy as np
 
@@ -39,9 +47,9 @@ FILE_1_PATH = path.join('/tmp', FILE_1)
 FILE_2_PATH = path.join('/tmp', FILE_2)
 
 if not path.exists(FILE_1_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)
+    urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)
 if not path.exists(FILE_2_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2_PATH)
+    urlretrieve(FILE_LEADER + FILE_2, FILE_2_PATH)
 
 """ Step 1: Load Local NetCDF Files into OCW Dataset Objects """
 print("Loading %s into an OCW Dataset Object" % (FILE_1_PATH,))

http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/examples/taylor_diagram_example.py
----------------------------------------------------------------------
diff --git a/examples/taylor_diagram_example.py b/examples/taylor_diagram_example.py
index 86236c8..8d5bbf0 100644
--- a/examples/taylor_diagram_example.py
+++ b/examples/taylor_diagram_example.py
@@ -18,7 +18,14 @@
 import datetime
 import sys
 from os import path
-import urllib
+
+if sys.version_info[0] >= 3:
+    from urllib.request import urlretrieve
+else:
+    # Not Python 3 - today, it is most likely to be Python 2
+    # But note that this might need an update when Python 4
+    # might be around one day
+    from urllib import urlretrieve
 
 import numpy
 
@@ -36,10 +43,10 @@ FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
 # Download some example NetCDF files for the evaluation
 ##########################################################################
 if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
+    urlretrieve(FILE_LEADER + FILE_1, FILE_1)
 
 if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
+    urlretrieve(FILE_LEADER + FILE_2, FILE_2)
 
 # Load the example datasets into OCW Dataset objects. We want to load
 # the 'tasmax' variable values. We'll also name the datasets for use

http://git-wip-us.apache.org/repos/asf/climate/blob/9bb21700/examples/time_series_with_regions.py
----------------------------------------------------------------------
diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py
index 05c4721..3bb133c 100644
--- a/examples/time_series_with_regions.py
+++ b/examples/time_series_with_regions.py
@@ -12,7 +12,15 @@ import datetime
 import numpy as np
 import numpy.ma as ma
 from os import path
-import urllib
+import sys
+
+if sys.version_info[0] >= 3:
+    from urllib.request import urlretrieve
+else:
+    # Not Python 3 - today, it is most likely to be Python 2
+    # But note that this might need an update when Python 4
+    # might be around one day
+    from urllib import urlretrieve
 import ssl
 if hasattr(ssl, '_create_unverified_context'):
     ssl._create_default_https_context = ssl._create_unverified_context
@@ -29,7 +37,7 @@ LAT_MIN = -45.0
 LAT_MAX = 42.24
 LON_MIN = -24.0
 LON_MAX = 60.0
-START = datetime.datetime(2000, 01, 1)
+START = datetime.datetime(2000, 1, 1)
 END = datetime.datetime(2007, 12, 31)
 
 EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX,
@@ -48,13 +56,16 @@ region_counter = 0
 
 # Download necessary NetCDF file if not present
 if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
+    print("Downloading %s" % (FILE_LEADER + FILE_1))
+    urlretrieve(FILE_LEADER + FILE_1, FILE_1)
 
 if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
+    print("Downloading %s" % (FILE_LEADER + FILE_2))
+    urlretrieve(FILE_LEADER + FILE_2, FILE_2)
 
 if not path.exists(FILE_3):
-    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
+    print("Downloading %s" % (FILE_LEADER + FILE_3))
+    urlretrieve(FILE_LEADER + FILE_3, FILE_3)
 
 """ Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
 target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
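
urlretrieve also accepts a reporthook callback that these download blocks could use to
show progress; a minimal sketch, assuming the standard reporthook signature:

    def report(blocknum, blocksize, totalsize):
        # called once per block read; sizes are in bytes, totalsize may be -1
        done = min(blocknum * blocksize, totalsize)
        print("downloaded %d of %d bytes" % (done, totalsize))

    # e.g. urlretrieve(FILE_LEADER + FILE_1, FILE_1, reporthook=report)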