[GRASS-SVN] r54411 - grass-addons/grass7/raster/r.modis/libmodis

svn_grass at osgeo.org
Thu Dec 27 02:12:03 PST 2012


Author: lucadelu
Date: 2012-12-27 02:12:03 -0800 (Thu, 27 Dec 2012)
New Revision: 54411

Modified:
   grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py
   grass-addons/grass7/raster/r.modis/libmodis/downmodis.py
   grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py
   grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py
Log:
r.modis: update modis library

Modified: grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py	2012-12-26 12:17:09 UTC (rev 54410)
+++ grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py	2012-12-27 10:12:03 UTC (rev 54411)
@@ -20,24 +20,23 @@
 ##################################################################
 
 from datetime import *
-import string
 import os
 import sys
-import glob
-import logging
-import socket
-from ftplib import FTP
-import ftplib
 
+
 class convertModis:
-  """A class to convert modis data from hdf to tif using resample (from MRT tools)
+  """A class to convert modis data from hdf to tif using resample
+  (from MRT tools)
   """
   def __init__(self, hdfname, confile, mrtpath):
     """Initialization function :
+
        hdfname = the full path to the hdf file
+
        confile = the full path to the paramater file
-       mrtpath = the full path to mrt directory where inside you have bin and 
-                 data directories
+
+       mrtpath = the full path to the mrt directory, which contains the bin
+       and data directories
     """
     # check if the hdf file exists
     if os.path.exists(hdfname):
@@ -76,7 +75,7 @@
         return os.path.join(self.mrtpath,'resample.exe')
 
   def run(self):
-    """Exec the process"""
+    """Exec the convertion process"""
     import subprocess
     execut = self.executable()
     if not os.path.exists(execut):
@@ -86,18 +85,23 @@
       subprocess.call([execut,'-p',self.conf])
     return "The hdf file %s was converted" % self.name
 
+
 class createMosaic:
-  """A class to convert a mosaic of different modis tiles"""
+  """A class to convert several MODIS tiles into a mosaic"""
   def __init__(self,
               listfile,
               outprefix,
               mrtpath,
               subset = False
               ):
+    import tempfile
     # check if the hdf file exists
     if os.path.exists(listfile):
       self.basepath = os.path.split(listfile)[0]
+      self.fullpath = os.path.realpath(self.basepath)
       self.listfiles = listfile
+      self.tmplistfiles = open(os.path.join(tempfile.gettempdir(),
+                               '%s.prm' % str(os.getpid())), 'w')
       self.HDFfiles = open(listfile).readlines()
     else:
       raise IOError('%s not exists' % hdfname)
@@ -121,16 +125,19 @@
     self.subset = subset
 
   def write_mosaic_xml(self):
+    """Write the XML metadata file for MODIS mosaic"""
     from parsemodis import parseModisMulti
     listHDF = []
     for i in self.HDFfiles:
       if i.find(self.basepath) == -1:
         print "Attection maybe you have the not full path in the HDF file list"
         listHDF.append(os.path.join(self.basepath,i.strip()))
-      else:
+      elif i.find('.hdf.xml') == -1:
         listHDF.append(i.strip())
+        self.tmplistfiles.write(os.path.join(self.fullpath,i))
     pmm = parseModisMulti(listHDF)
     pmm.writexml(self.outxml)
+    self.tmplistfiles.close()
 
 
   def executable(self):
@@ -153,13 +160,14 @@
     else:
       self.write_mosaic_xml()
       if self.subset:
-        subprocess.call([execut,'-i',self.listfiles,'-o',self.out,'-s',self.subset], 
+        subprocess.call([execut,'-i',self.tmplistfiles.name,'-o',self.out,'-s',
+                         self.subset], stderr = subprocess.STDOUT)
+      else:
+        subprocess.call([execut,'-i',self.tmplistfiles.name,'-o',self.out],
                         stderr = subprocess.STDOUT)
-      else:
-        subprocess.call([execut,'-i',self.listfiles,'-o',self.out], stderr = 
-                        subprocess.STDOUT)
     return "The mosaic file %s is created" % self.out
 
+
 class processModis:
   """A class to process raw modis data from hdf to tif using swath2grid (from MRT Swath tools)
   """
@@ -210,7 +218,7 @@
         return os.path.join(self.mrtpath,'swath2grid.exe')
 
   def run(self):
-    """Exec the process"""
+    """Exec the convertion process"""
     import subprocess
     execut = self.executable()
     if not os.path.exists(execut):

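For orientation, here is a minimal usage sketch of the updated convertmodis classes. The paths below are hypothetical, the mrt directory is assumed to contain the bin and data subdirectories, and createMosaic.run() is assumed to behave as in the unchanged part of the file:

    # hypothetical paths: adjust to your data and MRT installation
    from convertmodis import convertModis, createMosaic

    hdf = '/data/modis/MOD11A1.A2012340.h18v04.005.hdf'
    conf = '/data/modis/h18v04_mrt_resample.conf'   # written by parsemodis.confResample()
    mrtpath = '/usr/local/mrt'                      # contains bin/ and data/

    conv = convertModis(hdf, conf, mrtpath)
    print conv.run()        # runs the MRT 'resample' tool on the parameter file

    # mosaic several tiles listed one per line in a text file
    mosaic = createMosaic('/data/modis/listfileMOD11A1.005.txt', 'lst_mosaic',
                          mrtpath)
    print mosaic.run()
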
Modified: grass-addons/grass7/raster/r.modis/libmodis/downmodis.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/downmodis.py	2012-12-26 12:17:09 UTC (rev 54410)
+++ grass-addons/grass7/raster/r.modis/libmodis/downmodis.py	2012-12-27 10:12:03 UTC (rev 54411)
@@ -20,44 +20,51 @@
 ##################################################################
 
 from datetime import *
-import string
 import os
-import sys
 import glob
 import logging
 import socket
 from ftplib import FTP
 import ftplib
 
+
 class downModis:
   """A class to download MODIS data from NASA FTP repository"""
-  def __init__(self, 
+  def __init__(self,
                 password,
                 destinationFolder,
-                user = "anonymous",
-                url = "e4ftl01.cr.usgs.gov",
-                tiles = None,
-                path = "MOLT/MOD11A1.005",
-                today = None,
-                enddate = None,
-                delta = 10,
-                jpg = False,
-                debug = False
+                user="anonymous",
+                url="e4ftl01.cr.usgs.gov",
+                tiles=None,
+                path="MOLT/MOD11A1.005",
+                today=None,
+                enddate=None,
+                delta=10,
+                jpg=False,
+                debug=False
               ):
     """Initialization function :
+
         password = is your password, usually your email address
+
         destinationFolder = where the files will be stored
+
         user = your username, by default anonymous
+
         url = the url where to download the MODIS data
-        path = the directory where the data that you want to download are 
+
+        path = the directory where the data that you want to download are
                stored on the ftp server
+
         tiles = a list of tiles that you want to download, None == all tiles
-        today = the day to start downloading; in order to pass a date different from
-                today use the format YYYY-MM-DD
-        delta = timelag i.e. the number of days starting from today 
+
+        today = the day to start downloading; in order to pass a date different
+                from today use the format YYYY-MM-DD
+
+        delta = time lag, i.e. the number of days starting from today
                 (backwards
 
-        Creates a ftp instance, connects user to ftp server and goes into the 
+        Creates an ftp instance, connects to the ftp server and goes into the
         directory where the MODIS data are stored
     """
 
@@ -75,7 +82,7 @@
     else:
         self.tiles = tiles
     # set destination folder
-    if os.access(destinationFolder,os.W_OK):
+    if os.access(destinationFolder, os.W_OK):
       self.writeFilePath = destinationFolder
     else:
       raise IOError("Folder to store downloaded files does not exist or is not" \
@@ -87,7 +94,7 @@
       self.product = self.path.split('/')[2]
     # write a file with the name of file downloaded
     self.filelist = open(os.path.join(self.writeFilePath, 'listfile' \
-    + self.product + '.txt'),'w')
+    + self.product + '.txt'), 'w')
     # set jpg download
     self.jpeg = jpg
     # today
@@ -103,19 +110,32 @@
     # for logging
     LOG_FILENAME = os.path.join(self.writeFilePath, 'modis' \
     + self.product + '.log')
-    LOGGING_FORMAT='%(asctime)s - %(levelname)s - %(message)s'
+    LOGGING_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
     logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG, \
     format=LOGGING_FORMAT)
     self.nconnection = 0
-    
-  def connectFTP(self, ncon = 20):
+
+  def removeEmptyFiles(self):
+    """Check if some file has size ugual 0"""
+    year = str(date.today().year)
+    pref = self.product.split('.')[0]
+    files = glob.glob1(self.writeFilePath, '%s.A%s*' % (pref, year))
+    for f in files:
+        fil = os.path.join(self.writeFilePath, f)
+        if os.path.getsize(fil) == 0:
+            os.remove(fil)
+
+  def connectFTP(self, ncon=20):
     """ Set connection to ftp server, move to path where data are stored
-    and create a list of directory for all days"""
+    and create a list of directories for all days
+
+    ncon = maximum number of attempts to connect to the ftp server
+    """
     self.nconnection += 1
     try:
       # connect to ftp server
       self.ftp = FTP(self.url)
-      self.ftp.login(self.user,self.password)
+      self.ftp.login(self.user, self.password)
       # enter in directory
       self.ftp.cwd(self.path)
       self.dirData = []
@@ -136,14 +156,14 @@
     """ Close ftp connection """
     self.ftp.quit()
     self.filelist.close()
-    if self.debug==True:
+    if self.debug == True:
       logging.debug("Close connection %s" % self.url)
 
-  def setDirectoryIn(self,day):
+  def setDirectoryIn(self, day):
     """ Enter in the directory of the day """
     try:
       self.ftp.cwd(day)
-    except (ftplib.error_reply,socket.error), e:
+    except (ftplib.error_reply, socket.error), e:
       logging.error("Error %s entering in directory %s" % e, day)
       self.setDirectoryIn(day)
 
@@ -151,36 +171,38 @@
     """ Come back to old path """
     try:
       self.ftp.cwd('..')
-    except (ftplib.error_reply,socket.error), e:
+    except (ftplib.error_reply, socket.error), e:
       logging.error("Error %s when try to come back" % e)
       self.setDirectoryOver()
 
-  def str2date(self,strin):
-      """Return a date object from a string"""
+  def _str2date(self, strin):
+      """Return a date object from a string
+
+      strin = text string representing a date (e.g. 2012-10-04)
+      """
       todaySplit = strin.split('-')
-      return date(int(todaySplit[0]), int(todaySplit[1]),int(todaySplit[2]))
+      return date(int(todaySplit[0]), int(todaySplit[1]), int(todaySplit[2]))
 
-  def getToday(self):
+  def _getToday(self):
     """Return the first day for start to download"""
     if self.today == None:
       # set today variable to today
       self.today = date.today()
     else:
       # set today variable to data pass from user
-      self.today = self.str2date(self.today)
+      self.today = self._str2date(self.today)
       # set enday variable to data
     if self.enday != None:
-      self.enday = self.str2date(self.enday)
-      
+      self.enday = self._str2date(self.enday)
+
   def getListDays(self):
-      """ Return a list of all selected days """
-      self.getToday()
+      """Return a list of all selected days"""
+      self._getToday()
 
       today_s = self.today.strftime("%Y.%m.%d")
       # dirData is reverse sorted
       for i, d in enumerate(self.dirData):
         if d <= today_s:
-          today_avail = d
           today_index = i
           break
       else:
@@ -195,7 +217,7 @@
         delta = 0
         # it make a for cicle from the last value and find the internal delta
         #to remove file outside temporaly range
-        for i in range(-(len(days)),0):
+        for i in range(-(len(days)), 0):
           if days[i] < enday_s:
             break
           else:
@@ -204,14 +226,18 @@
         days = days[:delta]
       return days
 
+  def getAllDays(self):
+    """Return a list of all days"""
+    return self.dirData
+
   def getFilesList(self):
-    """ Create a list of files to download, it is possible choose to download 
+    """ Create a list of files to download, it is possible choose to download
     also the jpeg files or only the hdf files"""
-    def cicle_file(jpeg=False,tile=True):
+    def cicle_file(jpeg=False, tile=True):
       finalList = []
       for i in self.listfiles:
         File = i.split('.')
-        # distinguish jpeg files from hdf files by the number of index 
+        # distinguish jpeg files from hdf files by the number of index
         # where find the tile index
         if not tile and not (File.count('jpg') or File.count('BROWSE')):
           finalList.append(i)
@@ -223,7 +249,7 @@
 
     # return the file's list inside the directory of each day
     try:
-      self.listfiles = self.ftp.nlst() 
+      self.listfiles = self.ftp.nlst()
       # download also jpeg
       if self.jpeg:
         # finallist is ugual to all file with jpeg file
@@ -235,25 +261,29 @@
       # not download jpeg
       else:
         if not self.tiles:
-          finalList = cicle_file(tile=False)          
+          finalList = cicle_file(tile=False)
         else:
           finalList = cicle_file()
-      if self.debug==True:
+      if self.debug == True:
         logging.debug("The number of file to download is: %i" % len(finalList))
       return finalList
-    except (ftplib.error_reply,socket.error), e:
+    except (ftplib.error_reply, socket.error), e:
       logging.error("Error %s when try to receive list of files" % e)
       self.getFilesList()
 
   def checkDataExist(self,listNewFile, move = 0):
-    """ Check if a data already exists in the directory of download 
-    Move serve to know if function is called from download or move function"""
+    """ Check if a file already exists in the directory of download
+
+    listNewFile = list of all files, returned by getFilesList function
+
+    move = 0 if called from the download function, 1 if called from moveFile
+    """
     fileInPath = []
     # add all files in the directory where we will save new modis data
     for f in os.listdir(self.writeFilePath):
       if os.path.isfile(os.path.join(self.writeFilePath, f)):
         fileInPath.append(f)
-    # different return if this method is used from downloadsAllDay() or 
+    # different return if this method is used from downloadsAllDay() or
     # moveFile()
     if move == 0:
       listOfDifferent = list(set(listNewFile) - set(fileInPath))
@@ -261,8 +291,13 @@
       listOfDifferent = list(set(fileInPath) - set(listNewFile))
     return listOfDifferent
 
-  def getNewerVersion(self,oldFile,newFile):
-    """ Return newer version of a file"""
+  def getNewerVersion(self, oldFile, newFile):
+    """ Return newer version of a file
+    
+    oldFile = one of the two similar file
+    
+    newFile = one of the two similar file
+    """
     oldFileSplit = oldFile.split('.')
     newFileSplit = newFile.split('.')
     if oldFileSplit[4] > newFileSplit[4]:
@@ -270,22 +305,45 @@
     else:
       return newFile
 
-  def downloadFile(self,filDown,filSave):
-    """Download the single file"""
-    #try to download file
+  def _downloadFile(self, filDown, filHdf):
+    """Download the single file
+    
+    filDown = name of the file to download
+    
+    filHdf = full path of the local file to write
+    """
+    filSave = open(filHdf, "wb")
     try:
       self.ftp.retrbinary("RETR " + filDown, filSave.write)
       self.filelist.write("%s\n" % filDown)
       if self.debug==True:
         logging.debug("File %s downloaded" % filDown)
     #if it have an error it try to download again the file
-    except (ftplib.error_reply,socket.error), e:
+    except (ftplib.error_reply, socket.error, ftplib.error_temp, EOFError), e:
       logging.error("Cannot download %s, retry.." % filDown)
-      self.connectFTP()
-      self.downloadFile(filDown,filSave)
+      filSave.close()
+      os.remove(filSave.name)
+      try:
+          self.ftp.pwd()
+      except (ftplib.error_temp, EOFError), e:
+          self.connectFTP()
+      self._downloadFile(filDown, filHdf)
+    filSave.close()
+    orig_size = self.ftp.size(filDown)
+    transf_size = os.path.getsize(filSave.name)
+    if orig_size == transf_size:
+      return 0
+    else:
+      logging.warning("Different size for file %s - original data: %s, downloaded: %s" %
+                      (filDown, orig_size, transf_size))
+      os.remove(filSave.name)
+      self._downloadFile(filDown,filHdf)
 
-  def dayDownload(self,listFilesDown):
-    """ Downloads tiles are in files_hdf_consider """
+  def dayDownload(self, listFilesDown):
+    """ Downloads tiles are in files_hdf_consider
+
+    listFilesDown = list of the files to download, returned by checkDataExist function
+    """
     # for each file in files' list
     for i in listFilesDown:
         fileSplit = i.split('.')
@@ -298,24 +356,29 @@
           + fileSplit[-1])
           numFiles = len(oldFile)
           if numFiles == 0:
-            file_hdf = open(os.path.join(self.writeFilePath,i), "wb")
+            file_hdf = os.path.join(self.writeFilePath, i)
           elif numFiles == 1:
             # check the version of file  
-            fileDown = self.getNewerVersion(oldFile[0],i)
+            fileDown = self.getNewerVersion(oldFile[0], i)
             if fileDown != oldFile[0]:
-              os.remove(os.path.join(self.writeFilePath,oldFile[0]))
-              file_hdf = open(os.path.join(self.writeFilePath,fileDown), "wb")
+              os.remove(os.path.join(self.writeFilePath, oldFile[0]))
+              file_hdf = os.path.join(self.writeFilePath, fileDown)
           elif numFiles > 1:
             logging.error("There are to much files for %s" % i)
             #raise EOFError("There are to much file with the same prefix")
           if numFiles == 0 or (numFiles == 1 and fileDown != oldFile[0]):
-            self.downloadFile(i,file_hdf)
+            self._downloadFile(i, file_hdf)
 
-  def downloadsAllDay(self):
+  def downloadsAllDay(self, clean=False, allDays=False):
     """ Downloads all the tiles considered """
     #return the days to download
-    days = self.getListDays()
-    if self.debug==True:
+    if clean:
+        self.removeEmptyFiles()
+    if allDays:
+        days = self.getAllDays()
+    else:
+        days = self.getListDays()
+    if self.debug == True:
       logging.debug("The number of days to download is: %i" % len(days))
     #for each day
     for day in days:
@@ -329,11 +392,12 @@
       self.dayDownload(listFilesDown)
       self.setDirectoryOver()
     self.closeFTP()
-    if self.debug==True:
+    if self.debug == True:
       logging.debug("Download terminated")
     return 0
 
   def debugLog(self):
+    """Function to create the debug file"""
     # create logger
     logger = logging.getLogger("PythonLibModis debug")
     logger.setLevel(logging.DEBUG)
@@ -367,9 +431,9 @@
     # the lenght of list of days and delta are ugual
     else:
       logger.info("All right!!")
-    
+
   def debugMaps(self):
-    """This function is useful to debug the number of maps to download for 
+    """This function is useful to debug the number of maps to download for
     each day"""
     logger = debugLog()
     days = self.getListDays()
@@ -378,4 +442,4 @@
       listAllFiles = self.getFilesList()
       string = day + ": " + str(len(listAllFiles)) + "\n"
       logger.debug(string)
-      self.setDirectoryOver()   
+      self.setDirectoryOver()
\ No newline at end of file

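As a usage note, here is a minimal sketch of the downModis workflow shown above. The e-mail address, destination folder and tile list are hypothetical; the new clean and allDays flags of downloadsAllDay() are used as documented in the diff:

    from downmodis import downModis

    down = downModis(password='your.email@example.com',    # used as FTP password
                     destinationFolder='/data/modis/hdf',   # must exist and be writable
                     tiles=['h18v04', 'h18v05'],            # None means all tiles
                     path='MOLT/MOD11A1.005',
                     today='2012-12-20', delta=5, debug=True)
    down.connectFTP()                    # retried automatically up to ncon times
    down.downloadsAllDay(clean=True)     # clean=True removes zero-size files first
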
Modified: grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py	2012-12-26 12:17:09 UTC (rev 54410)
+++ grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py	2012-12-27 10:12:03 UTC (rev 54411)
@@ -22,31 +22,30 @@
 from datetime import *
 import string
 import os
-import sys
-import glob
-import logging
-import socket
-from ftplib import FTP
-import ftplib
 
 ## lists of parameters accepted by resample MRT software
 # projections
-PROJ_LIST = ['AEA','GEO', 'HAM', 'IGH', 'ISIN', 'LA', 'LCC', 'MOL', 'PS', 
-                    'SIN','TM', 'UTM', 'MERCAT']
+PROJ_LIST = ['AEA','GEO', 'HAM', 'IGH', 'ISIN', 'LA', 'LCC', 'MOL', 'PS',
+             'SIN','TM', 'UTM', 'MERCAT']
 # resampling
 RESAM_LIST = ['NEAREST_NEIGHBOR', 'BICUBIC', 'CUBIC_CONVOLUTION', 'NONE']
 RESAM_LIST_SWATH = ['NN', 'BI', 'CC']
 
 # datum
 DATUM_LIST = ['NODATUM', 'NAD27', 'NAD83', 'WGS66', 'WGS72', 'WGS84']
-SPHERE_LIST = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20]
+SPHERE_LIST = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+               19, 20]
 
+
 class parseModis:
-  """Class to parse MODIS xml files, it also can create the parameter 
-    configuration file for resampling with the MRT software
+  """Class to parse MODIS xml files, it also can create the parameter
+     configuration file for resampling MODIS DATA with the MRT software or
+     convertmodis Module
   """
+
   def __init__(self, filename):
     """Initialization function :
+
        filename = the name of MODIS hdf file
     """
     from xml.etree import ElementTree
@@ -63,7 +62,7 @@
       raise IOError('%s not exists' % self.hdfname + '.xml')
 
     # tif name for the output file for resample MRT software
-    self.tifname = self.hdfname.replace('.hdf','.tif')
+    self.tifname = self.hdfname.replace('.hdf', '.tif')
     with open(self.xmlname) as f:
       self.tree = ElementTree.parse(f)
     # return the code of tile for conf file
@@ -76,11 +75,11 @@
     try:
       for node in self.tree.iter():
         if node.text.strip() != '':
-          retString = "%s = %s\n" % (node.tag,node.text) 
+          retString = "%s = %s\n" % (node.tag, node.text)
     except:
       for node in self.tree.getiterator():
         if node.text.strip() != '':
-          retString = "%s = %s\n" % (node.tag,node.text) 
+          retString = "%s = %s\n" % (node.tag, node.text)
     return retString
 
   def getRoot(self):
@@ -113,17 +112,17 @@
     return self.granule.find('DbID').text
 
   def retInsertTime(self):
-    """Return the DbID element"""
+    """Return the InsertTime element"""
     self.getGranule()
     return self.granule.find('InsertTime').text
 
   def retLastUpdate(self):
-    """Return the DbID element"""
+    """Return the LastUpdate element"""
     self.getGranule()
     return self.granule.find('LastUpdate').text
 
   def retCollectionMetaData(self):
-    """Return the CollectionMetaData element"""
+    """Return the CollectionMetaData element as dictionary"""
     self.getGranule()
     collect = {}
     for i in self.granule.find('CollectionMetaData').getiterator():
@@ -132,7 +131,7 @@
     return collect
 
   def retDataFiles(self):
-    """Return the DataFiles element"""
+    """Return the DataFiles element as dictionary"""
     self.getGranule()
     collect = {}
     datafiles = self.granule.find('DataFiles')
@@ -142,7 +141,7 @@
     return collect
 
   def retDataGranule(self):
-    """Return the ECSDataGranule elements"""
+    """Return the ECSDataGranule elements as dictionary"""
     self.getGranule()
     datagran = {}
     for i in self.granule.find('ECSDataGranule').getiterator():
@@ -156,8 +155,7 @@
     return self.granule.find('PGEVersionClass').find('PGEVersion').text
 
   def retRangeTime(self):
-    """Return the RangeDateTime elements inside a dictionary with the element
-       name like dictionary key
+    """Return the RangeDateTime elements as dictionary
     """
     self.getGranule()
     rangeTime = {}
@@ -167,7 +165,8 @@
     return rangeTime
 
   def retBoundary(self):
-    """Return the maximum extend of the MODIS file inside a dictionary"""
+    """Return the maximum extend (Bounding Box) of the MODIS file as
+    dictionary"""
     self.getGranule()
     self.boundary = []
     lat = []
@@ -180,13 +179,13 @@
       lo = float(i.find('PointLatitude').text)
       lon.append(la)
       lat.append(lo)
-      self.boundary.append({'lat': la, 'lon':lo})
-    extent = {'min_lat':min(lat),'max_lat':max(lat),'min_lon':min(lon),
-                'max_lon':max(lon)}
+      self.boundary.append({'lat': la, 'lon': lo})
+    extent = {'min_lat': min(lat), 'max_lat': max(lat), 'min_lon': min(lon),
+              'max_lon': max(lon)}
     return extent
 
   def retMeasure(self):
-    """Return statistics inside a dictionary"""
+    """Return statistics of QA as dictionary"""
     value = {}
     self.getGranule()
     mes = self.granule.find('MeasuredParameter')
@@ -207,7 +206,7 @@
     return value
 
   def retPlatform(self):
-    """Return the platform values inside a dictionary."""
+    """Return the platform values as dictionary."""
     value = {}
     self.getGranule()
     plat = self.granule.find('Platform')
@@ -219,7 +218,7 @@
     return value
 
   def retPSA(self):
-    """Return the PSA values inside a dictionary, the PSAName is he key and
+    """Return the PSA values as dictionary, the PSAName is the key and
        and PSAValue is the value
     """
     value = {}
@@ -230,7 +229,8 @@
     return value
 
   def retInputGranule(self):
-    """Return the input files used to process the considered file"""
+    """Return the input files (InputGranule) used to process the considered
+    file"""
     value = []
     self.getGranule()
     for i in self.granule.find('InputGranule').getiterator():
@@ -239,48 +239,70 @@
     return value
 
   def retBrowseProduct(self):
-    """Return the PGEVersion element"""
+    """Return the BrowseProduct element"""
     self.getGranule()
     try:
         value = self.granule.find('BrowseProduct').find('BrowseGranuleId').text
     except:
         value = None
-    return value    
+    return value
 
-  def confResample(self, spectral, res = None, output = None, datum = 'WGS84',
-                  resampl = 'NEAREST_NEIGHBOR', projtype = 'GEO',  utm = None,
-                  projpar = '( 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 )',
+  def confResample(self, spectral, res=None, output=None, datum='WGS84',
+                  resample='NEAREST_NEIGHBOR', projtype='GEO',  utm=None,
+                  projpar='( 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 )',
                   ):
     """Create the parameter file to use with resample MRT software to create
-       tif file
-        spectral = the spectral subset to use, look the product table to 
-                   understand the layer that you want use. 
-                   For example: 
-                    - NDVI ( 1 1 1 0 0 0 0 0 0 0 0 0) copy only layer NDVI, EVI 
-                      and QA VI the other layers are not used
-                    - LST ( 1 1 0 0 1 1 0 0 0 0 0 0 ) copy only layer daily and
-                      nightly temperature and QA
-        res = the resolution for the output file, it must be set in the map 
-              unit of output projection system. The software will use the original
-              resolution of input file if res it isn't set
-        output = the output name, if it doesn't set will use the prefix name of 
-                 input hdf file
+    tif file
+
+        spectral = the spectral subset to be used, look at the product table
+        to understand the layers that you want to use. For example:
+
+            - NDVI ( 1 1 1 0 0 0 0 0 0 0 0 0 ) copy only the NDVI, EVI
+              and VI Quality layers, the other layers are not used
+            - LST ( 1 1 0 0 1 1 0 0 0 0 0 0 ) copy only the daily and
+              nightly temperature layers and their QA
+
+        res = the resolution for the output file, it must be set in the map
+        unit of the output projection system. The software will use the
+        original resolution of the input file if res is not set
+
+        output = the output name; if it is not set, the prefix name
+        of the input hdf file will be used
+
         utm = the UTM zone if projection system is UTM
-        resampl = the type of resampling, the valid values are: NN (nearest 
-                  neighbor), BI (bilinear), CC (cubic convolution)
-        projtype = the output projection system, the valid values are: AEA 
-                   (Albers Equal Area), ER (Equirectangular), GEO (Geographic 
-                   Latitude/Longitude), HAM (Hammer), ISIN (Integerized Sinusoidal), 
-                   IGH (Interrupted Goode Homolosine), LA (Lambert Azimuthal), 
-                   LCC (LambertConformal Conic), MERCAT (Mercator), MOL (Mollweide), 
-                   PS (Polar Stereographic), SIN ()Sinusoidal), UTM (Universal 
-                   TransverseMercator)
-        datum = the datum to use, the valid values are: NAD27, NAD83, WGS66,
-                WGS76, WGS84, NODATUM
-        projpar = a list of projection parameters, for more info check the 
-                  "Appendix C" of MODIS reprojection tool user's manual
-                  https://lpdaac.usgs.gov/content/download/4831/22895/file/mrt41_usermanual_032811.pdf
 
+        resample = the type of resampling, the valid values are:
+            - NEAREST_NEIGHBOR
+            - BICUBIC
+            - CUBIC_CONVOLUTION
+
+        projtype = the output projection system, the valid values are:
+            - AEA (Albers Equal Area)
+            - ER (Equirectangular)
+            - GEO (Geographic Latitude/Longitude)
+            - HAM (Hammer)
+            - ISIN (Integerized Sinusoidal)
+            - IGH (Interrupted Goode Homolosine)
+            - LA (Lambert Azimuthal)
+            - LCC (Lambert Conformal Conic)
+            - MERCAT (Mercator)
+            - MOL (Mollweide)
+            - PS (Polar Stereographic)
+            - SIN (Sinusoidal)
+            - UTM (Universal Transverse Mercator)
+
+        datum = the datum to use, the valid values are:
+            - NAD27
+            - NAD83
+            - WGS66
+            - WGS72
+            - WGS84
+            - NODATUM
+
+        projpar = a list of projection parameters, for more info check
+        Appendix C of the MODIS Reprojection Tool user manual
+        https://lpdaac.usgs.gov/content/download/4831/22895/file/mrt41_usermanual_032811.pdf
+
         """
     #check if spectral it's write with correct construct ( value )
     if string.find(spectral, '(') == -1 or  string.find(spectral, ')') == -1:
@@ -291,7 +313,7 @@
     else:
       fileout = output
     # the name of the output parameters files for resample MRT software
-    filename = os.path.join(self.path,'%s_mrt_resample.conf' % self.code)
+    filename = os.path.join(self.path, '%s_mrt_resample.conf' % self.code)
     # if the file already exists it remove it 
     if os.path.exists(filename):
       os.remove(filename)
@@ -308,25 +330,26 @@
     conFile.write("SPATIAL_SUBSET_LR_CORNER = ( %f %f )\n" % (bound['min_lat'],
                                                               bound['max_lon']))
     conFile.write("OUTPUT_FILENAME = %s\n" % fileout)
-    # if resampl is in resam_list set the parameter otherwise return an error
-    if resampl in RESAM_LIST:
-      conFile.write("RESAMPLING_TYPE = %s\n" % resampl)
+    # if resample is in resam_list set the parameter otherwise return an error
+    if resample in RESAM_LIST:
+      conFile.write("RESAMPLING_TYPE = %s\n" % resample)
     else:
       raise IOError('The resampling type %s is not supportet.\n' \
-                   'The resampling type supported are %s' % (resampl,RESAM_LIST))
+                   'The resampling type supported are %s' % (resample,
+                                                             RESAM_LIST))
     # if projtype is in proj_list set the parameter otherwise return an error
     if projtype in PROJ_LIST:
       conFile.write("OUTPUT_PROJECTION_TYPE = %s\n" % projtype)
     else:
       raise IOError('The projection type %s is not supported.\n' \
-                   'The projections supported are %s' % (projtype,PROJ_LIST))
+                   'The projections supported are %s' % (projtype, PROJ_LIST))
     conFile.write("OUTPUT_PROJECTION_PARAMETERS = %s\n" % projpar)
     # if datum is in datum_list set the parameter otherwise return an error
     if datum in DATUM_LIST:
       conFile.write("DATUM = %s\n" % datum)
     else:
       raise IOError('The datum %s is not supported.\n' \
-                   'The datum supported are %s' % (datum,DATUM_LIST))
+                   'The datum supported are %s' % (datum, DATUM_LIST))
     # if utm is not None write the UTM_ZONE parameter in the file
     if utm:
       conFile.write("UTM_ZONE = %s\n" % utm)
@@ -336,39 +359,72 @@
     conFile.close()
     return filename
 
-  def confResample_swath(self, sds, geoloc, res, output = None, 
-                  sphere = '8', resampl = 'NN', projtype = 'GEO',  utm = None,
-                  projpar = '0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0',
+  def confResample_swath(self, sds, geoloc, res, output=None, 
+                  sphere='8', resample='NN', projtype='GEO',  utm=None,
+                  projpar='0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0',
                   ):
     """Create the parameter file to use with resample MRT software to create
        tif file
+
         sds = Name of band/s (Science Data Set) to resample
+
         geoloc = Name geolocation file (example MOD3, MYD3)
-        res = the resolution for the output file, it must be set in the map 
-              unit of output projection system. The software will use the original
-              resolution of input file if res it isn't set
-        output = the output name, if it doesn't set will use the prefix name of 
-                 input hdf file
-        sphere = Output sphere number. Valid options are: 
-                 0=Clarke 1866, 1=Clarke 1880, 2=Bessel, 3=International 1967, 
-                 4=International 1909, 5=WGS 72, 6=Everest, 7=WGS 66, 
-                 8=GRS1980/WGS 84, 9=Airy, 10=Modified Everest, 11=Modified Airy, 
-                 12=Walbeck, 13=Southeast Asia, 14=Australian National, 
-                 15=Krassovsky, 16=Hough, 17=Mercury1960, 18=Modified Mercury1968, 
-                 19=Sphere 19 (Radius 6370997), 20=MODIS Sphere (Radius 6371007.181)
-        resampl = the type of resampling, the valid values are: NN (nearest 
-                  neighbor), BI (bilinear), CC (cubic convolution)
-        projtype = the output projection system, the valid values are: 
-                   AEA (Albers Equal Area), ER (Equirectangular), 
-                   GEO (Geographic Latitude/Longitude), HAM (Hammer), 
-                   ISIN (Integerized Sinusoidal),IGH (Interrupted Goode Homolosine), 
-                   LA (Lambert Azimuthal), LCC (LambertConformal Conic),
-                   MERCAT (Mercator), MOL (Mollweide), PS (Polar Stereographic),
-                   SIN ()Sinusoidal), UTM (Universal TransverseMercator)
+
+        res = the resolution for the output file, it must be set in the map
+        unit of the output projection system. The software will use the
+        original resolution of the input file if res is not set
+
+        output = the output name; if it is not set, the prefix name
+        of the input hdf file will be used
+
+        sphere = Output sphere number. Valid options are:
+            - 0=Clarke 1866
+            - 1=Clarke 1880
+            - 2=Bessel
+            - 3=International 1967
+            - 4=International 1909
+            - 5=WGS 72
+            - 6=Everest
+            - 7=WGS 66
+            - 8=GRS1980/WGS 84
+            - 9=Airy
+            - 10=Modified Everest
+            - 11=Modified Airy
+            - 12=Walbeck
+            - 13=Southeast Asia
+            - 14=Australian National
+            - 15=Krassovsky
+            - 16=Hough
+            - 17=Mercury1960
+            - 18=Modified Mercury1968
+            - 19=Sphere 19 (Radius 6370997)
+            - 20=MODIS Sphere (Radius 6371007.181)
+
+        resample = the type of resampling, the valid values are:
+            - NN (nearest neighbor)
+            - BI (bilinear)
+            - CC (cubic convolution)
+
+        projtype = the output projection system, the valid values are:
+            - AEA (Albers Equal Area)
+            - ER (Equirectangular)
+            - GEO (Geographic Latitude/Longitude)
+            - HAM (Hammer)
+            - ISIN (Integerized Sinusoidal)
+            - IGH (Interrupted Goode Homolosine)
+            - LA (Lambert Azimuthal)
+            - LCC (Lambert Conformal Conic)
+            - MERCAT (Mercator)
+            - MOL (Mollweide)
+            - PS (Polar Stereographic)
+            - SIN (Sinusoidal)
+            - UTM (Universal Transverse Mercator)
+
         utm = the UTM zone if projection system is UTM
-        projpar = a list of projection parameters, for more info check the 
-                  "Appendix C" of MODIS reprojection tool user's manual
-                  https://lpdaac.usgs.gov/content/download/4831/22895/file/mrt41_usermanual_032811.pdf
+
+        projpar = a list of projection parameters, for more info check
+        Appendix C of the MODIS Reprojection Tool user manual
+        https://lpdaac.usgs.gov/content/download/4831/22895/file/mrt41_usermanual_032811.pdf
         """
     # output name
     if not output:
@@ -376,7 +432,7 @@
     else:
       fileout = output
     # the name of the output parameters files for resample MRT software
-    filename = os.path.join(self.path,'%s_mrt_resample.prm' % self.code)
+    filename = os.path.join(self.path, '%s_mrt_resample.prm' % self.code)
     # if the file already exists it remove it 
     if os.path.exists(filename):
       os.remove(filename)
@@ -395,25 +451,26 @@
                                                               bound['max_lon']))
     conFile.write("OUTPUT_FILENAME = %s\n" % fileout)
     conFile.write("OUTPUT_FILE_FORMAT = GEOTIFF_FMT\n")
-    # if resampl is in resam_list set the parameter otherwise return an error
-    if resampl in RESAM_LIST_SWATH:
-      conFile.write("KERNEL_TYPE (CC/BI/NN) = %s\n" % resampl)
+    # if resample is in resam_list set the parameter otherwise return an error
+    if resample in RESAM_LIST_SWATH:
+      conFile.write("KERNEL_TYPE (CC/BI/NN) = %s\n" % resample)
     else:
       raise IOError('The resampling type %s is not supportet.\n' \
-                   'The resampling type supported are %s' % (resampl,RESAM_LIST_SWATH))
+                   'The resampling type supported are %s' % (resample,
+                                                             RESAM_LIST_SWATH))
     # if projtype is in proj_list set the parameter otherwise return an error
     if projtype in PROJ_LIST:
       conFile.write("OUTPUT_PROJECTION_NUMBER = %s\n" % projtype)
     else:
       raise IOError('The projection type %s is not supported.\n' \
-                   'The projections supported are %s' % (projtype,PROJ_LIST))
+                   'The projections supported are %s' % (projtype, PROJ_LIST))
     conFile.write("OUTPUT_PROJECTION_PARAMETER = %s\n" % projpar)
     # if sphere is in sphere_list set the parameter otherwise return an error
     if int(sphere) in SPHERE_LIST:
       conFile.write("OUTPUT_PROJECTION_SPHERE = %s\n" % sphere)
     else:
       raise IOError('The sphere %s is not supported.\n' \
-                   'The spheres supported are %s' % (sphere,SPHERE_LIST))
+                   'The spheres supported are %s' % (sphere, SPHERE_LIST))
     # if utm is not None write the UTM_ZONE parameter in the file
     if utm:
       if utm < '-60' or utm > '60':
@@ -426,10 +483,13 @@
     conFile.close()
     return filename
 
+
 class parseModisMulti:
-  """A class to some variable for the xml file of a mosaic
+  """A class to obtain some variables for the xml file of several MODIS tiles.
+     It can also create the xml file
   """
-  def __init__(self,hdflist):
+
+  def __init__(self, hdflist):
     """hdflist = python list containing the hdf files"""
     from xml.etree import ElementTree
     self.ElementTree = ElementTree
@@ -441,8 +501,11 @@
       self.parModis.append(parseModis(i))
       self.nfiles += 1
 
-  def _checkval(self,vals):
-    """Internal function to return values from list"""
+  def _checkval(self, vals):
+    """Internal function to return values from list
+
+    vals = list of values
+    """
     if vals.count(vals[0]) == self.nfiles:
       return [vals[0]]
     else:
@@ -451,9 +514,12 @@
         if outvals.count(i) == 0:
           outvals.append(i)
       return outvals
-      
-  def _checkvaldict(self,vals):
-    """Internal function to return values from dictionary"""
+
+  def _checkvaldict(self, vals):
+    """Internal function to return values from dictionary
+
+    vals = dictionary of values
+    """
     keys = vals[0].keys()
     outvals = {}
     for k in keys:
@@ -464,118 +530,164 @@
         outvals[k] = valtemp[0]
       else:
         raise IOError('Something wrong reading XML files')
-      
+
     return outvals
 
   def _minval(self, vals):
-    """Internal function to return the minimum value"""
+    """Internal function to return the minimum value
+
+    vals = list of values
+    """
     outval = vals[0]
-    for i in range(1,len(vals)):
+    for i in range(1, len(vals)):
       if outval > i:
         outval = i
     return outval
-    
+
   def _maxval(self, vals):
-    """Internal function to return the maximum value"""
+    """Internal function to return the maximum value
+
+    vals = list of values   
+    """
     outval = vals[0]
-    for i in range(1,len(vals)):
+    for i in range(1, len(vals)):
       if outval < i:
         outval = i
     return outval
-    
-  def _cicle_values(self, ele,values):
-    """Internal function to add values from a dictionary"""
+
+  def _cicle_values(self, obj, values):
+    """Internal function to add values from a dictionary
+
+    obj = element to add values
+
+    values = dictionary containing keys and values
+    """
     for k,v in values.iteritems():
-      elem = self.ElementTree.SubElement(ele,k)
+      elem = self.ElementTree.SubElement(obj, k)
       elem.text = v
 
-  def _addPoint(self,obj,lon,lat):
-    """Internal function to add a point in boundary xml tag"""
+  def _addPoint(self, obj, lon, lat):
+    """Internal function to add a point in boundary xml tag
+
+    obj = element to add point
+
+    lon = longitude of point
+
+    lat = latitude of point
+    """
     pt = self.ElementTree.SubElement(obj, 'Point')
     ptlon = self.ElementTree.SubElement(pt, 'PointLongitude')
     ptlon.text = str(self.boundary[lon])
     ptlat = self.ElementTree.SubElement(pt, 'PointLatitude')
     ptlat.text = str(self.boundary[lat])
 
-  def valDTD(self,obj):
-    """Function to add DTDVersion"""
+  def valDTD(self, obj):
+    """Function to add DTDVersion
+
+    obj = element to add DTDVersion
+    """
     values = []
     for i in self.parModis:
       values.append(i.retDTD())
     for i in self._checkval(values):
-      dtd = self.ElementTree.SubElement(obj,'DTDVersion')
+      dtd = self.ElementTree.SubElement(obj, 'DTDVersion')
       dtd.text = i
 
-  def valDataCenter(self,obj):
-    """Function to add DataCenter"""
+  def valDataCenter(self, obj):
+    """Function to add DataCenter
+
+    obj = element to add DataCenter
+    """
     values = []
     for i in self.parModis:
       values.append(i.retDataCenter())
     for i in self._checkval(values):
-      dci = self.ElementTree.SubElement(obj,'DataCenterId')
+      dci = self.ElementTree.SubElement(obj, 'DataCenterId')
       dci.text = i
-      
-  def valGranuleUR(self,obj):
-    """Function to add GranuleUR"""
+
+  def valGranuleUR(self, obj):
+    """Function to add GranuleUR
+
+    obj = element to add GranuleUR
+    """
     values = []
     for i in self.parModis:
       values.append(i.retGranuleUR())
     for i in self._checkval(values):
-      gur = self.ElementTree.SubElement(obj,'GranuleUR')
+      gur = self.ElementTree.SubElement(obj, 'GranuleUR')
       gur.text = i
 
-  def valDbID(self,obj):
-    """Function to add DbID"""
+  def valDbID(self, obj):
+    """Function to add DbID
+
+    obj = element to add DbID
+    """
     values = []
     for i in self.parModis:
       values.append(i.retDbID())
     for i in self._checkval(values):
-      dbid = self.ElementTree.SubElement(obj,'DbID')
+      dbid = self.ElementTree.SubElement(obj, 'DbID')
       dbid.text = i
-      
-  def valInsTime(self,obj):
-    """TODO"""
+
+  def valInsTime(self, obj):
+    """Function to add the minimum of InsertTime
+
+    obj = element to add InsertTime
+    """
     values = []
     for i in self.parModis:
       values.append(i.retInsertTime())
     obj.text = self._minval(values)
-  
-  def valCollectionMetaData(self,obj):
-    """Function to add CollectionMetaData"""
+
+  def valCollectionMetaData(self, obj):
+    """Function to add CollectionMetaData
+
+    obj = element to add CollectionMetaData
+    """
     values = []
     for i in self.parModis:
       values.append(i.retCollectionMetaData())
-    self._cicle_values(obj,self._checkvaldict(values))
-  
+    self._cicle_values(obj, self._checkvaldict(values))
+
   def valDataFiles(self, obj):
-    """Function to add DataFileContainer"""
+    """Function to add DataFileContainer
+
+    obj = element to add DataFileContainer
+    """
     values = []
     for i in self.parModis:
       values.append(i.retDataFiles())
     for i in values:
       dfc = self.ElementTree.SubElement(obj, 'DataFileContainer')
-      self._cicle_values(dfc,i)
+      self._cicle_values(dfc, i)
+
+  def valPGEVersion(self, obj):
+    """Function to add PGEVersion
     
-  def valPGEVersion(self,obj):
-    """Function to add PGEVersion"""
+    obj = element to add PGEVersion
+    """
     values = []
     for i in self.parModis:
       values.append(i.retPGEVersion())
     for i in self._checkval(values):
-      pge = self.ElementTree.SubElement(obj,'PGEVersion')
+      pge = self.ElementTree.SubElement(obj, 'PGEVersion')
       pge.text = i
-  
-  def valRangeTime(self,obj):
-    """Function to add RangeDateTime"""
+
+  def valRangeTime(self, obj):
+    """Function to add RangeDateTime
+
+    obj = element to add RangeDateTime
+    """
     values = []
     for i in self.parModis:
       values.append(i.retRangeTime())
-    self._cicle_values(obj,self._checkvaldict(values))
-  
+    self._cicle_values(obj, self._checkvaldict(values))
+
   def valBound(self):
-    """Function return the bounding box of mosaic"""
+    """Function return the Bounding Box of mosaic
+    """
     boundary = self.parModis[0].retBoundary()
-    for i in range(1,len(self.parModis)):
+    for i in range(1, len(self.parModis)):
       bound = self.parModis[i].retBoundary()
       if bound['min_lat'] < boundary['min_lat']:
         boundary['min_lat'] = bound['min_lat']
@@ -586,9 +698,12 @@
       if bound['max_lon'] > boundary['max_lon']:
         boundary['max_lon'] = bound['max_lon']
     self.boundary = boundary
-  
-  def valMeasuredParameter(self,obj):
-    """Function to add ParameterName"""
+
+  def valMeasuredParameter(self, obj):
+    """Function to add ParameterName
+
+    obj = element to add ParameterName
+    """
     valuesQAStats = []
     valuesQAFlags = []
     valuesParameter = []
@@ -597,18 +712,24 @@
       valuesQAFlags.append(i.retMeasure()['QAFlags'])
       valuesParameter.append(i.retMeasure()['ParameterName'])
     for i in self._checkval(valuesParameter):
-      pn = self.ElementTree.SubElement(obj,'ParameterName')
+      pn = self.ElementTree.SubElement(obj, 'ParameterName')
       pn.text = i
-  
-  def valInputPointer(self,obj):
-    """Function to add InputPointer"""
+
+  def valInputPointer(self, obj):
+    """Function to add InputPointer
+
+    obj = element to add InputPointer
+    """
     for i in self.parModis:
       for v in i.retInputGranule():
-        ip = self.ElementTree.SubElement(obj,'InputPointer')
+        ip = self.ElementTree.SubElement(obj, 'InputPointer')
         ip.text = v
-  
+
   def valPlatform(self, obj):
-    """Function to add Platform tags"""
+    """Function to add Platform elements
+
+    obj = element to add Platform elements
+    """
     valuesSName = []
     valuesInstr = []
     valuesSensor = []
@@ -617,25 +738,28 @@
         valuesInstr.append(i.retPlatform()['InstrumentShortName'])
         valuesSensor.append(i.retPlatform()['SensorShortName'])
     for i in self._checkval(valuesSName):
-      pn = self.ElementTree.SubElement(obj,'PlatformShortName')
+      pn = self.ElementTree.SubElement(obj, 'PlatformShortName')
       pn.text = i
-      
+
     valInstr = self._checkval(valuesInstr)
     valSens = self._checkval(valuesSensor)
-    
+
     if len(valInstr) != len(valSens):
       raise IOError('Something wrong reading XML files')
     else:
       for i in range(len(valInstr)):
-        ins = self.ElementTree.SubElement(obj,'Instrument')
-        pn = self.ElementTree.SubElement(ins,'InstrumentShortName')
+        ins = self.ElementTree.SubElement(obj, 'Instrument')
+        pn = self.ElementTree.SubElement(ins, 'InstrumentShortName')
         pn.text = valInstr[i]
-        sens = self.ElementTree.SubElement(ins,'Sensor')
-        ps = self.ElementTree.SubElement(sens,'SensorShortName')
+        sens = self.ElementTree.SubElement(ins, 'Sensor')
+        ps = self.ElementTree.SubElement(sens, 'SensorShortName')
         ps.text = valSens[i]
 
-  def writexml(self,outputname):
-    """Return a xml file for a mosaic"""
+  def writexml(self, outputname):
+    """Write a xml file for a mosaic
+
+    outputname = the name of xml file
+    """
     # the root element
     granule = self.ElementTree.Element('GranuleMetaDataFile')
     # add DTDVersion
@@ -643,57 +767,58 @@
     # add DataCenterId
     self.valDataCenter(granule)
     # add GranuleURMetaData
-    gurmd = self.ElementTree.SubElement(granule,'GranuleURMetaData')
+    gurmd = self.ElementTree.SubElement(granule, 'GranuleURMetaData')
     # add GranuleUR
     self.valGranuleUR(gurmd)
     # add dbID
     self.valDbID(gurmd)
-    
+
     # TODO ADD InsertTime LastUpdate
 
     # add CollectionMetaData
-    cmd = self.ElementTree.SubElement(gurmd,'CollectionMetaData')
+    cmd = self.ElementTree.SubElement(gurmd, 'CollectionMetaData')
     self.valCollectionMetaData(cmd)
     # add DataFiles
-    df = self.ElementTree.SubElement(gurmd,'DataFiles')
+    df = self.ElementTree.SubElement(gurmd, 'DataFiles')
     self.valDataFiles(df)
-    
+
     # TODO ADD ECSDataGranule
-    
+
     # add PGEVersionClass
-    pgevc = self.ElementTree.SubElement(gurmd,'PGEVersionClass')
+    pgevc = self.ElementTree.SubElement(gurmd, 'PGEVersionClass')
     self.valPGEVersion(pgevc)
     # add RangeDateTime
-    rdt = self.ElementTree.SubElement(gurmd,'RangeDateTime')
+    rdt = self.ElementTree.SubElement(gurmd, 'RangeDateTime')
     self.valRangeTime(rdt)
     # SpatialDomainContainer
-    sdc = self.ElementTree.SubElement(gurmd,'SpatialDomainContainer')
-    hsdc = self.ElementTree.SubElement(sdc,'HorizontalSpatialDomainContainer')
-    gp = self.ElementTree.SubElement(hsdc,'GPolygon')
-    bound = self.ElementTree.SubElement(gp,'Boundary')
+    sdc = self.ElementTree.SubElement(gurmd, 'SpatialDomainContainer')
+    hsdc = self.ElementTree.SubElement(sdc, 'HorizontalSpatialDomainContainer')
+    gp = self.ElementTree.SubElement(hsdc, 'GPolygon')
+    bound = self.ElementTree.SubElement(gp, 'Boundary')
     self.valBound()
-    self._addPoint(bound,'min_lon','max_lat')
-    self._addPoint(bound,'max_lon','max_lat')
-    self._addPoint(bound,'min_lon','min_lat')
-    self._addPoint(bound,'max_lon','min_lat')
+    self._addPoint(bound, 'min_lon', 'max_lat')
+    self._addPoint(bound, 'max_lon', 'max_lat')
+    self._addPoint(bound, 'min_lon', 'min_lat')
+    self._addPoint(bound, 'max_lon', 'min_lat')
     # add MeasuredParameter
-    mp = self.ElementTree.SubElement(gurmd,'MeasuredParameter')
-    mpc = self.ElementTree.SubElement(mp,'MeasuredParameterContainer')
+    mp = self.ElementTree.SubElement(gurmd, 'MeasuredParameter')
+    mpc = self.ElementTree.SubElement(mp, 'MeasuredParameterContainer')
     self.valMeasuredParameter(mpc)
     # Platform
-    pl = self.ElementTree.SubElement(gurmd,'Platform')
+    pl = self.ElementTree.SubElement(gurmd, 'Platform')
     self.valPlatform(pl)
 
     # add PSAs
-    psas = self.ElementTree.SubElement(gurmd,'PSAs')
+    psas = self.ElementTree.SubElement(gurmd, 'PSAs')
     # TODO ADD all PSA
 
     # add InputGranule and InputPointer
-    ig = self.ElementTree.SubElement(gurmd,'InputGranule')
+    ig = self.ElementTree.SubElement(gurmd, 'InputGranule')
     self.valInputPointer(ig)
     # TODO ADD BrowseProduct
     output = open(outputname, 'w')
     output.write('<?xml version="1.0" encoding="UTF-8"?>')
-    output.write('<!DOCTYPE GranuleMetaDataFile SYSTEM "http://ecsinfo.gsfc.nasa.gov/ECSInfo/ecsmetadata/dtds/DPL/ECS/ScienceGranuleMetadata.dtd">')
+    output.write('<!DOCTYPE GranuleMetaDataFile SYSTEM "http://ecsinfo.gsfc.'
+        'nasa.gov/ECSInfo/ecsmetadata/dtds/DPL/ECS/ScienceGranuleMetadata.dtd">')
     output.write(self.ElementTree.tostring(granule))
-    output.close()
+    output.close()
\ No newline at end of file

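To tie the pieces together, here is a minimal sketch of parseModis and parseModisMulti as updated above. The file names are hypothetical and the spectral string follows the LST example from the confResample docstring:

    from parsemodis import parseModis, parseModisMulti

    # the matching .hdf.xml metadata file must sit next to the hdf file
    pm = parseModis('/data/modis/MOD11A1.A2012340.h18v04.005.hdf')
    print pm.retBoundary()       # bounding box as a dictionary

    # write the parameter file later consumed by convertmodis.convertModis
    conf = pm.confResample(spectral='( 1 0 0 0 1 0 0 0 0 0 0 0 )',
                           datum='WGS84', resample='NEAREST_NEIGHBOR',
                           projtype='GEO')

    # combined XML metadata for a mosaic of several tiles
    pmm = parseModisMulti(['/data/modis/MOD11A1.A2012340.h18v04.005.hdf',
                           '/data/modis/MOD11A1.A2012340.h18v05.005.hdf'])
    pmm.writexml('/data/modis/mosaic_MOD11A1.xml')
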
Modified: grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py	2012-12-26 12:17:09 UTC (rev 54410)
+++ grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py	2012-12-27 10:12:03 UTC (rev 54411)
@@ -17,9 +17,11 @@
 #############################################################################
 import grass.script as grass
 # interface to g.proj -p
+
+
 def get_proj():
-    """!Returns the output from running "g.proj -p" plus towgs84 parameter (g.proj -d), 
-    as a dictionary. Example:
+    """!Returns the output from running "g.proj -p" plus towgs84 parameter
+    (g.proj -d), as a dictionary. Example:
 
     \code
     >>> proj = grass.get_proj()
@@ -29,7 +31,7 @@
 
     @return dictionary of projection values
     """
-    gproj = grass.read_command('g.proj',flags='p')
+    gproj = grass.read_command('g.proj', flags='p')
     listproj = gproj.split('\n')
     listproj.remove('-PROJ_INFO-------------------------------------------------')
     listproj.remove('-PROJ_UNITS------------------------------------------------')
@@ -38,203 +40,228 @@
     for i in listproj:
         ilist = i.split(':')
         proj[ilist[0].strip()] = ilist[1].strip()
-    proj.update(grass.parse_command('g.proj',flags='j'))
+    proj.update(grass.parse_command('g.proj', flags='j'))
     return proj
 
+
 class product:
     """Definition of MODIS product with url and path in the ftp server
     """
-    def __init__(self,value = None):
+
+    def __init__(self, value=None):
         # url to download products
         urlbase = 'e4ftl01.cr.usgs.gov'
         usrsnow = 'n4ftl01u.ecs.nasa.gov'
         ### values of lst product:
         lst_spec = '( 1 0 0 0 1 0 0 0 0 0 0 0 )'
         lst_specqa = '( 1 1 0 0 1 1 0 0 0 0 0 0 )'
-        # pattern for r.bitpatter (key is the pattern option, list of values 
+        # pattern for r.bitpattern (key is the pattern option, list of values
         #                         contain values for patval option)
-        lst_patt = {3 : [2, 3], 128 : [81, 85], 255 : [129, 133, 145, 149]}
+        lst_patt = {3: [2, 3], 128: [81, 85], 255: [129, 133, 145, 149]}
         # suffix for the lst product (key is the lst map, value the QA)
-        lst1km_suff = {'.LST_Day_1km':'.QC_Day','.LST_Night_1km':'.QC_Night'}
-        lst6km_suff = {'.LST_Day_6km':'.QC_Day','.LST_Night_6km':'.QC_Night'}
+        lst1km_suff = {'.LST_Day_1km': '.QC_Day', '.LST_Night_1km': '.QC_Night'}
+        lst6km_suff = {'.LST_Day_6km': '.QC_Day', '.LST_Night_6km': '.QC_Night'}
         # color for lst product
         lst_color = ['celsius']
         ### values of vi product:
         vi_spec = '( 1 1 0 0 0 0 0 0 0 0 0 )'
         vi_specqa = '( 1 1 1 0 0 0 0 0 0 0 0 )'
-        vi_patt = {3 : [2, 3], 63 : [13, 14, 15], 128 : [3], 1024 : [1], 
-        8192 : [0, 6, 7], 16384 : [1], 32768 : [1]}
-        vi_color = ['ndvi','evi']
-        vi250m_suff = {'.250m_16_days_NDVI' : '.250m_16_days_VI_Quality',
-        '.250m_16_days_EVI' : '.250m_16_days_VI_Quality'}
-        vi500m_suff = {'.500m_16_days_NDVI' : '.500m_16_days_VI_Quality',
-        '.500m_16_days_EVI' : '.500m_16_days_VI_Quality'}        
-        vi1km_suff = {'.1_km_16_days_NDVI' : '.1_km_16_days_VI_Quality',
-        '.1_km_16_days_EVI' : '.1_km_16_days_VI_Quality'}
+        vi_patt = {3: [2, 3], 63: [13, 14, 15], 128: [3], 1024: [1],
+                   8192: [0, 6, 7], 16384: [1], 32768: [1]}
+        vi_color = ['ndvi', 'evi']
+        vi250m_suff = {'.250m_16_days_NDVI': '.250m_16_days_VI_Quality',
+                       '.250m_16_days_EVI': '.250m_16_days_VI_Quality'}
+        vi500m_suff = {'.500m_16_days_NDVI': '.500m_16_days_VI_Quality',
+                       '.500m_16_days_EVI': '.500m_16_days_VI_Quality'}
+        vi1km_suff = {'.1_km_16_days_NDVI': '.1_km_16_days_VI_Quality',
+                      '.1_km_16_days_EVI': '.1_km_16_days_VI_Quality'}
         ### values of snow product:
         snow1_spec = ('( 1 )')
         snow1_specqa = ('( 1 1 )')
-        snow1_suff = {'.Snow_Cover_Daily_Tile':'.Snow_Spatial_QA'}
-        snow1_patt = {3 : [2,3], 7 : [6, 7], 15 : [10, 11, 14, 15]}
-        
+        snow1_suff = {'.Snow_Cover_Daily_Tile': '.Snow_Spatial_QA'}
+        snow1_patt = {3: [2, 3], 7: [6, 7], 15: [10, 11, 14, 15]}
         snow8_spec = ('( 1 1 )')
-        snow_color = ['gyr'] #TODO CREATE THE COLOR TABLE FOR MODIS_SNOW
-        snow8_suff = {'.Maximum_Snow_Extent' : None, '.Eight_Day_Snow_Cover' : None}
+        snow_color = ['gyr']  # TODO CREATE THE COLOR TABLE FOR MODIS_SNOW
+        snow8_suff = {'.Maximum_Snow_Extent': None, '.Eight_Day_Snow_Cover': None}
         lstL2_spec = 'LST; QC; Error_LST; Emis_31; Emis_32; View_angle; View_time'
-        
-	self.prod = value
-	lst = {'lst_aqua_daily_1000' : {'url' : urlbase, 'folder' : 'MOLA/MYD11A1.005',
-                                  'res' : 1000, 'spec' : lst_spec, 'spec_qa' : lst_specqa,
-                                  'suff' : lst1km_suff, 'pattern' : lst_patt, 
-                                  'color' : lst_color
+
+        self.prod = value
+        lst = {'lst_aqua_daily_1000': {'url': urlbase, 'pattern': lst_patt,
+                                       'folder': 'MOLA/MYD11A1.005',
+                                       'spec': lst_spec, 'spec_qa': lst_specqa,
+                                       'suff': lst1km_suff, 'res': 1000,
+                                       'color': lst_color
               },
-              'lst_terra_daily_1000' : {'url' : urlbase, 'folder' : 'MOLT/MOD11A1.005',
-                                  'res' : 1000, 'spec': lst_spec,'spec_qa': lst_specqa, 
-                                  'suff' : lst1km_suff, 'pattern' : lst_patt, 
-                                  'color' : lst_color
-              }, 
-              'lst_terra_eight_1000' : {'url' : urlbase, 'folder' : 'MOLT/MOD11A2.005',
-                                  'res' : 1000, 'spec': lst_spec,'spec_qa': lst_specqa, 
-                                  'suff' : lst1km_suff, 'pattern' : lst_patt, 
-                                  'color' : lst_color
+              'lst_terra_daily_1000': {'url': urlbase, 'pattern': lst_patt,
+                                       'folder': 'MOLT/MOD11A1.005',
+                                       'spec': lst_spec, 'spec_qa': lst_specqa,
+                                       'suff': lst1km_suff, 'res': 1000,
+                                       'color': lst_color
               },
-              'lst_aqua_eight_1000' : {'url' : urlbase, 'folder' : 'MOLA/MYD11A2.005',
-                                  'res' : 1000, 'spec': lst_spec,'spec_qa': lst_specqa, 
-                                  'suff' : lst1km_suff, 'pattern' : lst_patt, 
-                                  'color' : lst_color
+              'lst_terra_eight_1000': {'url': urlbase, 'pattern': lst_patt,
+                                       'folder': 'MOLT/MOD11A2.005',
+                                       'spec': lst_spec, 'spec_qa': lst_specqa,
+                                       'suff': lst1km_suff, 'res': 1000,
+                                       'color': lst_color
               },
-              'lst_terra_daily_6000' : {'url' : urlbase, 'folder' : 'MOLT/MOD11B1.005',
-                                  'res' : 6000, 'spec': lst_spec,'spec_qa': lst_specqa, 
-                                  'suff' : lst6km_suff, 'pattern' : lst_patt, 
-                                  'color' : lst_color
-              }, 
-              'lst_aqua_daily_6000' : {'url' : urlbase, 'folder' : 'MOLA/MYD11B1.005',
-                                  'res' : 6000, 'spec': lst_spec,'spec_qa': lst_specqa, 
-                                  'suff' : lst6km_suff, 'pattern' : lst_patt, 
-                                  'color' : lst_color
+              'lst_aqua_eight_1000': {'url': urlbase, 'pattern': lst_patt,
+                                      'folder': 'MOLA/MYD11A2.005',
+                                      'spec': lst_spec, 'spec_qa': lst_specqa,
+                                      'suff': lst1km_suff, 'res': 1000,
+                                      'color': lst_color
               },
+              'lst_terra_daily_6000': {'url': urlbase, 'pattern': lst_patt,
+                                       'folder': 'MOLT/MOD11B1.005',
+                                       'spec': lst_spec, 'spec_qa': lst_specqa,
+                                       'suff': lst6km_suff, 'res': 6000,
+                                       'color': lst_color
+              },
+              'lst_aqua_daily_6000': {'url': urlbase, 'pattern': lst_patt,
+                                      'folder': 'MOLA/MYD11B1.005',
+                                      'spec': lst_spec, 'spec_qa': lst_specqa,
+                                      'suff': lst6km_suff, 'res': 6000,
+                                      'color': lst_color
+              },
 
         }
-	vi = {'ndvi_terra_sixteen_250':{'url':urlbase, 'folder':'MOLT/MOD13Q1.005',
-                                    'res':250,'spec': vi_spec,'spec_qa': vi_specqa,
-                                    'suff' : vi250m_suff, 'pattern' : vi_patt, 
-                                    'color' : vi_color
+        vi = {'ndvi_terra_sixteen_250': {'url': urlbase, 'pattern': vi_patt,
+                                         'folder': 'MOLT/MOD13Q1.005',
+                                         'spec': vi_spec, 'spec_qa': vi_specqa,
+                                         'suff': vi250m_suff, 'res': 250,
+                                         'color': vi_color
               },
-              'ndvi_aqua_sixteen_250':{'url':urlbase, 'folder':'MOLA/MYD13Q1.005',
-                                    'res':250,'spec': vi_spec,'spec_qa': vi_specqa,
-                                    'suff' : vi250m_suff, 'pattern' : vi_patt, 
-                                    'color' : vi_color
+              'ndvi_aqua_sixteen_250': {'url': urlbase, 'pattern': vi_patt,
+                                        'folder': 'MOLA/MYD13Q1.005',
+                                        'spec': vi_spec, 'spec_qa': vi_specqa,
+                                        'suff': vi250m_suff, 'res': 250,
+                                        'color': vi_color
               },
-              'ndvi_terra_sixteen_500':{'url':urlbase, 'folder':'MOLT/MOD13A1.005',
-                                    'res':500,'spec': vi_spec,'spec_qa': vi_specqa,
-                                    'suff' : vi1km_suff, 'pattern' : vi_patt, 
-                                    'color' : vi_color
+              'ndvi_terra_sixteen_500': {'url': urlbase, 'pattern': vi_patt,
+                                         'folder': 'MOLT/MOD13A1.005',
+                                         'spec': vi_spec, 'spec_qa': vi_specqa,
+                                         'suff': vi500m_suff, 'res': 500,
+                                         'color': vi_color
               },
-              'ndvi_aqua_sixteen_500':{'url':urlbase, 'folder':'MOLA/MYD13A1.005',
-                                    'res':500,'spec': vi_spec,'spec_qa': vi_specqa,
-                                    'suff' : vi500m_suff, 'pattern' : vi_patt, 
-                                    'color' : vi_color
+              'ndvi_aqua_sixteen_500': {'url': urlbase, 'pattern': vi_patt,
+                                        'folder': 'MOLA/MYD13A1.005',
+                                        'spec': vi_spec, 'spec_qa': vi_specqa,
+                                        'suff': vi500m_suff, 'res': 500,
+                                        'color': vi_color
               },
-              'ndvi_terra_sixteen_1000':{'url':urlbase, 'folder':'MOLT/MOD13A2.005',
-                                    'res':1000,'spec': vi_spec,'spec_qa': vi_specqa,
-                                    'suff' : vi500m_suff, 'pattern' : vi_patt, 
-                                    'color' : vi_color
+              'ndvi_terra_sixteen_1000': {'url': urlbase, 'pattern': vi_patt,
+                                          'folder': 'MOLT/MOD13A2.005',
+                                          'spec': vi_spec, 'spec_qa': vi_specqa,
+                                          'suff': vi1km_suff, 'res': 1000,
+                                          'color': vi_color
               },
-              'ndvi_aqua_sixteen_1000':{'url':urlbase, 'folder':'MOLA/MYD13A2.005',
-                                    'res':1000,'spec': vi_spec,'spec_qa': vi_specqa,
-                                    'suff' : vi1km_suff, 'pattern' : vi_patt, 
-                                    'color' : vi_color
+              'ndvi_aqua_sixteen_1000': {'url': urlbase, 'pattern': vi_patt,
+                                         'folder': 'MOLA/MYD13A2.005',
+                                         'spec': vi_spec, 'spec_qa': vi_specqa,
+                                         'suff': vi1km_suff, 'res': 1000,
+                                         'color': vi_color
               }
         }
-        snow = {'snow_terra_daily_500' : {'url' : usrsnow, 'folder' : 
-				'SAN/MOST/MOD10A1.005', 'res' : 500, 'spec' : snow1_spec
-				,'spec_qa': snow1_specqa, 'color' : snow_color,
-				'suff' : snow1_suff, 'pattern' : snow1_patt
-		}, 
-		'snow_aqua_daily_500' : {'url' : usrsnow, 'folder' : 
-                                'SAN/MOSA/MYD10A1.005', 'res' : 500, 'spec' : snow1_spec
-                                ,'spec_qa': snow1_specqa, 'color' : snow_color,
-                                'suff' : snow1_suff, 'pattern' : snow1_patt
+        snow = {'snow_terra_daily_500': {'url': usrsnow, 'pattern': snow1_patt,
+                                         'folder': 'SAN/MOST/MOD10A1.005',
+                                         'spec': snow1_spec,
+                                         'spec_qa': snow1_specqa,
+                                         'color': snow_color,
+                                         'suff': snow1_suff, 'res': 500
+               },
+               'snow_aqua_daily_500': {'url': usrsnow, 'pattern': snow1_patt,
+                                       'folder': 'SAN/MOSA/MYD10A1.005',
+                                       'spec': snow1_spec,
+                                       'spec_qa': snow1_specqa,
+                                       'color': snow_color,
+                                       'suff': snow1_suff, 'res': 500
+               },
+               'snow_terra_eight_500': {'url': usrsnow, 'pattern': None,
+                                        'folder': 'SAN/MOST/MOD10A2.005',
+                                        'spec': snow8_spec,
+                                        'spec_qa': None,
+                                        'color': snow_color,
+                                        'suff': snow8_suff, 'res': 500
                 },
-                'snow_terra_eight_500' : {'url' : usrsnow, 'folder' : 
-                                'SAN/MOST/MOD10A2.005', 'res' : 500, 'spec' : snow8_spec
-                                ,'spec_qa': None, 'color' : snow_color,
-                                'pattern' : None, 'suff' : snow8_suff
-                }, 
-                'snow_aqua_eight_500' : {'url' : usrsnow, 'folder' : 
-                                'SAN/MOSA/MYD10A2.005', 'res' : 500, 'spec' : snow8_spec
-                                ,'spec_qa': None, 'color' : snow_color,
-                                'pattern' : None, 'suff' : snow8_suff
+                'snow_aqua_eight_500': {'url': usrsnow, 'pattern': None,
+                                        'folder': 'SAN/MOSA/MYD10A2.005',
+                                        'spec': snow8_spec,
+                                        'spec_qa': None,
+                                        'color': snow_color,
+                                        'suff': snow8_suff, 'res': 500
                 }
-	}
-	self.products = { }
-	self.products.update(lst)
-	self.products.update(vi)
-	self.products.update(snow)
-        self.products_swath = { 
-          'lst_terra_daily':{'url':urlbase,'folder':'MOLT/MOD11_L2.005',
-          'spec': lstL2_spec}, 'lst_aqua_daily':{'url':urlbase,'folder':'MOLA/MYD11_L2.005',
-          'spec': lstL2_spec}
         }
+        self.products = {}
+        self.products.update(lst)
+        self.products.update(vi)
+        self.products.update(snow)
+        self.products_swath = {'lst_terra_daily': {'url': urlbase,
+                                                   'folder': 'MOLT/MOD11_L2.005',
+                                                   'spec': lstL2_spec},
+                               'lst_aqua_daily': {'url': urlbase,
+                                                  'folder': 'MOLA/MYD11_L2.005',
+                                                  'spec': lstL2_spec}
+        }
+
     def returned(self):
         if self.products.keys().count(self.prod) == 1:
             return self.products[self.prod]
         elif self.products_swath.keys().count(self.prod) == 1:
             return self.products_swath[self.prod]
         else:
-            grass.fatal(_("The code insert is not supported yet. Consider to ask " \
-                      + "on the grass-dev mailing list for future support"))
+            grass.fatal(_("The code insert is not supported yet. Consider to " \
+                      "ask on the grass-dev mailing list for future support"))
 
-    def fromcode(self,code):
+    def fromcode(self, code):
         import string
-        for k,v in self.products.iteritems():
-          if string.find(v['folder'],code) != -1:
-            return self.products[k]
-        for k,v in self.products_swath.iteritems():
-          if string.find(v['folder'],code) != -1:
-            return self.products_swath[k]
-        grass.fatal(_("The code insert is not supported yet. Consider to ask " \
-                      "on the grass-dev mailing list for future support"))
+        for k, v in self.products.iteritems():
+            if string.find(v['folder'], code) != -1:
+                return self.products[k]
+        for k, v in self.products_swath.iteritems():
+            if string.find(v['folder'], code) != -1:
+                return self.products_swath[k]
+        grass.fatal(_("The code insert is not supported yet. Consider to " \
+                      "ask on the grass-dev mailing list for future support"))
 
-    def color(self,code = None):
+    def color(self, code=None):
         if code:
-          return self.fromcode(code)['color']
+            return self.fromcode(code)['color']
         else:
-          return self.returned()['color']
+            return self.returned()['color']
 
-
-    def pattern(self,code = None):
+    def pattern(self, code=None):
         if code:
-          return self.fromcode(code)['pattern']
+            return self.fromcode(code)['pattern']
         else:
-          return self.returned()['pattern']
+            return self.returned()['pattern']
 
-    def suffix(self,code = None):
+    def suffix(self, code=None):
         if code:
-          return self.fromcode(code)['suff']
+            return self.fromcode(code)['suff']
         else:
-          return self.returned()['suff']
+            return self.returned()['suff']
 
     def __str__(self):
-	prod = self.returned()
-	string = "url: " + prod['url'] + ", folder: " + prod['folder']
+        prod = self.returned()
+        string = "url: " + prod['url'] + ", folder: " + prod['folder']
         if prod.keys().count('spec') == 1:
-                string += ", spectral subset: " + prod['spec']
+            string += ", spectral subset: " + prod['spec']
         if prod.keys().count('spec_qa') == 1:
-                string += ", spectral subset qa:" + prod['spec_qa']
-	return string
+            string += ", spectral subset qa:" + prod['spec_qa']
+        return string
 
+
 class resampling:
     """Return the resampling value from the code used in the modules
     """
-    def __init__(self,value):
+    def __init__(self, value):
         self.code = value
         self.resampling = {'nearest': 'NEAREST_NEIGHBOR', 'bilinear': 'BILINEAR',
-        'cubic': 'CUBIC CONVOLUTION'}
+                           'cubic': 'CUBIC CONVOLUTION'}
 
     def returned(self):
         return self.resampling[self.code]
 
+
 class projection:
     """Definition of projection for converting from sinusoidal projection to
     another one. Not all projection systems are supported"""
@@ -245,17 +272,24 @@
             self.dat = self.proj['datum']
         else:
             self.dat = 'none'
-        self.projections = {'laea' : 'LA','ll':'GEO', 'lcc':'LCC',
-             'merc':'MERCAT', 'polar':'PS', 'utm':'UTM', 'tmerc':'TM'}
-        self.datumlist = {'none':'NONE', 'nad27':'NAD27', 'nad83':'NAD83', 
-        'wgs66':'WGS66', 'wgs72':'WGS72', 'wgs84':'WGS84', 'etrs89':'WGS84'}
-        self.datumlist_swath = {'Clarke 1866' : 0, 'Clarke 1880' : 1, 'bessel' : 2,
-            'International 1967' : 3, 'International 1909': 4, 'wgs72' : 5, 
-            'Everest' : 6, 'wgs66' : 7, 'wgs84' : 8, 'Airy' : 9, 
-            'Modified Everest' : 10, 'Modified Airy' : 11, 'Walbeck' : 12, 
-            'Southeast Asia' : 13, 'Australian National' : 14, 'Krassovsky' : 15, 
-            'Hough' : 16, 'Mercury1960' : 17, 'Modified Mercury1968' : 18, 
-            'Sphere 19 (Radius 6370997)' : 19, 'MODIS Sphere (Radius 6371007.181)' : 20}
+        self.projections = {'laea': 'LA', 'll': 'GEO', 'lcc': 'LCC',
+                            'merc': 'MERCAT', 'polar': 'PS', 'utm': 'UTM',
+                            'tmerc': 'TM'}
+        self.datumlist = {'none': 'NONE', 'nad27': 'NAD27', 'nad83': 'NAD83',
+                          'wgs66': 'WGS66', 'wgs72': 'WGS72', 'wgs84': 'WGS84',
+                          'etrs89': 'WGS84'}
+        self.datumlist_swath = {'Clarke 1866': 0, 'Clarke 1880': 1,
+                                'bessel': 2, 'International 1967': 3,
+                                'International 1909': 4, 'wgs72': 5,
+                                'Everest': 6, 'wgs66': 7, 'wgs84': 8,
+                                'Airy': 9, 'Modified Everest': 10,
+                                'Modified Airy': 11, 'Walbeck': 12,
+                                'Southeast Asia': 13, 'Australian National': 14,
+                                'Krassovsky': 15, 'Hough': 16,
+                                'Mercury1960': 17, 'Modified Mercury1968': 18,
+                                'Sphere 19 (Radius 6370997)': 19,
+                                'MODIS Sphere (Radius 6371007.181)': 20
+                                }
 
     def returned(self):
         """Return the projection in the MRT style"""
@@ -264,7 +298,7 @@
         else:
             return self.projections[self.val]
 
-    def _par(self,key):
+    def _par(self, key):
         """Function use in return_params"""
         if self.proj[key]:
             Val = self.proj[key]
@@ -272,15 +306,16 @@
             Val = 0.0
         return float(Val)
 
-    def _outpar(self, SMajor, SMinor, Val, Factor, CentMer, TrueScale, FE, FN,swath):
+    def _outpar(self, SMajor, SMinor, Val, Factor, CentMer, TrueScale, FE, FN,
+                swath):
         if swath:
-          return '%i %i %d %d %d %d %d %d 0.0 0.0 0.0 0.0 0.0 0.0 0.0' % ( 
-                SMajor, SMinor, Val, Factor, CentMer, TrueScale, FE, FN )
+            return '%i %i %d %d %d %d %d %d 0.0 0.0 0.0 0.0 0.0 0.0 0.0' % (
+                    SMajor, SMinor, Val, Factor, CentMer, TrueScale, FE, FN)
         else:
-          return '( %i %i %d %d %d %d %d %d 0.0 0.0 0.0 0.0 0.0 0.0 0.0 )' % ( 
-                SMajor, SMinor, Val, Factor, CentMer, TrueScale, FE, FN )
+            return '( %i %i %d %d %d %d %d %d 0.0 0.0 0.0 0.0 0.0 0.0 0.0 )' % (
+                    SMajor, SMinor, Val, Factor, CentMer, TrueScale, FE, FN)
 
-    def return_params(self, swath = False):
+    def return_params(self, swath=False):
         """ Return the 13 parameters for MRT parameter file """
         if self.val == 'll' or self.val == 'utm':
             return self._outpar(0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, swath)
@@ -294,7 +329,7 @@
             CentLat = self._par('+lat_0')
             FE = self._par('+x_0')
             FN = self._par('+y_0')
-            return self._outpar(SMajor, SMinor, 0.0, 0.0, CentMer, 
+            return self._outpar(SMajor, SMinor, 0.0, 0.0, CentMer,
                                 CentLat, FE, FN, swath)
         elif self.val == 'lcc':
             SMajor = self._par('+a')
@@ -308,7 +343,7 @@
             CentLat = self._par('+lat_0')
             FE = self._par('+x_0')
             FN = self._par('+y_0')
-            return self._outpar(SMajor, SMinor, STDPR1, STDPR2, CentMer, 
+            return self._outpar(SMajor, SMinor, STDPR1, STDPR2, CentMer,
                                 CentLat, FE, FN, swath)
         elif self.val == 'merc' or self.val == 'polar' or self.val == 'tmerc':
             SMajor = self._par('+a')
@@ -324,7 +359,7 @@
             TrueScale = self._par('+lat_ts')
             FE = self._par('+x_0')
             FN = self._par('+y_0')
-            return self._outpar(SMajor, SMinor, 0.0, Factor, CentMer, 
+            return self._outpar(SMajor, SMinor, 0.0, Factor, CentMer,
                                  TrueScale, FE, FN, swath)
         else:
             grass.fatal(_('Projection not supported, please contact the ' \
@@ -335,7 +370,8 @@
         if self.dat not in self.datumlist.keys():
             grass.fatal(_("Datum <%s> is not supported") % self.dat)
         elif self.dat == 'etrs89':
-            grass.warning(_("Changing datum <%s> to <%s>") % (self.dat, 'wgs84'))
+            grass.warning(_("Changing datum <%s> to <%s>") % (self.dat,
+                                                              'wgs84'))
         return self.datumlist[self.dat]
 
     def datumswath(self):
@@ -344,4 +380,4 @@
 
     def utmzone(self):
         """Return the utm zone number"""
-        return self.proj['zone']
+        return self.proj['zone']
\ No newline at end of file
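
As a quick orientation for the changes above, here is a minimal usage sketch
for get_proj(); it assumes an active GRASS session (rmodislib imports
grass.script) and that the libmodis directory is on the Python path, and the
key names in the comments are only typical examples of what g.proj reports:

    from rmodislib import get_proj

    proj = get_proj()
    # keys parsed from "g.proj -p" (e.g. 'name', 'proj', 'datum', 'zone')
    # merged with the '+...' terms parsed from "g.proj -j"
    current_proj = proj['proj']      # e.g. 'utm'
    current_datum = proj['datum']    # e.g. 'wgs84'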

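Along the same lines, a hedged sketch of the product class: the product key
and the MOD11A1 code are taken from the dictionaries in the diff, while the
commented values only indicate the kind of data each call returns.

    from rmodislib import product

    prod = product('lst_terra_daily_1000')
    info = prod.returned()           # url, folder 'MOLT/MOD11A1.005', res, spec, ...
    suff = prod.suffix()             # {'.LST_Day_1km': '.QC_Day',
                                     #  '.LST_Night_1km': '.QC_Night'}
    same = prod.fromcode('MOD11A1')  # look up a product by the code in its folder name
    colors = prod.color()            # ['celsius']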

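The resampling class is only a small mapping from the option values used by
the r.modis modules to the keywords expected by the MRT parameter file, for
example:

    from rmodislib import resampling

    method = resampling('nearest').returned()   # 'NEAREST_NEIGHBOR'
    cubic = resampling('cubic').returned()      # 'CUBIC CONVOLUTION'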

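Finally, a sketch of the projection helper. Its constructor is not shown in
the hunks above, so the no-argument call below is an assumption; the returned
keyword, datum and parameter string depend on the projection of the current
location (a UTM/WGS84 location is assumed for the commented results).

    from rmodislib import projection

    prj = projection()         # assumption: constructed without arguments
    prj.returned()             # 'UTM', via the projections mapping
    prj.datum()                # 'WGS84', via datumlist (etrs89 is mapped to WGS84)
    prj.return_params()        # '( 0 0 0 0 0 0 0 0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 )'
                               # for 'll'/'utm'; other projections fill the first
                               # eight values through _outpar()
    prj.utmzone()              # zone number taken from the g.proj output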