[GRASS-SVN] r59323 - in grass-addons/grass7/raster/r.modis: . libmodis r.modis.download r.modis.import
svn_grass at osgeo.org
svn_grass at osgeo.org
Tue Mar 25 03:45:09 PDT 2014
Author: lucadelu
Date: 2014-03-25 03:45:07 -0700 (Tue, 25 Mar 2014)
New Revision: 59323
Modified:
grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py
grass-addons/grass7/raster/r.modis/libmodis/downmodis.py
grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py
grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py
grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.html
grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.py
grass-addons/grass7/raster/r.modis/r.modis.html
grass-addons/grass7/raster/r.modis/r.modis.import/r.modis.import.py
Log:
r.modis: update pymodis library; update documentation; add metadata information during import
Modified: grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/libmodis/convertmodis.py 2014-03-25 10:45:07 UTC (rev 59323)
@@ -35,19 +35,19 @@
confile = the full path to the paramater file
- mrtpath = the full path to mrt directory where inside you have bin and
+ mrtpath = the full path to mrt directory which contains the bin and
data directories
"""
# check if the hdf file exists
if os.path.exists(hdfname):
self.name = hdfname
else:
- raise IOError('%s not exists' % hdfname)
+ raise IOError('%s does not exist' % hdfname)
# check if confile exists
if os.path.exists(confile):
self.conf = confile
else:
- raise IOError('%s not exists' % confile)
+ raise IOError('%s does not exist' % confile)
# check if mrtpath and subdirectories exists and set environment variables
if os.path.exists(mrtpath):
if os.path.exists(os.path.join(mrtpath,'bin')):
@@ -55,14 +55,14 @@
os.environ['PATH'] = "%s:%s" % (os.environ['PATH'],os.path.join(mrtpath,
'data'))
else:
- raise IOError('The path %s not exists' % os.path.join(mrtpath,'bin'))
+ raise IOError('The path %s does not exist' % os.path.join(mrtpath,'bin'))
if os.path.exists(os.path.join(mrtpath,'data')):
self.mrtpathdata = os.path.join(mrtpath,'data')
os.environ['MRTDATADIR'] = os.path.join(mrtpath,'data')
else:
- raise IOError('The path %s not exists' % os.path.join(mrtpath,'data'))
+ raise IOError('The path %s does not exist' % os.path.join(mrtpath,'data'))
else:
- raise IOError('The path %s not exists' % mrtpath)
+ raise IOError('The path %s does not exist' % mrtpath)
def executable(self):
"""Return the executable of resample MRT software
@@ -79,11 +79,11 @@
import subprocess
execut = self.executable()
if not os.path.exists(execut):
- raise IOError('The path %s not exists, could be an erroneus path or '\
+ raise IOError('The path %s does not exist: it could be an erroneous path or '\
+ 'software') % execut
else:
subprocess.call([execut,'-p',self.conf])
- return "The hdf file %s was converted" % self.name
+ return "The hdf file %s was converted successfully" % self.name
class createMosaic:
@@ -112,16 +112,16 @@
os.environ['PATH'] = "%s:%s" % (os.environ['PATH'],os.path.join(mrtpath,
'data'))
else:
- raise IOError('The path %s not exists' % os.path.join(mrtpath,'bin'))
+ raise IOError('The path %s does not exist' % os.path.join(mrtpath,'bin'))
if os.path.exists(os.path.join(mrtpath,'data')):
self.mrtpathdata = os.path.join(mrtpath,'data')
os.environ['MRTDATADIR'] = os.path.join(mrtpath,'data')
else:
- raise IOError('The path %s not exists' % os.path.join(mrtpath,'data'))
+ raise IOError('The path %s does not exist' % os.path.join(mrtpath,'data'))
else:
- raise IOError('The path %s not exists' % mrtpath)
+ raise IOError('The path %s does not exist' % mrtpath)
self.out = os.path.join(self.basepath, outprefix + '.hdf')
- self.outxml = os.path.join(self.basepath, self.out + '.xml')
+ self.outxml = self.out + '.xml'
self.subset = subset
def write_mosaic_xml(self):
@@ -130,7 +130,7 @@
listHDF = []
for i in self.HDFfiles:
if i.find(self.basepath) == -1 and i.find('.hdf.xml') == -1:
- print "Attection maybe you have the not full path in the HDF file list"
+ print "Attention: maybe you do not have the full path in the HDF file list"
listHDF.append(os.path.join(self.basepath,i.strip()))
self.tmplistfiles.write("%s\n" % os.path.join(self.basepath,i.strip()))
elif i.find('.hdf.xml') == -1:
@@ -149,14 +149,14 @@
return os.path.join(self.mrtpathbin,'mrtmosaic')
elif sys.platform.count('win32'):
if os.path.exists(os.path.join(self.mrtpathbin,'mrtmosaic.exe')):
- return os.path.join(self.mrtpath,'mrtmosaic.exe')
+ return os.path.join(self.mrtpathbin,'mrtmosaic.exe')
def run(self):
"""Exect the mosaic process"""
import subprocess
execut = self.executable()
if not os.path.exists(execut):
- raise IOError('The path %s not exists, could be an erroneus path or '\
+ raise IOError('The path %s does not exist, it could be an erroneous path or '\
+ 'software') % execut
else:
self.write_mosaic_xml()
@@ -166,7 +166,7 @@
else:
subprocess.call([execut,'-i',self.tmplistfiles.name,'-o',self.out],
stderr = subprocess.STDOUT)
- return "The mosaic file %s is created" % self.out
+ return "The mosaic file %s has been created" % self.out
class processModis:
@@ -179,19 +179,19 @@
"""Initialization function :
hdfname = the full path to the hdf file
confile = the full path to the paramater file
- mrtpath = the full path to mrt directory where inside you have bin and
+ mrtpath = the full path to mrt directory which contains the bin and
data directories
"""
# check if the hdf file exists
if os.path.exists(hdfname):
self.name = hdfname
else:
- raise IOError('%s not exists' % hdfname)
+ raise IOError('%s does not exist' % hdfname)
# check if confile exists
if os.path.exists(confile):
self.conf = confile
else:
- raise IOError('%s not exists' % confile)
+ raise IOError('%s does not exist' % confile)
# check if mrtpath and subdirectories exists and set environment variables
if os.path.exists(mrtpath):
if os.path.exists(os.path.join(mrtpath,'bin')):
@@ -199,14 +199,14 @@
os.environ['PATH'] = "%s:%s" % (os.environ['PATH'],os.path.join(mrtpath,
'data'))
else:
- raise IOError('The path %s not exists' % os.path.join(mrtpath,'bin'))
+ raise IOError('The path %s does not exist' % os.path.join(mrtpath,'bin'))
if os.path.exists(os.path.join(mrtpath,'data')):
self.mrtpathdata = os.path.join(mrtpath,'data')
os.environ['MRTDATADIR'] = os.path.join(mrtpath,'data')
else:
- raise IOError('The path %s not exists' % os.path.join(mrtpath,'data'))
+ raise IOError('The path %s does not exist' % os.path.join(mrtpath,'data'))
else:
- raise IOError('The path %s not exists' % mrtpath)
+ raise IOError('The path %s does not exist' % mrtpath)
def executable(self):
"""Return the executable of resample MRT software
@@ -216,15 +216,15 @@
return os.path.join(self.mrtpathbin,'swath2grid')
elif sys.platform.count('win32') != -1:
if os.path.exists(os.path.join(self.mrtpathbin,'swath2grid.exe')):
- return os.path.join(self.mrtpath,'swath2grid.exe')
+ return os.path.join(self.mrtpathbin,'swath2grid.exe')
def run(self):
"""Exec the convertion process"""
import subprocess
execut = self.executable()
if not os.path.exists(execut):
- raise IOError('The path %s not exists, could be an erroneus path or '\
+ raise IOError('The path %s does not exist, it could be an erroneous path or '\
+ 'software') % execut
else:
subprocess.call([execut,'-pf=%s' % self.conf])
- return "The hdf file %s was converted" % self.name
+ return "The hdf file %s has been converted" % self.name
Modified: grass-addons/grass7/raster/r.modis/libmodis/downmodis.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/downmodis.py 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/libmodis/downmodis.py 2014-03-25 10:45:07 UTC (rev 59323)
@@ -19,427 +19,740 @@
#
##################################################################
-from datetime import *
+from datetime import date, timedelta
import os
import glob
import logging
import socket
from ftplib import FTP
import ftplib
+try:
+ import requests
+except:
+ pass
+import urllib2
+from HTMLParser import HTMLParser
+import re
+try:
+ import osgeo.gdal as gdal
+except ImportError:
+ try:
+ import gdal
+ except ImportError:
+ raise('Python GDAL library not found, please install python-gdal')
+
+
+def urljoin(*args):
+ """
+ Joins given arguments into a url. Trailing but not leading slashes are
+ stripped for each argument.
+ http://stackoverflow.com/a/11326230
+ """
+
+ return "/".join(map(lambda x: str(x).rstrip('/'), args))
+
+
+def getNewerVersion(oldFile, newFile):
+ """ Return newer version of a file
+
+ oldFile = one of the two similar file
+
+ newFile = one of the two similar file
+ """
+ oldFileSplit = oldFile.split('.')
+ newFileSplit = newFile.split('.')
+ if oldFileSplit[4] > newFileSplit[4]:
+ return oldFile
+ else:
+ return newFile
+
+
+def str2date(strin):
+ """Return a date object from a string
+
+ string = text string to return date (2012-10-04)
+
+ """
+ todaySplit = strin.split('-')
+ return date(int(todaySplit[0]), int(todaySplit[1]), int(todaySplit[2]))
+
+
+class modisHtmlParser(HTMLParser):
+ """A class to parse HTML"""
+ def __init__(self, fh):
+ """
+ {fh} must be a string returned by requests.content or
+ urllib2.urlopen().read()
+ """
+ HTMLParser.__init__(self)
+ self.fileids = []
+ self.feed(fh)
+
+ def handle_starttag(self, tag, attrs):
+ if tag == 'a':
+ attrD = dict(attrs)
+ self.fileids.append(attrD['href'].replace('/', ''))
+
+ def get_all(self):
+ """ Return everything """
+ return self.fileids
+
+ def get_dates(self):
+ """ Return a list of directories with date """
+ regex = re.compile('(\d{4})[/.-](\d{2})[/.-](\d{2})$')
+ return [elem for elem in self.fileids if regex.match(elem)]
+
+ def get_tiles(self, prod, tiles, jpeg=False):
+ """ Return a list of file to download """
+ finalList = []
+ for i in self.fileids:
+ name = i.split('.')
+ # distinguish jpeg files from hdf files by the number of index
+ # where find the tile index
+ if not name.count(prod):
+ continue
+ if not tiles and not (name.count('jpg') or name.count('BROWSE')):
+ finalList.append(i)
+ #is a jpeg of tiles number
+ if tiles:
+ if tiles.count(name[3]) == 1 and jpeg:
+ finalList.append(i)
+ #is a hdf of tiles number
+ elif tiles.count(name[2]) == 1:
+ finalList.append(i)
+ return finalList
+
+
class downModis:
- """A class to download MODIS data from NASA FTP repository"""
- def __init__(self,
- password,
+ """A class to download MODIS data from NASA FTP repository"""
+ def __init__(self,
destinationFolder,
+ password=None,
user="anonymous",
- url="e4ftl01.cr.usgs.gov",
+ url="http://e4ftl01.cr.usgs.gov",
tiles=None,
- path="MOLT/MOD11A1.005",
+ path="MOLT",
+ product="MOD11A1.005",
today=None,
enddate=None,
delta=10,
jpg=False,
- debug=False
+ debug=False,
+ timeout=30
):
- """Initialization function :
+ """Initialization function :
- password = is your password, usually your email address
+ destinationFolder = where the files will be stored
- destinationFolder = where the files will be stored
+ password = the password, it should be your email address to
+ connect to a FTP server.
+ Do not use this variable if the server is a HTTP server
- user = your username, by default anonymous
+ user = the user name, by default 'anonymous', used to connect
+ to a FTP server.
+ Do not use this variable if the server is a HTTP server
- url = the url where to download the MODIS data
+ url = the url from where to download the MODIS data, it can be FTP or
+ HTTP and it has to start with http:// or ftp://
- path = the directory where the data that you want to download are
- stored on the ftp server
+ path = the directory where the data that you want to download are
+ stored on the FTP server
- tiles = a list of tiles that you want to download, None == all tiles
+ product = the code of product to download, the code should be
+ identical to the one of the url
- today = the day to start downloading; in order to pass a date different
- from today use the format YYYY-MM-DD
+ tiles = a list of tiles to be downloaded, None == all tiles
- delta = timelag i.e. the number of days starting from today
- (backwards
+ today = the day to start downloading; in order to pass a date
+ different from today use the format YYYY-MM-DD
- Creates a ftp instance, connects user to ftp server and goes into the
- directory where the MODIS data are stored
- """
+ enddate = the day to end downloading; in order to pass a date
+ use the format YYYY-MM-DD
- # url modis
- self.url = url
- # user for download
- self.user = user
- # password for download
- self.password = password
- # directory where data are collected
- self.path = path
- # tiles to downloads
- if tiles:
- self.tiles = tiles.split(',')
- else:
- self.tiles = tiles
- # set destination folder
- if os.access(destinationFolder, os.W_OK):
- self.writeFilePath = destinationFolder
- else:
- raise IOError("Folder to store downloaded files does not exist or is not" \
- + "writeable")
- # return the name of product
- if len(self.path.split('/')) == 2:
- self.product = self.path.split('/')[1]
- elif len(self.path.split('/')) == 3:
- self.product = self.path.split('/')[2]
- # write a file with the name of file downloaded
- self.filelist = open(os.path.join(self.writeFilePath, 'listfile' \
- + self.product + '.txt'), 'w')
- # set jpg download
- self.jpeg = jpg
- # today
- self.today = today
- # force the last day
- self.enday = enddate
- # delta of days
- self.delta = delta
- # status of tile download
- self.status = True
- # for debug, you can download only xml files
- self.debug = debug
- # for logging
- LOG_FILENAME = os.path.join(self.writeFilePath, 'modis' \
- + self.product + '.log')
- LOGGING_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
- logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG, \
- format=LOGGING_FORMAT)
- self.nconnection = 0
+ delta = timelag i.e. the number of days starting from today
+ backwards
- def removeEmptyFiles(self):
- """Check if some file has size ugual 0"""
- year = str(date.today().year)
- pref = self.product.split('.')[0]
- files = glob.glob1(self.writeFilePath, '%s.A%s*' % (pref, year))
- for f in files:
- fil = os.path.join(self.writeFilePath, f)
- if os.path.getsize(fil) == 0:
- os.remove(fil)
+ jpeg = set to True if you want to download also the JPG overview file
- def connectFTP(self, ncon=20):
- """ Set connection to ftp server, move to path where data are stored
- and create a list of directories for all days
+ debug = set to True if you want to obtain debug information
- ncon = number maximum of test to connection at the ftp server
- """
- self.nconnection += 1
- try:
- # connect to ftp server
- self.ftp = FTP(self.url)
- self.ftp.login(self.user, self.password)
- # enter in directory
- self.ftp.cwd(self.path)
- self.dirData = []
- # return data inside directory
- self.ftp.dir(self.dirData.append)
- # reverse order of data for have first the nearest to today
- self.dirData.reverse()
- # check if dirData contain only directory, delete all files
- self.dirData = [elem.split()[-1] for elem in self.dirData if elem.startswith("d")]
- if self.debug==True:
- logging.debug("Open connection %s" % self.url)
- except (EOFError, ftplib.error_perm), e:
- logging.error('Error in connection: %s' % e)
- if self.nconnection <= ncon:
- self.connectFTP()
+ timeout = Timeout value for HTTP server
+ """
- def closeFTP(self):
- """ Close ftp connection """
- self.ftp.quit()
- self.filelist.close()
- if self.debug == True:
- logging.debug("Close connection %s" % self.url)
+ # url modis
+ if 'ftp' in url:
+ self.url = url.replace('ftp://', '').rstrip('/')
+ self.urltype = 'ftp'
+ elif 'http' in url:
+ self.url = url
+ self.urltype = 'http'
+ else:
+ raise IOError("The url should contain 'ftp://' or 'http://'")
+ # user for download using ftp
+ self.user = user
+ # password for download using ftp
+ self.password = password
+ # the product
+ self.product = product
+ self.product_code = product.split('.')[0]
+ # directory where data are collected
+ self.path = urljoin(path, self.product)
+ # tiles to downloads
+ if tiles:
+ self.tiles = tiles.split(',')
+ else:
+ self.tiles = tiles
+ # set destination folder
+ if os.access(destinationFolder, os.W_OK):
+ self.writeFilePath = destinationFolder
+ else:
+ raise IOError("Folder to store downloaded files does not exist" \
+ + " or is not writeable")
+ # return the name of product
+ if len(self.path.split('/')) == 2:
+ self.product = self.path.split('/')[1]
+ elif len(self.path.split('/')) == 3:
+ self.product = self.path.split('/')[2]
+ # write a file with the name of file downloaded
+ self.filelist = open(os.path.join(self.writeFilePath, 'listfile' \
+ + self.product + '.txt'), 'w')
+ # set jpg download
+ self.jpeg = jpg
+ # today
+ self.today = today
+ # force the last day
+ self.enday = enddate
+ # delta of days
+ self.delta = delta
+ # status of tile download
+ self.status = True
+ # for debug, you can download only xml files
+ self.debug = debug
+ # for logging
+ log_filename = os.path.join(self.writeFilePath, 'modis' \
+ + self.product + '.log')
+ log_format = '%(asctime)s - %(levelname)s - %(message)s'
+ logging.basicConfig(filename=log_filename, level=logging.DEBUG, \
+ format=log_format)
+ self.nconnection = 0
+ self.timeout = timeout
+ self.fileInPath = []
+ # add all files in the directory where we will save new modis data
+ for f in os.listdir(self.writeFilePath):
+ if os.path.isfile(os.path.join(self.writeFilePath, f)):
+ self.fileInPath.append(f)
+ gdal.UseExceptions()
+ gdalDriver = gdal.GetDriverByName('HDF4')
+ if not gdalDriver:
+ raise IOError("GDAL installation has no support for HDF4, please update GDAL")
- def setDirectoryIn(self, day):
- """ Enter in the directory of the day """
- try:
- self.ftp.cwd(day)
- except (ftplib.error_reply, socket.error), e:
- logging.error("Error %s entering in directory %s" % e, day)
- self.setDirectoryIn(day)
+ def removeEmptyFiles(self):
+ """Check if some file has size equal to 0"""
+ year = str(date.today().year)
+ pref = self.product.split('.')[0]
+ files = glob.glob1(self.writeFilePath, '%s.A%s*' % (pref, year))
+ for f in files:
+ fil = os.path.join(self.writeFilePath, f)
+ if os.path.getsize(fil) == 0:
+ os.remove(fil)
- def setDirectoryOver(self):
- """ Come back to old path """
- try:
- self.ftp.cwd('..')
- except (ftplib.error_reply, socket.error), e:
- logging.error("Error %s when try to come back" % e)
- self.setDirectoryOver()
+ def connect(self, ncon=20):
+ """Connect to the server and fill the dirData variable
- def _str2date(self, strin):
- """Return a date object from a string
+ ncon = maximum number of attempts to connect to the HTTP server
+ before failing
+ """
+ if self.urltype == 'ftp':
+ self._connectFTP(ncon)
+ elif self.urltype == 'http':
+ self._connectHTTP(ncon)
- string = text string to return date (2012-10-04)
- """
- todaySplit = strin.split('-')
- return date(int(todaySplit[0]), int(todaySplit[1]), int(todaySplit[2]))
+ def _connectHTTP(self, ncon=20):
+ """ Connect to HTTP server, create a list of directories for all days
- def _getToday(self):
- """Return the first day for start to download"""
- if self.today == None:
- # set today variable to today
- self.today = date.today()
- else:
- # set today variable to data pass from user
- self.today = self._str2date(self.today)
- # set enday variable to data
- if self.enday != None:
- self.enday = self._str2date(self.enday)
+ ncon = maximum number of attempts to connect to the HTTP server
+ before failing
+ """
+ self.nconnection += 1
+ try:
+ try:
+ http = requests.get(urljoin(self.url, self.path),
+ timeout=self.timeout)
+ self.dirData = modisHtmlParser(http.content).get_dates()
+ except:
+ http = urllib2.urlopen(urljoin(self.url, self.path),
+ timeout=self.timeout)
+ self.dirData = modisHtmlParser(http.read()).get_dates()
+ self.dirData.reverse()
+ except:
+ logging.error('Error in connection')
+ if self.nconnection <= ncon:
+ self._connectHTTP()
- def getListDays(self):
- """Return a list of all selected days"""
- self._getToday()
+ def _connectFTP(self, ncon=20):
+ """ Set connection to ftp server, move to path where data are stored
+ and create a list of directories for all days
- today_s = self.today.strftime("%Y.%m.%d")
- # dirData is reverse sorted
- for i, d in enumerate(self.dirData):
- if d <= today_s:
- today_index = i
- break
- else:
- logging.error("No data available for requested days")
- import sys
- sys.exit()
- days = self.dirData[today_index:][:self.delta]
- # this is useful for 8/16 days data, delta could download more images
- # that you want
- if self.enday != None:
- enday_s = self.enday.strftime("%Y.%m.%d")
- delta = 0
- # it make a for cicle from the last value and find the internal delta
- #to remove file outside temporaly range
- for i in range(-(len(days)), 0):
- if days[i] < enday_s:
- break
- else:
- delta = delta + 1
- # remove days outside new delta
- days = days[:delta]
- return days
+ ncon = maximum number of attempts to connect to the FTP server
+ before failing
- def getAllDays(self):
- """Return a list of all days"""
- return self.dirData
+ """
+ self.nconnection += 1
+ try:
+ # connect to ftp server
+ self.ftp = FTP(self.url)
+ self.ftp.login(self.user, self.password)
+ # enter in directory
+ self.ftp.cwd(self.path)
+ self.dirData = []
+ # return data inside directory
+ self.ftp.dir(self.dirData.append)
+ # reverse order of data for have first the nearest to today
+ self.dirData.reverse()
+ # check if dirData contain only directory, delete all files
+ self.dirData = [elem.split()[-1] for elem in self.dirData if elem.startswith("d")]
+ if self.debug == True:
+ logging.debug("Open connection %s" % self.url)
+ except (EOFError, ftplib.error_perm), e:
+ logging.error('Error in connection: %s' % e)
+ if self.nconnection <= ncon:
+ self._connectFTP()
- def getFilesList(self):
- """ Create a list of files to download, it is possible choose to download
- also the jpeg files or only the hdf files"""
- def cicle_file(jpeg=False, tile=True):
- finalList = []
- for i in self.listfiles:
- File = i.split('.')
- # distinguish jpeg files from hdf files by the number of index
- # where find the tile index
- if not tile and not (File.count('jpg') or File.count('BROWSE')):
- finalList.append(i)
- if tile and self.tiles.count(File[3]) == 1 and jpeg: #is a jpeg of tiles number
- finalList.append(i)
- if tile and self.tiles.count(File[2]) == 1: #is a hdf of tiles number
- finalList.append(i)
- return finalList
+ def closeFTP(self):
+ """ Close ftp connection """
+ self.ftp.quit()
+ self.closeFilelist()
+ if self.debug == True:
+ logging.debug("Close connection %s" % self.url)
+ def closeFilelist(self):
+ """ Function to close the file where write the downloaded files """
+ self.filelist.close()
+
+ def setDirectoryIn(self, day):
+ """ Enter in the directory of the day """
+ try:
+ self.ftp.cwd(day)
+ except (ftplib.error_reply, socket.error), e:
+ logging.error("Error %s entering in directory %s" % e, day)
+ self.setDirectoryIn(day)
+
+ def setDirectoryOver(self):
+ """ Come back to old path """
+ try:
+ self.ftp.cwd('..')
+ except (ftplib.error_reply, socket.error), e:
+ logging.error("Error %s when try to come back" % e)
+ self.setDirectoryOver()
+
+ def _getToday(self):
+ """Return the first day for start to download"""
+ if self.today == None:
+ # set today variable to today
+ self.today = date.today()
+ else:
+ # set today variable to data pass from user
+ self.today = str2date(self.today)
+ # set enday variable to data
+ if self.enday != None:
+ self.enday = str2date(self.enday)
+ if self.today and self.enday:
+ if self.today < self.enday:
+ raise IOError("The first day should be newer than the end date")
+ D = self.today - self.enday
+ self.delta = D.days
+
+ def getListDays(self):
+ """Return a list of all selected days"""
+ self._getToday()
+
+ today_s = self.today.strftime("%Y.%m.%d")
+ # dirData is reverse sorted
+ for i, d in enumerate(self.dirData):
+ if d <= today_s:
+ today_index = i
+ break
+# else:
+# logging.error("No data available for requested days")
+# import sys
+# sys.exit()
+ days = self.dirData[today_index:][:self.delta]
+ # this is useful for 8/16 days data, delta could download more images
+ # that you want
+ if self.enday != None:
+ enday_s = self.enday.strftime("%Y.%m.%d")
+ delta = 0
+ # it makes a for cycle from the last value and finds the internal
+ # delta to remove files outside the temporal range
+ for i in range(-(len(days)), 0):
+ if days[i] < enday_s:
+ break
+ else:
+ delta = delta + 1
+ # remove days outside new delta
+ days = days[:delta]
+ return days
+
+ def getAllDays(self):
+ """Return a list of all days"""
+ return self.dirData
+
+ def getFilesList(self, day=None):
+ """ Creates a list of files to download, it is possible to choose to
+ download also the JPG overview files or only the HDF files
+
+ day = the date of data
+
+ """
+ if self.urltype == 'http':
+ return self._getFilesListHTTP(day)
+ elif self.urltype == 'ftp':
+ return self._getFilesListFTP()
+
+ def _getFilesListHTTP(self, day):
+ """ Creates a list of files to download from http server, it is
+ possible to choose to download also the JPG overview files or
+ only the HDF files
+
+ day = the date of data
+
+ """
# return the file's list inside the directory of each day
- try:
- self.listfiles = self.ftp.nlst()
- # download also jpeg
- if self.jpeg:
- # finallist is ugual to all file with jpeg file
- if not self.tiles:
- finalList = self.listfiles
- # finallist is ugual to tiles file with jpeg file
+ try:
+ url = urljoin(self.url, self.path, day)
+ if self.debug == True:
+ logging.debug("The url is: %s" % url)
+ try:
+ http = modisHtmlParser(requests.get(url,
+ timeout=self.timeout).content)
+ except:
+ http = modisHtmlParser(urllib2.urlopen(url,
+ timeout=self.timeout).read())
+ # download also jpeg
+ if self.jpeg:
+ # finallist is ugual to all file with jpeg file
+ if not self.tiles:
+ finalList = http.get_all()
+ # finallist is ugual to tiles file with jpeg file
+ else:
+ finalList = http.get_tiles(self.product_code,
+ self.tiles, jpeg=True)
+ # not download jpeg
+ else:
+ finalList = http.get_tiles(self.product_code, self.tiles)
+ if self.debug == True:
+ logging.debug("The number of file to download is: %i" % len(finalList))
+ return finalList
+ except (socket.error), e:
+ logging.error("Error %s when try to receive list of files" % e)
+ self._getFilesListHTTP(day)
+
+ def _getFilesListFTP(self):
+ """ Create a list of files to download from FTP server, it is possible
+ choose to download also the JPG overview files or only the HDF files
+ """
+ def cicle_file(jpeg=False):
+ """Check the type of file"""
+ finalList = []
+ for i in self.listfiles:
+ name = i.split('.')
+ # distinguish jpeg files from hdf files by the number of index
+ # where find the tile index
+ if not self.tiles and not (name.count('jpg') or
+ name.count('BROWSE')):
+ finalList.append(i)
+ #is a jpeg of tiles number
+ if self.tiles:
+ if self.tiles.count(name[3]) == 1 and jpeg:
+ finalList.append(i)
+ #is a hdf of tiles number
+ elif self.tiles.count(name[2]) == 1:
+ finalList.append(i)
+ return finalList
+
+ # return the file's list inside the directory of each day
+ try:
+ self.listfiles = self.ftp.nlst()
+ # download also jpeg
+ if self.jpeg:
+ # finallist is ugual to all file with jpeg file
+ if not self.tiles:
+ finalList = self.listfiles
+ # finallist is ugual to tiles file with jpeg file
+ else:
+ finalList = cicle_file(jpeg=True)
+ # not download jpeg
+ else:
+ finalList = cicle_file()
+ if self.debug == True:
+ logging.debug("The number of file to download is: %i" % len(finalList))
+ return finalList
+ except (ftplib.error_reply, socket.error), e:
+ logging.error("Error %s when trying to receive list of files" % e)
+ self._getFilesListFTP()
+
+ def checkDataExist(self, listNewFile, move=0):
+ """ Check if a file already exists in the directory of download
+
+ listNewFile = list of all files, returned by getFilesList function
+
+ move = it is useful to know if a function is called from download
+ or move function
+ """
+ # different return if this method is used from downloadsAllDay() or
+ # moveFile()
+ if move == 0:
+ listOfDifferent = list(set(listNewFile) - set(self.fileInPath))
+ elif move == 1:
+ listOfDifferent = list(set(self.fileInPath) - set(listNewFile))
+ return listOfDifferent
+
+ def checkFile(self, filHdf):
+ """Check by using GDAL to be sure that the download went ok
+
+ filHdf = name of the HDF file to check
+ """
+ try:
+ gdal.Open(filHdf)
+ return 0
+ except (RuntimeError), e:
+ logging.error(e)
+ return 1
+
+ def downloadFile(self, filDown, filHdf, day):
+ """Download the single file
+
+ filDown = name of the file to download
+
+ filHdf = name of the file to write to
+
+ day = the day in format YYYY.MM.DD
+ """
+ if self.urltype == 'http':
+ self._downloadFileHTTP(filDown, filHdf, day)
+ elif self.urltype == 'ftp':
+ self._downloadFileFTP(filDown, filHdf)
+
+ def _downloadFileHTTP(self, filDown, filHdf, day):
+ """Download the single file from http server
+
+ filDown = name of the file to download
+
+ filSave = name of the file to write to
+
+ day = the day in format YYYY.MM.DD
+ """
+ filSave = open(filHdf, "wb")
+ orig_size = None
+ try:
+ try:
+ http = requests.get(urljoin(self.url, self.path, day, filDown))
+ orig_size = http.headers['content-length']
+ filSave.write(http.content)
+ except:
+ http = urllib2.urlopen(urljoin(self.url, self.path, day,
+ filDown))
+ orig_size = http.headers['content-length']
+ filSave.write(http.read())
+ filSave.close()
+ #if it have an error it try to download again the file
+ except:
+ logging.error("Cannot download %s. Retrying.." % filDown)
+ filSave.close()
+ os.remove(filSave.name)
+ self._downloadFileHTTP(filDown, filHdf, day)
+ transf_size = os.path.getsize(filSave.name)
+ if orig_size and int(orig_size) == int(transf_size):
+ if filHdf.find('.xml') == -1:
+ if self.checkFile(filHdf):
+ os.remove(filSave.name)
+ self._downloadFileHTTP(filDown, filHdf, day)
+ else:
+ self.filelist.write("%s\n" % filDown)
+ if self.debug == True:
+ logging.debug("File %s downloaded correctly" % filDown)
+ return 0
+ else:
+ self.filelist.write("%s\n" % filDown)
+ if self.debug == True:
+ logging.debug("File %s downloaded correctly" % filDown)
+ return 0
else:
- finalList = cicle_file(jpeg=True)
- # not download jpeg
- else:
- if not self.tiles:
- finalList = cicle_file(tile=False)
+ if not orig_size:
+ logging.warning("Different size for file %s - original data:"\
+ " None, downloaded: %s" % (filDown,
+ transf_size))
+ else:
+ logging.warning("Different size for file %s - original data:"\
+ " %s, downloaded: %s" % (filDown, orig_size,
+ transf_size))
+ os.remove(filSave.name)
+ self._downloadFileHTTP(filDown, filHdf, day)
+
+ def _downloadFileFTP(self, filDown, filHdf):
+ """Download the single file from ftp server
+
+ filDown = name of the file to download
+
+ filSave = name of the file to write to
+ """
+ filSave = open(filHdf, "wb")
+ try:
+ self.ftp.retrbinary("RETR " + filDown, filSave.write)
+ self.filelist.write("%s\n" % filDown)
+ if self.debug == True:
+ logging.debug("File %s downloaded" % filDown)
+ #if it have an error it try to download again the file
+ except (ftplib.error_reply, socket.error, ftplib.error_temp, EOFError), e:
+ logging.error("Cannot download %s, the error was '%s'. Retrying..." % (
+ filDown, e))
+ filSave.close()
+ os.remove(filSave.name)
+ try:
+ self.ftp.pwd()
+ except (ftplib.error_temp, EOFError), e:
+ self._connectFTP()
+ self._downloadFileFTP(filDown, filHdf)
+ filSave.close()
+ orig_size = self.ftp.size(filDown)
+ transf_size = os.path.getsize(filSave.name)
+ if orig_size == transf_size:
+ return 0
else:
- finalList = cicle_file()
- if self.debug == True:
- logging.debug("The number of file to download is: %i" % len(finalList))
- return finalList
- except (ftplib.error_reply, socket.error), e:
- logging.error("Error %s when try to receive list of files" % e)
- self.getFilesList()
+ logging.warning("Different size for file %s - original data: %s," \
+ " downloaded: %s" % (filDown, orig_size,
+ transf_size))
+ os.remove(filSave.name)
+ self._downloadFileFTP(filDown, filHdf)
- def checkDataExist(self,listNewFile, move = 0):
- """ Check if a file already exists in the directory of download
+ def dayDownload(self, day, listFilesDown):
+ """ Download the tiles listed in listFilesDown
- listNewFile = list of all files, returned by getFilesList function
+ listFilesDown = list of the files to download, returned by
+ checkDataExist function
+ """
+ # for each file in files' list
+ for i in listFilesDown:
+ fileSplit = i.split('.')
+ filePrefix = "%s.%s.%s.%s" % (fileSplit[0], fileSplit[1],
+ fileSplit[2], fileSplit[3])
- move = it is useful to know if a function is called from download or move function
- """
- fileInPath = []
- # add all files in the directory where we will save new modis data
- for f in os.listdir(self.writeFilePath):
- if os.path.isfile(os.path.join(self.writeFilePath, f)):
- fileInPath.append(f)
- # different return if this method is used from downloadsAllDay() or
- # moveFile()
- if move == 0:
- listOfDifferent = list(set(listNewFile) - set(fileInPath))
- elif move == 1:
- listOfDifferent = list(set(fileInPath) - set(listNewFile))
- return listOfDifferent
+ # check data exists in the return directory
+ oldFile = glob.glob1(self.writeFilePath, filePrefix + "*" \
+ + fileSplit[-1])
+ numFiles = len(oldFile)
+ if numFiles == 0:
+ file_hdf = os.path.join(self.writeFilePath, i)
+ elif numFiles == 1:
+ # check the version of file
+ fileDown = getNewerVersion(oldFile[0], i)
+ if fileDown != oldFile[0]:
+ os.remove(os.path.join(self.writeFilePath, oldFile[0]))
+ file_hdf = os.path.join(self.writeFilePath, fileDown)
+ elif numFiles > 1:
+ logging.error("There are to many files for %s" % i)
+ if numFiles == 0 or (numFiles == 1 and fileDown != oldFile[0]):
+ self.downloadFile(i, file_hdf, day)
- def getNewerVersion(self, oldFile, newFile):
- """ Return newer version of a file
-
- oldFile = one of the two similar file
-
- newFile = one of the two similar file
- """
- oldFileSplit = oldFile.split('.')
- newFileSplit = newFile.split('.')
- if oldFileSplit[4] > newFileSplit[4]:
- return oldFile
- else:
- return newFile
+ def downloadsAllDay(self, clean=False, allDays=False):
+ """Download all the tiles for the requested days
- def _downloadFile(self, filDown, filHdf):
- """Download the single file
-
- filDown = name of the file to download
-
- filSave = name of the file to write
- """
- filSave = open(filHdf, "wb")
- try:
- self.ftp.retrbinary("RETR " + filDown, filSave.write)
- self.filelist.write("%s\n" % filDown)
- if self.debug==True:
- logging.debug("File %s downloaded" % filDown)
- #if it have an error it try to download again the file
- except (ftplib.error_reply, socket.error, ftplib.error_temp, EOFError), e:
- logging.error("Cannot download %s, retry.." % filDown)
- filSave.close()
- os.remove(filSave.name)
- try:
- self.ftp.pwd()
- except (ftplib.error_temp, EOFError), e:
- self.connectFTP()
- self._downloadFile(filDown, filHdf)
- filSave.close()
- orig_size = self.ftp.size(filDown)
- transf_size = os.path.getsize(filSave.name)
- if orig_size == transf_size:
- return 0
- else:
- logging.warning("Different size for file %s - original data: %s, downloaded: %s" %
- (filDown, orig_size, transf_size))
- os.remove(filSave.name)
- self._downloadFile(filDown,filHdf)
+ clean = if True remove empty files before downloading
- def dayDownload(self, listFilesDown):
- """ Downloads tiles are in files_hdf_consider
+ allDays = if True download data for all the available days
+ """
+ #return the days to download
+ if clean:
+ self.removeEmptyFiles()
+ if allDays:
+ days = self.getAllDays()
+ else:
+ days = self.getListDays()
+ if self.debug == True:
+ logging.debug("The number of days to download is: %i" % len(days))
+ if self.urltype == 'http':
+ self._downloadsAllDayHTTP(days)
+ elif self.urltype == 'ftp':
+ self._downloadsAllDayFTP(days)
- listFilesDown = list of the files to download, returned by checkDataExist function
- """
- # for each file in files' list
- for i in listFilesDown:
- fileSplit = i.split('.')
- filePrefix = fileSplit[0] + '.' + fileSplit[1] + '.' + fileSplit[2] \
- + '.' + fileSplit[3]
- #for debug, download only xml
- if (self.debug and fileSplit[-1] == 'xml') or not self.debug:
- # check data exists in the return directory, if it doesn't exists
- oldFile = glob.glob1(self.writeFilePath, filePrefix + "*" \
- + fileSplit[-1])
- numFiles = len(oldFile)
- if numFiles == 0:
- file_hdf = os.path.join(self.writeFilePath, i)
- elif numFiles == 1:
- # check the version of file
- fileDown = self.getNewerVersion(oldFile[0], i)
- if fileDown != oldFile[0]:
- os.remove(os.path.join(self.writeFilePath, oldFile[0]))
- file_hdf = os.path.join(self.writeFilePath, fileDown)
- elif numFiles > 1:
- logging.error("There are to much files for %s" % i)
- #raise EOFError("There are to much file with the same prefix")
- if numFiles == 0 or (numFiles == 1 and fileDown != oldFile[0]):
- self._downloadFile(i, file_hdf)
+ def _downloadsAllDayHTTP(self, days):
+ """ Downloads all the tiles considered from HTTP server"""
- def downloadsAllDay(self, clean=False, allDays=False):
- """ Downloads all the tiles considered """
- #return the days to download
- if clean:
- self.removeEmptyFiles()
- if allDays:
- days = self.getAllDays()
- else:
- days = self.getListDays()
- if self.debug == True:
- logging.debug("The number of days to download is: %i" % len(days))
- #for each day
- for day in days:
- #enter in the directory of day
- self.setDirectoryIn(day)
- #obtain list of all files
- listAllFiles = self.getFilesList()
- #obtain list of files to download
- listFilesDown = self.checkDataExist(listAllFiles)
- #download files for a day
- self.dayDownload(listFilesDown)
- self.setDirectoryOver()
- self.closeFTP()
- if self.debug == True:
- logging.debug("Download terminated")
- return 0
+ #for each day
+ for day in days:
+ #obtain list of all files
+ listAllFiles = self.getFilesList(day)
+ #obtain list of files to download
+ listFilesDown = self.checkDataExist(listAllFiles)
+ #download files for a day
+ self.dayDownload(day, listFilesDown)
+ self.closeFilelist()
+ if self.debug == True:
+ logging.debug("Download terminated")
+ return 0
- def debugLog(self):
- """Function to create the debug file"""
- # create logger
- logger = logging.getLogger("PythonLibModis debug")
- logger.setLevel(logging.DEBUG)
- # create console handler and set level to debug
- ch = logging.StreamHandler()
- ch.setLevel(logging.DEBUG)
- # create formatter
- formatter = logging.Formatter("%(asctime)s - %(name)s - " \
- + "%(levelname)s - %(message)s")
- # add formatter to ch
- ch.setFormatter(formatter)
- # add ch to logger
- logger.addHandler(ch)
- return logger
+ def _downloadsAllDayFTP(self, days):
+ """ Downloads all the tiles considered from FTP server"""
+ #for each day
+ for day in days:
+ #enter in the directory of day
+ self.setDirectoryIn(day)
+ #obtain list of all files
+ listAllFiles = self.getFilesList()
+ #obtain list of files to download
+ listFilesDown = self.checkDataExist(listAllFiles)
+ #download files for a day
+ self.dayDownload(day, listFilesDown)
+ self.setDirectoryOver()
+ self.closeFTP()
+ if self.debug == True:
+ logging.debug("Download terminated")
+ return 0
- def debugDays(self):
- """This function is useful to debug the number of days"""
- logger = debugLog()
- days = self.getListDays()
- # if lenght of list of days and the delta of day they are different
- if len(days) != self.delta:
- # for each day
- for i in range(1,self.delta+1):
- # calculate the current day
- delta = timedelta(days = i)
- day = self.today - delta
- day = day.strftime("%Y.%m.%d")
- # check if day is in the days list
- if day not in days:
- logger.critical("This day %s is not present on list" % day)
- # the lenght of list of days and delta are ugual
- else:
- logger.info("All right!!")
+ def debugLog(self):
+ """Function to create the debug file"""
+ # create logger
+ logger = logging.getLogger("PythonLibModis debug")
+ logger.setLevel(logging.DEBUG)
+ # create console handler and set level to debug
+ ch = logging.StreamHandler()
+ ch.setLevel(logging.DEBUG)
+ # create formatter
+ formatter = logging.Formatter("%(asctime)s - %(name)s - " \
+ + "%(levelname)s - %(message)s")
+ # add formatter to ch
+ ch.setFormatter(formatter)
+ # add ch to logger
+ logger.addHandler(ch)
+ return logger
- def debugMaps(self):
- """This function is useful to debug the number of maps to download for
- each day"""
- logger = debugLog()
- days = self.getListDays()
- for day in days:
- self.setDirectoryIn(day)
- listAllFiles = self.getFilesList()
- string = day + ": " + str(len(listAllFiles)) + "\n"
- logger.debug(string)
- self.setDirectoryOver()
\ No newline at end of file
+ def debugDays(self):
+ """This function is useful to debug the number of days"""
+ logger = self.debugLog()
+ days = self.getListDays()
+ # if the length of the days list and the day delta differ
+ if len(days) != self.delta:
+ # for each day
+ for i in range(1, self.delta + 1):
+ # calculate the current day
+ delta = timedelta(days=i)
+ day = self.today - delta
+ day = day.strftime("%Y.%m.%d")
+ # check if day is in the days list
+ if day not in days:
+ logger.critical("This day %s is not present on list" % day)
+ # the length of the days list and the delta are equal
+ else:
+ logger.info("All right!!")
+
+ def debugMaps(self):
+ """This function is useful to debug the number of maps to download for
+ each day"""
+ logger = self.debugLog()
+ days = self.getListDays()
+ for day in days:
+ listAllFiles = self.getFilesList(day)
+ string = day + ": " + str(len(listAllFiles)) + "\n"
+ logger.debug(string)
Modified: grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/libmodis/parsemodis.py 2014-03-25 10:45:07 UTC (rev 59323)
@@ -38,7 +38,7 @@
class parseModis:
- """Class to parse MODIS xml files, it also can create the parameter
+ """Class to parse MODIS xml files, it can also create the parameter
configuration file for resampling MODIS DATA with the MRT software or
convertmodis Module
"""
@@ -53,13 +53,13 @@
# hdf name
self.hdfname = filename
else:
- raise IOError('%s not exists' % self.hdfname)
+ raise IOError('%s does not exist' % filename)
if os.path.exists(self.hdfname + '.xml'):
# xml hdf name
self.xmlname = self.hdfname + '.xml'
else:
- raise IOError('%s not exists' % self.hdfname + '.xml')
+ raise IOError('%s does not exist' % self.hdfname + '.xml')
# tif name for the output file for resample MRT software
self.tifname = self.hdfname.replace('.hdf', '.tif')
@@ -253,9 +253,9 @@
bound=None
):
"""Create the parameter file to use with resample MRT software to create
- tif file
+ tif (geotiff) file
- spectral = the spectral subset to be used, look the product table to
+ spectral = the spectral subset to be used, see the product table to
understand the layer that you want use. For example:
- NDVI ( 1 1 1 0 0 0 0 0 0 0 0 0) copy only layer NDVI, EVI
@@ -265,10 +265,10 @@
res = the resolution for the output file, it must be set in the map
unit of output projection system. The software will use the
- original resolution of input file if res it isn't set
+ original resolution of input file if res not set
- output = the output name, if it doesn't set will use the prefix name
- of input hdf file
+ output = the output name, if not set the prefix name
+ of input hdf file will be used
utm = the UTM zone if projection system is UTM
@@ -377,7 +377,7 @@
bound=None
):
"""Create the parameter file to use with resample MRT software to create
- tif file
+ tif (geotiff) file
sds = Name of band/s (Science Data Set) to resample
@@ -385,10 +385,10 @@
res = the resolution for the output file, it must be set in the map
unit of output projection system. The software will use the
- original resolution of input file if res it isn't set
+ original resolution of input file if res not set
- output = the output name, if it doesn't set will use the prefix name
- of input hdf file
+ output = the output name, if not set the prefix name
+ of input hdf file will be used
sphere = Output sphere number. Valid options are:
- 0=Clarke 1866
Modified: grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py
===================================================================
--- grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/libmodis/rmodislib.py 2014-03-25 10:45:07 UTC (rev 59323)
@@ -50,8 +50,8 @@
def __init__(self, value=None):
# url to download products
- urlbase = 'e4ftl01.cr.usgs.gov'
- usrsnow = 'n4ftl01u.ecs.nasa.gov'
+ urlbase = 'http://e4ftl01.cr.usgs.gov'
+ usrsnow = 'ftp://n4ftl01u.ecs.nasa.gov'
### values of lst product:
lst_spec = '( 1 0 0 0 1 0 0 0 0 0 0 0 )'
lst_specqa = '( 1 1 0 0 1 1 0 0 0 0 0 0 )'
@@ -88,83 +88,96 @@
'.sur_refl_b07': '.sur_refl_qc_500m'}
self.prod = value
- lst = {'lst_aqua_daily_1000': {'url': urlbase,
- 'folder': 'MOLA/MYD11A1.005',
+ lst = {'lst_aqua_daily_1000': {'url': urlbase,
+ 'folder': 'MOLA/',
+ 'prod': 'MYD11A1.005',
'spec': lst_spec, 'spec_qa': lst_specqa,
'suff': lst1km_suff, 'res': 1000,
'color': lst_color
},
- 'lst_terra_daily_1000': {'url': urlbase,
- 'folder': 'MOLT/MOD11A1.005',
+ 'lst_terra_daily_1000': {'url': urlbase,
+ 'folder': 'MOLT/',
+ 'prod': 'MOD11A1.005',
'spec': lst_spec, 'spec_qa': lst_specqa,
'suff': lst1km_suff, 'res': 1000,
'color': lst_color
},
- 'lst_terra_eight_1000': {'url': urlbase,
- 'folder': 'MOLT/MOD11A2.005',
+ 'lst_terra_eight_1000': {'url': urlbase,
+ 'folder': 'MOLT/',
+ 'prod': 'MOD11A2.005',
'spec': lst_spec, 'spec_qa': lst_specqa,
'suff': lst1km_suff, 'res': 1000,
'color': lst_color
},
- 'lst_aqua_eight_1000': {'url': urlbase,
- 'folder': 'MOLA/MYD11A2.005',
+ 'lst_aqua_eight_1000': {'url': urlbase,
+ 'folder': 'MOLA/',
+ 'prod': 'MYD11A2.005',
'spec': lst_spec, 'spec_qa': lst_specqa,
'suff': lst1km_suff, 'res': 1000,
'color': lst_color
},
- 'lst_terra_daily_6000': {'url': urlbase,
- 'folder': 'MOLT/MOD11B1.005',
+ 'lst_terra_daily_6000': {'url': urlbase,
+ 'folder': 'MOLT/',
+ 'prod': 'MOD11B1.005',
'spec': lst_spec, 'spec_qa': lst_specqa,
'suff': lst6km_suff, 'res': 6000,
'color': lst_color
},
- 'lst_aqua_daily_6000': {'url': urlbase,
- 'folder': 'MOLA/MYD11B1.005',
+ 'lst_aqua_daily_6000': {'url': urlbase,
+ 'folder': 'MOLA/',
+ 'prod': 'MYD11B1.005',
'spec': lst_spec, 'spec_qa': lst_specqa,
'suff': lst6km_suff, 'res': 6000,
'color': lst_color
},
}
- vi = {'ndvi_terra_sixteen_250': {'url': urlbase,
- 'folder': 'MOLT/MOD13Q1.005',
+ vi = {'ndvi_terra_sixteen_250': {'url': urlbase,
+ 'folder': 'MOLT/',
+ 'prod': 'MOD13Q1.005',
'spec': vi_spec, 'spec_qa': vi_specqa,
'suff': vi250m_suff, 'res': 250,
'color': vi_color
},
- 'ndvi_aqua_sixteen_250': {'url': urlbase,
- 'folder': 'MOLA/MYD13Q1.005',
+ 'ndvi_aqua_sixteen_250': {'url': urlbase,
+ 'folder': 'MOLA/',
+ 'prod': 'MYD13Q1.005',
'spec': vi_spec, 'spec_qa': vi_specqa,
'suff': vi250m_suff, 'res': 250,
'color': vi_color
},
- 'ndvi_terra_sixteen_500': {'url': urlbase,
- 'folder': 'MOLT/MOD13A1.005',
+ 'ndvi_terra_sixteen_500': {'url': urlbase,
+ 'folder': 'MOLT/',
+ 'prod': 'MOD13A1.005',
'spec': vi_spec, 'spec_qa': vi_specqa,
'suff': vi1km_suff, 'res': 500,
'color': vi_color
},
- 'ndvi_aqua_sixteen_500': {'url': urlbase,
- 'folder': 'MOLA/MYD13A1.005',
+ 'ndvi_aqua_sixteen_500': {'url': urlbase,
+ 'folder': 'MOLA/',
+ 'prod': 'MYD13A1.005',
'spec': vi_spec, 'spec_qa': vi_specqa,
'suff': vi500m_suff, 'res': 500,
'color': vi_color
},
- 'ndvi_terra_sixteen_1000': {'url': urlbase,
- 'folder': 'MOLT/MOD13A2.005',
+ 'ndvi_terra_sixteen_1000': {'url': urlbase,
+ 'folder': 'MOLT/',
+ 'prod': 'MOD13A2.005',
'spec': vi_spec, 'spec_qa': vi_specqa,
'suff': vi500m_suff, 'res': 1000,
'color': vi_color
},
- 'ndvi_aqua_sixteen_1000': {'url': urlbase,
- 'folder': 'MOLA/MYD13A2.005',
+ 'ndvi_aqua_sixteen_1000': {'url': urlbase,
+ 'folder': 'MOLA/',
+ 'prod': 'MYD13A2.005',
'spec': vi_spec, 'spec_qa': vi_specqa,
'suff': vi1km_suff, 'res': 1000,
'color': vi_color
}
}
surf_refl = {'surfreflec_terra_eight_500': {'url': urlbase,
- 'folder': 'MOLT/MOD09A1.005',
+ 'folder': 'MOLT/',
+ 'prod': 'MOD09A1.005',
'spec': surf_spec,
'spec_qa': surf_specqa,
'res': 500,
@@ -172,38 +185,42 @@
'suff': surf_suff
},
'surfreflec_aqua_eight_500': {'url': urlbase,
- 'folder': 'MOLA/MYD09A1.005',
+ 'folder': 'MOLA/',
+ 'prod': 'MYD09A1.005',
'spec': surf_spec,
'spec_qa': surf_specqa,
'res': 500,
'color': snow_color,
'suff': surf_suff
}
-
}
snow = {'snow_terra_daily_500': {'url': usrsnow,
- 'folder': 'SAN/MOST/MOD10A1.005',
+ 'folder': 'SAN/MOST/',
+ 'prod': 'MOD10A1.005',
'spec': snow1_spec,
'spec_qa': snow1_specqa,
'color': snow_color,
'suff': snow1_suff, 'res': 500
},
'snow_aqua_daily_500': {'url': usrsnow,
- 'folder': 'SAN/MOSA/MYD10A1.005',
- 'spec' : snow1_spec,
+ 'folder': 'SAN/MOSA/',
+ 'prod': 'MYD10A1.005',
+ 'spec': snow1_spec,
'spec_qa': snow1_specqa,
- 'color' : snow_color,
+ 'color': snow_color,
'suff' : snow1_suff, 'res' : 500
},
'snow_terra_eight_500': {'url': usrsnow,
- 'folder': 'SAN/MOST/MOD10A2.005',
+ 'folder': 'SAN/MOST/',
+ 'prod': 'MOD10A2.005',
'spec': snow8_spec,
'spec_qa': None,
'color': snow_color,
'suff': snow8_suff, 'res' : 500
},
'snow_aqua_eight_500' : {'url': usrsnow,
- 'folder': 'SAN/MOSA/MYD10A2.005',
+ 'folder': 'SAN/MOSA/',
+ 'prod': 'MYD10A2.005',
'spec': snow8_spec,
'spec_qa': None,
'color': snow_color,
@@ -216,10 +233,12 @@
self.products.update(snow)
self.products.update(surf_refl)
self.products_swath = {'lst_terra_daily': {'url': urlbase,
- 'folder': 'MOLT/MOD11_L2.005',
+ 'folder': 'MOLT/',
+ 'prod': 'MOD11_L2.005',
'spec': lstL2_spec},
'lst_aqua_daily': {'url': urlbase,
- 'folder': 'MOLA/MYD11_L2.005',
+ 'folder': 'MOLA/',
+ 'prod': 'MYD11_L2.005',
'spec': lstL2_spec}
}
@@ -235,10 +254,10 @@
def fromcode(self, code):
import string
for k, v in self.products.iteritems():
- if string.find(v['folder'], code) != -1:
+ if string.find(v['prod'], code) != -1:
return self.products[k]
for k, v in self.products_swath.iteritems():
- if string.find(v['folder'], code) != -1:
+ if string.find(v['prod'], code) != -1:
return self.products_swath[k]
grass.fatal(_("The code insert is not supported yet. Consider to " \
"ask on the grass-dev mailing list for future support"))
@@ -270,7 +289,8 @@
"""
def __init__(self, value):
self.code = value
- self.resampling = {'nearest': 'NEAREST_NEIGHBOR', 'bilinear': 'BILINEAR',
+ self.resampling = {'nearest': 'NEAREST_NEIGHBOR',
+ 'bilinear': 'BILINEAR',
'cubic': 'CUBIC CONVOLUTION'}
def returned(self):
Modified: grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.html
===================================================================
--- grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.html 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.html 2014-03-25 10:45:07 UTC (rev 59323)
@@ -17,6 +17,10 @@
</pre></div>
As alternative, the user can pass the values from standard input.
<p>
+Since 2013 NASA has been using the HTTP protocol for some of the MODIS
+repositories, so the password is no longer necessary. If you are sure
+about the protocol you can skip setting the password.
+<p>
<b>Warning</b>: The NASA policy does not permit to open more then ten
connections in parallel with their FTP server. Hence no more than ten
@@ -24,7 +28,7 @@
<p>
By default the downloaded files are stored in the path where the setting
file is saved. The user can change this directory with <em>folder</em>
-option. The user has <!-- NOT?? --> to set the <em>folder</em> option if
+option. The user has to set the <em>folder</em> option if
user and password are sent by standard input.
<p>
The time for the download depends on the number of requested tiles, days
Modified: grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.py
===================================================================
--- grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.py 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/r.modis.download/r.modis.download.py 2014-03-25 10:45:07 UTC (rev 59323)
@@ -35,7 +35,6 @@
#% gisprompt: old,file,input
#% label: Full path to settings file
#% description: "-" to pass the parameter from stdin
-#% answer: -
#% guisection: Define
#%end
#%option
@@ -215,7 +214,7 @@
destinationFolder=fold, tiles=tiles, path=prod['folder'],
today=firstday, enddate=finalday, delta=delta, debug=debug_opt)
# connect to ftp
- modisOgg.connectFTP()
+ modisOgg.connect()
if modisOgg.nconnection <= 20:
# download tha tiles
grass.message(_("Downloading MODIS product <%s>..." % produ))
Modified: grass-addons/grass7/raster/r.modis/r.modis.html
===================================================================
--- grass-addons/grass7/raster/r.modis/r.modis.html 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/r.modis.html 2014-03-25 10:45:07 UTC (rev 59323)
@@ -1,14 +1,14 @@
<h2>DESCRIPTION</h2>
The <em>r.modis</em> suite is a toolset to import MODIS satellite data in GRASS GIS.
-It uses the <a href="http://gis.cri.fmach.it/development/pyModis">pyModis</a>
+It uses the <a href="http://pymodis.fem-environment.eu">pyModis</a>
library and the <a
href="https://lpdaac.usgs.gov/lpdaac/tools/modis_reprojection_tool"> MODIS
Reprojection Tool</a> software to convert, mosaik and process MODIS data.
It requires the <a
href="https://lpdaac.usgs.gov/lpdaac/tools/modis_reprojection_tool">MODIS
Reprojection Tool</a> to be installed while <a
-href="http://gis.cri.fmach.it/development/pyModis">pyModis</a> is
+href="http://pymodis.fem-environment.eu">pyModis</a> is
included in the <em>r.modis</em> suite.
The suite offers three modules as interface with MODIS data. Each modules
is dedicated to for a specific operation. The module <em>r.modis.download</em>
Modified: grass-addons/grass7/raster/r.modis/r.modis.import/r.modis.import.py
===================================================================
--- grass-addons/grass7/raster/r.modis/r.modis.import/r.modis.import.py 2014-03-25 08:19:25 UTC (rev 59322)
+++ grass-addons/grass7/raster/r.modis/r.modis.import/r.modis.import.py 2014-03-25 10:45:07 UTC (rev 59323)
@@ -36,12 +36,8 @@
#%end
#%flag
#% key: q
-#% description: Ignore the QA map layer, do not use with "r" flag
+#% description: Ignore the QA map layer
#%end
-#%flag
-#% key: r
-#% description: Do not rescale the output map values to destination units
-#%end
#%option
#% key: mrtpath
#% type: string
@@ -96,13 +92,11 @@
from datetime import date
from grass.pygrass.functions import get_lib_path
-
path = get_lib_path(modname='r.modis', libname='libmodis')
if path is None:
grass.fatal("Not able to find the modis library directory.")
sys.path.append(path)
-
# try to import pymodis (modis) and some classes for r.modis.download
from rmodislib import resampling, product, projection
from convertmodis import convertModis, createMosaic
@@ -131,7 +125,7 @@
# for mosaic create a list of hdf files for each day
elif string.find(line, 'xml') == -1 and mosaik == True:
day = line.split('/')[-1].split('.')[1]
- if filelist.has_key(day):
+ if day in filelist:
filelist[day].append(line)
else:
filelist[day] = [line]
@@ -210,8 +204,35 @@
return None
-def import_tif(out, basedir, rem, write, target=None):
+def metadata(pars, mapp):
+ """ Set metadata to the imported files """
+ # metadata
+ grass.run_command('r.support', quiet=True, map=mapp, source1="MODIS NASA",
+ hist="Imported with r.modis.import")
+ # timestamp
+ rangetime = pars.retRangeTime()
+ data = rangetime['RangeBeginningDate'].split('-')
+ dataobj = date(int(data[0]), int(data[1]), int(data[2]))
+ grass.run_command('r.timestamp', map=mapp, quiet=True,
+ date=dataobj.strftime("%d %b %Y"))
+ return 0
+ # color
+# if string.find(mapp, 'QC') != -1 or string.find(mapp, 'Quality') != -1 or \
+# string.find(mapp, 'QA') != -1:
+# grass.run_command('r.colors', quiet=True, map=mapp, color=coll)
+# elif string.find(mapp, 'NDVI') != -1:
+# grass.run_command('r.colors', quiet=True, map=mapp, color=coll[0])
+# elif string.find(mapp, 'EVI') != -1:
+# grass.run_command('r.colors', quiet=True, map=mapp, color=coll[1])
+# elif string.find(mapp, 'LST') != -1:
+# grass.run_command('r.colors', quiet=True, map=mapp, color=coll[0])
+# elif string.find(mapp, 'Snow') != -1:
+# grass.run_command('r.colors', quiet=True, map=mapp, color=coll[0])
+
+
+def import_tif(out, basedir, rem, write, pm, target=None):
"""Import TIF files"""
+ print "start import"
# list of tif files
tifiles = glob.glob1(basedir, "*.tif")
if not tifiles:
@@ -245,6 +266,7 @@
except:
grass.warning(_('Error during import of %s' % basename))
continue
+ metadata(pm, basename)
if rem:
os.remove(name)
if target:
@@ -261,115 +283,10 @@
grass.warning(_("Raster map <%s> not found") % (pref + suff))
-def metadata(pars, mapp, coll):
- """ Set metadata to the imported files """
- # metadata
- meta = pars.metastring()
- grass.run_command('r.support', quiet=True, map=mapp, hist=meta)
- # timestamp
- rangetime = pars.retRangeTime()
- data = rangetime['RangeBeginningDate'].split('-')
- dataobj = date(int(data[0]), int(data[1]), int(data[2]))
- grass.run_command('r.timestamp', map=mapp, quiet=True,
- date=dataobj.strftime("%d %b %Y"))
- # color
- if string.find(mapp, 'QC') != -1 or string.find(mapp, 'Quality') != -1 or \
- string.find(mapp, 'QA') != -1:
- grass.run_command('r.colors', quiet=True, map=mapp, color=coll)
- elif string.find(mapp, 'NDVI') != -1:
- grass.run_command('r.colors', quiet=True, map=mapp, color=coll[0])
- elif string.find(mapp, 'EVI') != -1:
- grass.run_command('r.colors', quiet=True, map=mapp, color=coll[1])
- elif string.find(mapp, 'LST') != -1:
- grass.run_command('r.colors', quiet=True, map=mapp, color=coll[0])
- elif string.find(mapp, 'Snow') != -1:
- grass.run_command('r.colors', quiet=True, map=mapp, color=coll[0])
-
-
-def analyze(pref, an, cod, parse, write):
- """ Analyze the MODIS data using QA if present """
- if pref.find('.tif') != -1:
- pref = pref.rstrip('.tif')
- prod = product().fromcode(cod)
- if not prod['spec_qa']:
- grass.warning(_("There is no QA layer, analysis and filtering will be skipped"))
- an = 'noqa'
- pat = prod['pattern']
- suf = prod['suff']
- col = prod['color']
- val = []
- qa = []
- for v, q in suf.iteritems():
- val.append(findfile(pref, v))
- if q:
- qa.append(findfile(pref, q))
- for n in range(len(val)):
- if val[n] == None:
- grass.warning(_("Some error occur"))
- continue
- valname = val[n]['name']
- valfull = val[n]['fullname']
- grass.run_command('g.region', rast=valfull)
- grass.run_command('r.null', map=valfull, setnull=0)
- if string.find(cod, '13Q1') >= 0 or string.find(cod, '13A2') >= 0:
- mapc = "%s.2 = %s / 10000." % (valname, valfull)
- elif string.find(cod, '11A1') >= 0 or string.find(cod, '11A2') >= 0 \
- or string.find(cod, '11B1') >= 0:
- mapc = "%s.2 = (%s * 0.0200) - 273.15" % (valname, valfull)
- grass.mapcalc(mapc)
- if an == 'noqa':
- #grass.run_command('g.remove', quiet=True, rast = valfull)
- try:
- grass.run_command('g.rename', quiet=True, overwrite=write,
- rast=(valname, valname + '.orig'))
- grass.run_command('g.rename', quiet=True, overwrite=write,
- rast=(valname + '.2', valname))
- except:
- pass
- metadata(parse, valname, col)
- metadata(parse, valname, col)
- metadata(parse, valname, 'byr')
- if an == 'all':
- if len(qa) != len(val):
- grass.fatal(_("The number of QA and value maps is different,"\
- " something is wrong"))
- qaname = qa[n]['name']
- qafull = qa[n]['fullname']
- finalmap = "%s.3=if(" % valname
- first_map = 1
- for key, value in prod['pattern'].iteritems():
- for v in value:
- outpat = "%s.%i.%i" % (qaname, key, v)
- grass.run_command('r.bitpattern', quiet=True, input=valname,
- output=outpat, pattern=key, patval=v)
- if first_map:
- first_map = 0
- finalmap += "%s == 0 " % outpat
- else:
- finalmap += "&& %s == 0 " % outpat
-
- if string.find(cod, '13Q1') >= 0 or string.find(cod, '13A2') >= 0:
- finalmap += "&& %s.2 <= 1.000" % valname
- finalmap += ",%s.2, null() )" % valname
- # grass.message("mapc finalmap: %s" % finalmap)
- grass.mapcalc(finalmap)
- grass.run_command('g.rename', quiet=True, overwrite=write,
- rast=(valname, valname + '.orig'))
- grass.run_command('g.remove', quiet=True, rast=(valname + '.2'))
- grass.run_command('g.mremove', flags="f", quiet=True,
- rast=("%s.*" % qaname))
- grass.run_command('g.rename', quiet=True, overwrite=write,
- rast=(valname + '.3', valname))
- metadata(parse, valname, col)
- metadata(parse, valname, col)
- metadata(parse, valname, 'byr')
-
-
def single(options, remove, an, ow):
"""Convert the HDF file to TIF and import it
"""
listfile, basedir = list_files(options)
- pid = str(os.getpid())
# for each file
for i in listfile:
if os.path.exists(i):
@@ -390,19 +307,8 @@
if not output:
output = os.path.split(hdf)[1].rstrip('.hdf')
# import tif files
- maps_import = import_tif(output, basedir, remove, ow)
- if an and len(maps_import) != 0:
- grass.run_command('g.region', save='oldregion.%s' % pid)
- try:
- cod = os.path.split(pm.hdfname)[1].split('.')[0]
- analyze(output, an, cod, pm, ow)
- except:
- grass.run_command('g.region', region='oldregion.%s' % pid)
- grass.run_command('g.remove', quiet=True,
- region='oldregion.%s' % pid)
-# cod = os.path.split(pm.hdfname)[1].split('.')[0]
-# analyze(output, an, cod, pm, ow)
- #os.remove(confname)
+ import_tif(output, basedir, remove, ow, pm)
+ os.remove(confname)
def mosaic(options, remove, an, ow):
@@ -441,30 +347,13 @@
# remove hdf
if remove:
# import tif files
- maps_import = import_tif(outname, basedir, remove, ow)
- if an:
- grass.run_command('g.region', save='oldregion.%s' % pid)
- try:
- cod = os.path.split(pm.hdfname)[1].split('.')[0]
- analyze(outname, an, cod, pm, ow)
- except:
- grass.run_command('g.region', region='oldregion.%s' % pid)
- grass.run_command('g.remove', quiet=True,
- region='oldregion.%s' % pid)
+ import_tif(outname, basedir, remove, ow, pm)
os.remove(hdf)
os.remove(hdf + '.xml')
# or move the hdf and hdf.xml to the dir where are the original files
else:
# import tif files
- import_tif(outname, basedir, remove, ow, targetdir)
- if an and len(maps_import) != 0:
- grass.run_command('g.region', save='oldregion.%s' % pid)
- try:
- cod = os.path.split(pm.hdfname)[1].split('.')[0]
- analyze(outname, an, cod, pm, ow)
- except:
- grass.run_command('g.region', region='oldregion.%s' % pid)
- grass.run_command('g.remove', region='oldregion.%s' % pid)
+ import_tif(outname, basedir, remove, ow, pm, targetdir)
try:
shutil.move(hdf, targetdir)
shutil.move(hdf + '.xml', targetdir)
@@ -509,12 +398,10 @@
else:
over = False
# check if do check quality, rescaling and setting of colors
- if flags['r']:
- analyze = None
- elif flags['q']:
- analyze = 'noqa'
+ if flags['q']:
+ analyze = False
else:
- analyze = 'all'
+ analyze = True
# check if import simple file or mosaic
if flags['m'] and options['dns'] != '':
grass.fatal(_('It is not possible to create a mosaic with a single HDF file'))
@@ -527,4 +414,3 @@
if __name__ == "__main__":
options, flags = grass.parser()
sys.exit(main())
-
More information about the grass-commit
mailing list