[GRASS-SVN] r52631 - grass/trunk/lib/python/temporal

svn_grass at osgeo.org svn_grass at osgeo.org
Sat Aug 11 18:54:40 PDT 2012


Author: huhabla
Date: 2012-08-11 18:54:40 -0700 (Sat, 11 Aug 2012)
New Revision: 52631

Modified:
   grass/trunk/lib/python/temporal/abstract_dataset.py
   grass/trunk/lib/python/temporal/abstract_map_dataset.py
   grass/trunk/lib/python/temporal/abstract_space_time_dataset.py
   grass/trunk/lib/python/temporal/aggregation.py
   grass/trunk/lib/python/temporal/base.py
   grass/trunk/lib/python/temporal/core.py
   grass/trunk/lib/python/temporal/datetime_math.py
   grass/trunk/lib/python/temporal/extract.py
   grass/trunk/lib/python/temporal/mapcalc.py
   grass/trunk/lib/python/temporal/metadata.py
   grass/trunk/lib/python/temporal/space_time_datasets.py
   grass/trunk/lib/python/temporal/space_time_datasets_tools.py
   grass/trunk/lib/python/temporal/spatial_extent.py
   grass/trunk/lib/python/temporal/stds_export.py
   grass/trunk/lib/python/temporal/stds_import.py
   grass/trunk/lib/python/temporal/temporal_extent.py
   grass/trunk/lib/python/temporal/temporal_granularity.py
   grass/trunk/lib/python/temporal/temporal_relationships.py
   grass/trunk/lib/python/temporal/unit_tests.py
   grass/trunk/lib/python/temporal/univar_statistics.py
Log:
PEP8 compliance and better doxygen formatting. New rtree tests.


Modified: grass/trunk/lib/python/temporal/abstract_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_dataset.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/abstract_dataset.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -7,6 +7,8 @@
 
 Usage:
 
+ at code
+
 >>> import grass.temporal as tgis
 >>> ad = AbstractDataset()
 >>> ad.reset(ident="soil at PERMANENT")
@@ -19,7 +21,9 @@
     raise ImplementationError("This method must be implemented in the subclasses")
 ImplementationError: 'This method must be implemented in the subclasses'
 
-(C) 2008-2011 by the GRASS Development Team
+ at endcode
+
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -43,7 +47,8 @@
         return repr(self.msg)
     
 class AbstractDataset(object):
-    """!This is the base class for all datasets (raster, vector, raster3d, strds, stvds, str3ds)"""
+    """!This is the base class for all datasets 
+       (raster, vector, raster3d, strds, stvds, str3ds)"""
 
     def reset(self, ident):
         """!Reset the internal structure and set the identifier
@@ -107,7 +112,8 @@
         return self.base.get_mapset()
 
     def get_valid_time(self):
-        """!Returns a tuple of the start, the end valid time, this can be either datetime or double values
+        """!Returns a tuple of the start, the end valid time, 
+           this can be either datetime or double values
            @return A tuple of (start_time, end_time)
         """
 
@@ -124,7 +130,9 @@
         return (start, end)
 
     def get_absolute_time(self):
-        """!Returns a tuple of the start, the end valid time and the timezone of the map
+        """!Returns a tuple of the start, the end 
+           valid time and the timezone of the map
+           
            @return A tuple of (start_time, end_time, timezone)
         """
 
@@ -135,7 +143,8 @@
         return (start, end, tz)
 
     def get_relative_time(self):
-        """!Returns the relative time interval (start_time, end_time, unit) or None if not present"""
+        """!Returns the relative time interval (start_time, end_time, unit) 
+           or None if not present"""
 
         start = self.relative_time.get_start_time()
         end = self.relative_time.get_end_time()
@@ -151,7 +160,8 @@
         return unit
 
     def check_relative_time_unit(self, unit):
-        """!Check if unit is of type  years, months, days, hours, minutes or seconds
+        """!Check if unit is of type  years, months, days, hours, 
+           minutes or seconds
 
            Return True if success or False otherwise
         """
@@ -166,11 +176,13 @@
         return self.base.get_ttype()
 
     def get_spatial_extent(self):
-        """!Return a tuple of spatial extent (north, south, east, west, top, bottom) """
+        """!Return a tuple of spatial extent 
+           (north, south, east, west, top, bottom) """
         return self.spatial_extent.get_spatial_extent()
 
     def select(self, dbif=None):
-        """!Select temporal dataset entry from database and fill up the internal structure"""
+        """!Select temporal dataset entry from database and fill 
+           up the internal structure"""
 
         dbif, connect = init_dbif(dbif)
 
@@ -197,12 +209,14 @@
         raise ImplementationError("This method must be implemented in the subclasses")
 
     def insert(self, dbif=None, execute=True):
-        """!Insert temporal dataset entry into database from the internal structure
+        """!Insert temporal dataset entry into 
+           database from the internal structure
 
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
         """
 
         dbif, connect = init_dbif(dbif)
@@ -234,7 +248,8 @@
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
         """
 
         dbif, connect = init_dbif(dbif)
@@ -266,7 +281,8 @@
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
         """
 
         dbif, connect = init_dbif(dbif)

Modified: grass/trunk/lib/python/temporal/abstract_map_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_map_dataset.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/abstract_map_dataset.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -8,8 +8,8 @@
 Usage:
 
 >>> import grass.temporal as tgis
->>> tmr = TemporalMapRelations()
->>> amd = AbstractMapDataset()
+>>> tmr = tgis.TemporalMapRelations()
+>>> amd = tgis.AbstractMapDataset()
 
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -27,8 +27,8 @@
 
        This object will be set up by temporal topology creation methods.
 
-       If correctly initialize the calls next() and prev() let the user walk temporally forward
-       and backward in time.
+       If correctly initialized, the calls next() and prev() 
+       let the user walk temporally forward and backward in time.
 
        The following temporal relations with access methods are supported:
        * equal
@@ -55,11 +55,14 @@
         
         Usage:
         
-        >>> import grass.temporal as tgis
+        @code
+        
         >>> tmr = TemporalMapRelations()
         >>> tmr.print_temporal_topology_info()
          +-------------------- Temporal Topology -------------------------------------+
         >>> tmr.print_temporal_topology_shell_info()
+        
+        @endcode
     """
 
     def __init__(self):
@@ -89,7 +92,8 @@
            temporally located AFTER the start time of this map, but temporally
            near than other maps of the same dataset.
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         self._temporal_topology["NEXT"] = map_
 
@@ -100,7 +104,8 @@
            temporally located BEFORE the start time of this map, but temporally
            near than other maps of the same dataset.
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         self._temporal_topology["PREV"] = map_
 
@@ -127,7 +132,8 @@
     def append_temporal_equivalent(self, map_):
         """!Append a map with equivalent temporal extent as this map
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "EQUAL" not in self._temporal_topology:
             self._temporal_topology["EQUAL"] = []
@@ -145,7 +151,8 @@
     def append_temporal_overlaps(self, map_):
         """!Append a map that this map temporally overlaps
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "OVERLAPS" not in self._temporal_topology:
             self._temporal_topology["OVERLAPS"] = []
@@ -163,7 +170,8 @@
     def append_temporal_overlapped(self, map_):
         """!Append a map that this map temporally overlapped
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "OVERLAPPED" not in self._temporal_topology:
             self._temporal_topology["OVERLAPPED"] = []
@@ -181,7 +189,8 @@
     def append_temporal_follows(self, map_):
         """!Append a map that this map temporally follows
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "FOLLOWS" not in self._temporal_topology:
             self._temporal_topology["FOLLOWS"] = []
@@ -199,7 +208,8 @@
     def append_temporal_precedes(self, map_):
         """!Append a map that this map temporally precedes
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "PRECEDES" not in self._temporal_topology:
             self._temporal_topology["PRECEDES"] = []
@@ -218,7 +228,8 @@
         """!Append a map that this map is temporally located during
            This includes temporal relationships starts and finishes
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type 
+                        AbstractMapDataset or derived classes
         """
         if "DURING" not in self._temporal_topology:
             self._temporal_topology["DURING"] = []
@@ -238,7 +249,8 @@
         """!Append a map that this map temporally contains
            This includes temporal relationships started and finished
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "CONTAINS" not in self._temporal_topology:
             self._temporal_topology["CONTAINS"] = []
@@ -389,7 +401,8 @@
     def set_stds_register(self, name):
         """!Set the space time dataset register table name.
 
-           This table stores all space time datasets in which this map is registered.
+           This table stores all space time datasets in 
+           which this map is registered.
 
            @param ident: The name of the register table
         """
@@ -397,9 +410,13 @@
             "This method must be implemented in the subclasses")
 
     def check_resolution_with_current_region(self):
-        """!Check if the raster or voxel resolution is finer than the current resolution
-           Return "finer" in case the raster/voxel resolution is finer than the current region
-           Return "coarser" in case the raster/voxel resolution is coarser than the current region
+        """!Check if the raster or voxel resolution is 
+           finer than the current resolution
+           
+           * Return "finer" in case the raster/voxel resolution is finer 
+             than the current region
+           * Return "coarser" in case the raster/voxel resolution is coarser 
+             than the current region
 
           Vector maps are always finer than the current region
         """
@@ -413,14 +430,15 @@
             "This method must be implemented in the subclasses")
 
     def write_timestamp_to_grass(self):
-        """!Write the timestamp of this map into the map metadata in the grass file system based spatial
-           database.
+        """!Write the timestamp of this map into the map metadata 
+           in the grass file system based spatial database.
         """
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
     def remove_timestamp_from_grass(self):
-        """!Remove the timestamp from the grass file system based spatial database
+        """!Remove the timestamp from the grass file 
+           system based spatial database
         """
         raise ImplementationError(
             "This method must be implemented in the subclasses")
@@ -434,19 +452,21 @@
             "This method must be implemented in the subclasses")
 
     def read_info(self):
-        """!Read the map info from the grass file system based database and store the content
-           into a dictionary
+        """!Read the map info from the grass file system based database and 
+           store the content into a dictionary
         """
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
     def load(self):
-        """!Load the content of this object from the grass file system based database"""
+        """!Load the content of this object from the grass 
+           file system based database"""
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
     def _convert_timestamp(self):
-        """!Convert the valid time into a grass datetime library compatible timestamp string
+        """!Convert the valid time into a grass datetime library 
+           compatible timestamp string
 
            This method works for relative and absolute time
 
@@ -486,9 +506,11 @@
     def build_id(self, name, mapset, layer=None):
         """!Convenient method to build the unique identifier
 
-            Existing layer and mapset definitions in the name string will be reused
+            Existing layer and mapset definitions in the name 
+            string will be reused
 
-           @param return the id of the vector map as name(:layer)@mapset while layer is optional
+           @param return the id of the vector map as name(:layer)@mapset 
+                  while layer is optional
         """
 
         # Check if the name includes any mapset
@@ -514,17 +536,14 @@
         if self.get_type() == "raster":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Raster Dataset ----------------------------------------+"
         if self.get_type() == "raster3d":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Raster3d Dataset --------------------------------------+"
         if self.get_type() == "vector":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Vector Dataset ----------------------------------------+"
         print " |                                                                            |"
         self.base.print_info()
@@ -629,22 +648,37 @@
         """
         if start_time and not isinstance(start_time, datetime):
             if self.get_layer() is not None:
-                core.fatal(_("Start time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                core.fatal(_("Start time must be of type datetime "
+                             "for %s map <%s> with layer: %s") % \
+                           (self.get_type(), self.get_map_id(), 
+                            self.get_layer()))
             else:
-                core.fatal(_("Start time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                core.fatal(_("Start time must be of type "
+                             "datetime ""for %s map <%s>") % \
+                           (self.get_type(), self.get_map_id()))
 
         if end_time and not isinstance(end_time, datetime):
             if self.get_layer():
-                core.fatal(_("End time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                core.fatal(_("End time must be of type datetime "
+                             "for %s map <%s> with layer: %s") % \
+                           (self.get_type(), self.get_map_id(), 
+                            self.get_layer()))
             else:
-                core.fatal(_("End time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                core.fatal(_("End time must be of type datetime "
+                             "for %s map <%s>") % (self.get_type(), 
+                                                   self.get_map_id()))
 
         if start_time is not None and end_time is not None:
             if start_time > end_time:
                 if self.get_layer():
-                    core.fatal(_("End time must be greater than start time for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                    core.fatal(_("End time must be greater than "
+                                 "start time for %s map <%s> with layer: %s") %\
+                                (self.get_type(), self.get_map_id(), 
+                                 self.get_layer()))
                 else:
-                    core.fatal(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                    core.fatal(_("End time must be greater than "
+                                 "start time for %s map <%s>") % \
+                               (self.get_type(), self.get_map_id()))
             else:
                 # Do not create an interval in case start and end time are equal
                 if start_time == end_time:
@@ -655,7 +689,8 @@
         self.absolute_time.set_end_time(end_time)
         self.absolute_time.set_timezone(timezone)
 
-    def update_absolute_time(self, start_time, end_time=None, timezone=None, dbif=None):
+    def update_absolute_time(self, start_time, end_time=None, 
+                             timezone=None, dbif=None):
         """!Update the absolute time
 
            This functions assures that the timetsamp is written to the 
@@ -690,17 +725,28 @@
 
         if not self.check_relative_time_unit(unit):
             if self.get_layer() is not None:
-                core.error(_("Unsupported relative time unit type for %s map <%s> with layer %s: %s") % (self.get_type(), self.get_id(), self.get_layer(), unit))
+                core.error(_("Unsupported relative time unit type for %s map "
+                             "<%s> with layer %s: %s") % (self.get_type(), 
+                                                          self.get_id(), 
+                                                          self.get_layer(), 
+                                                          unit))
             else:
-                core.error(_("Unsupported relative time unit type for %s map <%s>: %s") % (self.get_type(), self.get_id(), unit))
+                core.error(_("Unsupported relative time unit type for %s map "
+                             "<%s>: %s") % (self.get_type(), self.get_id(), 
+                                            unit))
             return False
 
         if start_time is not None and end_time is not None:
             if int(start_time) > int(end_time):
                 if self.get_layer() is not None:
-                    core.error(_("End time must be greater than start time for %s map <%s> with layer %s") % (self.get_type(), self.get_id(), self.get_layer()))
+                    core.error(_("End time must be greater than start time for"
+                                 " %s map <%s> with layer %s") % \
+                               (self.get_type(), self.get_id(), 
+                                self.get_layer()))
                 else:
-                    core.error(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_id()))
+                    core.error(_("End time must be greater than start time for"
+                                 " %s map <%s>") % (self.get_type(), 
+                                                    self.get_id()))
                 return False
             else:
                 # Do not create an interval in case start and end time are equal
@@ -763,9 +809,14 @@
             if end is not None:
                 if start >= end:
                     if self.get_layer() is not None:
-                        core.error(_("Map <%s> with layer %s has incorrect time interval, start time is greater than end time") % (self.get_map_id(), self.get_layer()))
+                        core.error(_("Map <%s> with layer %s has incorrect "
+                                     "time interval, start time is greater "
+                                     "than end time") % (self.get_map_id(), 
+                                                         self.get_layer()))
                     else:
-                        core.error(_("Map <%s> has incorrect time interval, start time is greater than end time") % (self.get_map_id()))
+                        core.error(_("Map <%s> has incorrect time interval, "
+                                     "start time is greater than end time") % \
+                                   (self.get_map_id()))
                     return False
         else:
             core.error(_("Map <%s> has incorrect start time") %
@@ -778,14 +829,16 @@
         """!Delete a map entry from database if it exists
 
             Remove dependent entries:
-            * Remove the map entry in each space time dataset in which this map is registered
+            * Remove the map entry in each space time dataset in which this map 
+              is registered
             * Remove the space time dataset register table
 
            @param dbif: The database interface to be used
            @param update: Call for each unregister statement the update from 
                           registered maps of the space time dataset. 
                           This can slow down the un-registration process significantly.
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
+           @param execute: If True the SQL DELETE and DROP table statements will 
+                           be executed.
                            If False the prepared SQL statements are 
                            returned and must be executed by the caller.
 
@@ -812,7 +865,8 @@
             core.verbose(_("Delete %s dataset <%s> from temporal database")
                          % (self.get_type(), self.get_id()))
 
-            # Delete yourself from the database, trigger functions will take care of dependencies
+            # Delete yourself from the database, trigger functions will 
+            # take care of dependencies
             statement += self.base.get_delete_statement()
 
         if execute:
@@ -832,13 +886,15 @@
         return statement
 
     def unregister(self, dbif=None, update=True, execute=True):
-        """! Remove the map entry in each space time dataset in which this map is registered
+        """! Remove the map entry in each space time dataset in which this map 
+           is registered
 
            @param dbif: The database interface to be used
-           @param update: Call for each unregister statement the update from registered maps
-                          of the space time dataset. This can slow down the 
-                          un-registration process significantly.
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
+           @param update: Call for each unregister statement the update from 
+                          registered maps of the space time dataset. This can 
+                          slow down the un-registration process significantly.
+           @param execute: If True the SQL DELETE and DROP table statements 
+                           will be executed.
                            If False the prepared SQL statements are 
                            returned and must be executed by the caller.
 
@@ -846,11 +902,14 @@
         """
 
         if self.get_layer() is not None:
-            core.verbose(_("Unregister %s map <%s> with layer %s from space time datasets") %
-                         (self.get_type(), self.get_map_id(), self.get_layer()))
+            core.verbose(_("Unregister %(type)s map <%(map)s> with "
+                           "layer %(layer)s from space time datasets" % \
+                         {'type':self.get_type(), 'map':self.get_map_id(), 
+                          'layer':self.get_layer()}))
         else:
-            core.verbose(_("Unregister %s map <%s> from space time datasets")
-                         % (self.get_type(), self.get_map_id()))
+            core.verbose(_("Unregister %(type)s map <%(map)s> "
+                           "from space time datasets"
+                         % {'type':self.get_type(), 'map':self.get_map_id()}))
 
         statement = ""
         dbif, connect = init_dbif(dbif)
@@ -906,7 +965,8 @@
                 dbif.cursor.execute(sql)
                 rows = dbif.cursor.fetchall()
         except:
-            core.error(_("Unable to select space time dataset register table <%s>") % (self.get_stds_register()))
+            core.error(_("Unable to select space time dataset register table "
+                         "<%s>") % (self.get_stds_register()))
 
         if connect:
             dbif.close()

Modified: grass/trunk/lib/python/temporal/abstract_space_time_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_space_time_dataset.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/abstract_space_time_dataset.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -5,7 +5,6 @@
 
 Temporal GIS related functions to be used in temporal GIS Python library package.
 
-    
 (C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -37,7 +36,8 @@
         self.map_counter = 0
 
     def get_new_map_instance(self, ident=None):
-        """!Return a new instance of a map dataset which is associated with the type of this class
+        """!Return a new instance of a map dataset which is associated 
+           with the type of this class
 
            @param ident: The unique identifier of the new object
         """
@@ -52,7 +52,8 @@
     def set_map_register(self, name):
         """!Set the name of the map register table
 
-        This table stores all map names which are registered in this space time dataset.
+           This table stores all map names which are registered
+           in this space time dataset.
 
            @param name: The name of the register table
         """
@@ -75,17 +76,14 @@
         if self.get_type() == "strds":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Space Time Raster Dataset -----------------------------+"
         if self.get_type() == "str3ds":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Space Time Raster3d Dataset ---------------------------+"
         if self.get_type() == "stvds":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Space Time Vector Dataset -----------------------------+"
         print " |                                                                            |"
         self.base.print_info()
@@ -111,7 +109,8 @@
                            title=None, description=None):
         """!Set the initial values of the space time dataset
 
-           @param temporal_type: The temporal type of this space time dataset (absolute or relative)
+           @param temporal_type: The temporal type of this space 
+                                 time dataset (absolute or relative)
            @param semantic_type: The semantic type of this dataset
            @param title: The title
            @param description: The description of this dataset
@@ -133,7 +132,8 @@
         return self.base.get_semantic_type()
 
     def get_initial_values(self):
-        """!Return the initial values: temporal_type, semantic_type, title, description"""
+        """!Return the initial values: temporal_type, 
+           semantic_type, title, description"""
 
         temporal_type = self.get_temporal_type()
         semantic_type = self.base.get_semantic_type()
@@ -143,7 +143,13 @@
         return temporal_type, semantic_type, title, description
 
     def get_granularity(self):
-        """!Return the granularity"""
+        """!Return the granularity
+        
+           Granularity can be of absolute time or relative time.
+           In case of absolute time a string containing an integer
+           value and the time unit (years, months, days, hours, minutes, seconds).
+           In case of relative time an integer value is expected.
+        """
 
         temporal_type = self.get_temporal_type()
 
@@ -155,6 +161,13 @@
         return granularity
 
     def set_granularity(self, granularity):
+        """!Set the granularity
+        
+           Granularity can be of absolute time or relative time.
+           In case of absolute time a string containing an integer
+           value and the time unit (years, months, days, hours, minutes, seconds).
+           In case of relative time an integer value is expected.
+        """
 
         temporal_type = self.get_temporal_type()
 
@@ -168,9 +181,11 @@
             core.fatal(_("Unknown temporal type \"%s\"") % (temporal_type))
 
     def set_relative_time_unit(self, unit):
-        """!Set the relative time unit which may be of type: years, months, days, hours, minutes or seconds
+        """!Set the relative time unit which may be of type: 
+           years, months, days, hours, minutes or seconds
 
-           All maps registered in a (relative time) space time dataset must have the same unit
+           All maps registered in a (relative time) 
+           space time dataset must have the same unit
         """
 
         temporal_type = self.get_temporal_type()
@@ -277,8 +292,9 @@
     def count_temporal_relations(self, maps=None, dbif=None):
         """!Count the temporal relations between the registered maps.
 
-           The map list must be ordered by start time. Temporal relations are counted
-           by analysing the sparse upper right side temporal relationships matrix.
+           The map list must be ordered by start time. 
+           Temporal relations are counted by analysing the sparse 
+           upper right side temporal relationships matrix.
 
            @param maps: A sorted (start_time) list of AbstractDataset objects
            @param dbif: The database interface to be used
@@ -295,26 +311,28 @@
         """!Check the temporal topology
 
            Correct topology means, that time intervals are not overlap or
-           that intervals does not contain other intervals. Equal time intervals or
-           points of time are not allowed.
+           that intervals do not contain other intervals. 
+           Equal time intervals are not allowed.
 
            The map list must be ordered by start time
 
-           Allowed and not allowed temporal relationships for correct topology
-           after      -> allowed
-           precedes     -> allowed
-           follows    -> allowed
-           precedes   -> allowed
+           Allowed and not allowed temporal relationships for correct topology:
+           @verbatim
+           * after      -> allowed
+           * before     -> allowed
+           * follows    -> allowed
+           * precedes   -> allowed
 
-           equivalent -> not allowed
-           during     -> not allowed
-           contains   -> not allowed
-           overlaps   -> not allowed
-           overlapped -> not allowed
-           starts     -> not allowed
-           finishes   -> not allowed
-           started    -> not allowed
-           finished   -> not allowed
+           * equivalent -> not allowed
+           * during     -> not allowed
+           * contains   -> not allowed
+           * overlaps   -> not allowed
+           * overlapped -> not allowed
+           * starts     -> not allowed
+           * finishes   -> not allowed
+           * started    -> not allowed
+           * finished   -> not allowed
+           @endverbatim
 
            @param maps: A sorted (start_time) list of AbstractDataset objects
            @return True if topology is correct
@@ -355,20 +373,26 @@
         return True
 
     def sample_by_dataset(self, stds, method=None, spatial=False, dbif=None):
-        """!Sample this space time dataset with the temporal topology of a second space time dataset
+        """!Sample this space time dataset with the temporal topology 
+           of a second space time dataset
 
-           The sample dataset must have "interval" as temporal map type, so all sample maps have valid interval time.
+           The sample dataset must have "interval" as temporal map type, 
+           so all sample maps have valid interval time.
 
-           In case spatial is True, the spatial overlap between temporal related maps is performed. Only
+           In case spatial is True, the spatial overlap between 
+           temporally related maps is performed. Only
            temporal related and spatial overlapping maps are returned.
 
            Return all registered maps as ordered (by start_time) object list with
            "gap" map objects (id==None). Each list entry is a list of map objects
-           which are potentially located in temporal relation to the actual granule of the second space time dataset.
+           which are potentially located in temporal relation to the actual 
+           granule of the second space time dataset.
 
-           Each entry in the object list is a dict. The actual sampler map and its temporal extent (the actual granule) and
+           Each entry in the object list is a dict. The actual sampler 
+           map and its temporal extent (the actual granule) and
            the list of samples are stored:
 
+           @code
            list = self.sample_by_dataset(stds=sampler, method=[
                "during","overlap","contain","equal"])
            for entry in list:
@@ -377,58 +401,89 @@
                for map in maplist:
                    map.select()
                    map.print_info()
+           @endcode
 
-           A valid temporal topology (no overlapping or inclusion allowed) is needed to get correct results in case of gaps
-           in the sample dataset.
+           A valid temporal topology (no overlapping or inclusion allowed) 
+           is needed to get correct results in case of gaps in the sample dataset.
 
            Gaps between maps are identified as unregistered maps with id==None.
 
-           The map objects are initialized with the id and the temporal extent of the granule (temporal type, start time, end time).
-           In case more map information are needed, use the select() method for each listed object.
+           The map objects are initialized with the id and the temporal 
+           extent of the granule (temporal type, start time, end time).
+           In case more map information is needed, use the select() 
+           method for each listed object.
 
            @param stds: The space time dataset to be used for temporal sampling
-           @param method: This option specifies what sample method should be used. In case the registered maps are of temporal point type,
-                          only the start time is used for sampling. In case of mixed of interval data the user can chose between:
-                          * start: Select maps of which the start time is located in the selection granule
-                            map    :        s
-                            granule:  s-----------------e
+           @param method: This option specifies what sample method should be used. 
+                  In case the registered maps are of temporal point type,
+                  only the start time is used for sampling. In case of mixed 
+                  of interval data the user can choose between:
+                  
+                  * start: Select maps of which the start time is 
+                    located in the selection granule
+                    @verbatim
+                    map    :        s
+                    granule:  s-----------------e
 
-                            map    :        s--------------------e
-                            granule:  s-----------------e
+                    map    :        s--------------------e
+                    granule:  s-----------------e
 
-                            map    :        s--------e
-                            granule:  s-----------------e
+                    map    :        s--------e
+                    granule:  s-----------------e
+                    @endverbatim
 
-                          * during: Select maps which are temporal during the selection granule
-                            map    :     s-----------e
-                            granule:  s-----------------e
+                  * during: Select maps which are temporal 
+                    during the selection granule
+                    @verbatim
+                    map    :     s-----------e
+                    granule:  s-----------------e
+                    @endverbatim
 
-                          * overlap: Select maps which temporal overlap the selection granule
-                            map    :     s-----------e
-                            granule:        s-----------------e
+                  * overlap: Select maps which temporal overlap 
+                    the selection granule
+                    @verbatim
+                    map    :     s-----------e
+                    granule:        s-----------------e
 
-                            map    :     s-----------e
-                            granule:  s----------e
+                    map    :     s-----------e
+                    granule:  s----------e
+                    @endverbatim
 
-                          * contain: Select maps which temporally contain the selection granule
-                            map    :  s-----------------e
-                            granule:     s-----------e
+                  * contain: Select maps which temporally contain 
+                    the selection granule
+                    @verbatim
+                    map    :  s-----------------e
+                    granule:     s-----------e
+                    @endverbatim
 
-                          * equal: Select maps which temporally equal to the selection granule
-                            map    :  s-----------e
-                            granule:  s-----------e
+                  * equal: Select maps which temporally equal 
+                    to the selection granule
+                    @verbatim
+                    map    :  s-----------e
+                    granule:  s-----------e
+                    @endverbatim
 
-                          * follows: Select maps which temporally follow the selection granule
-                            map    :              s-----------e
-                            granule:  s-----------e
+                  * follows: Select maps which temporally follow 
+                    the selection granule
+                    @verbatim
+                    map    :              s-----------e
+                    granule:  s-----------e
+                    @endverbatim
 
-                          * precedes: Select maps which temporally precedes the selection granule
-                            map    :  s-----------e
-                            granule:              s-----------e
+                  * precedes: Select maps which temporally precede 
+                    the selection granule
+                    @verbatim
+                    map    :  s-----------e
+                    granule:              s-----------e
+                    @endverbatim
 
-                          All these methods can be combined. Method must be of type tuple including the identification strings.
-           @param spatial: If set True additional the spatial overlapping is used for selection -> spatio-temporal relation.
-                           The returned map objects will have temporal and spatial extents
+                  All these methods can be combined. Method must be of 
+                  type tuple including the identification strings.
+                  
+           @param spatial: If set True, additionally the spatial overlapping 
+                           is used for selection -> spatio-temporal relation.
+                           The returned map objects will have temporal and 
+                           spatial extents
            @param dbif: The database interface to be used
 
            In case nothing found None is returned
@@ -466,11 +521,13 @@
             use_equal = True
 
         if self.get_temporal_type() != stds.get_temporal_type():
-            core.error(_("The space time datasets must be of the same temporal type"))
+            core.error(_("The space time datasets must be of "
+                         "the same temporal type"))
             return None
 
         if stds.get_map_time() != "interval":
-            core.error(_("The temporal map type of the sample dataset must be interval"))
+            core.error(_("The temporal map type of the sample "
+                         "dataset must be interval"))
             return None
 
         # In case points of time are available, disable the interval specific methods
@@ -542,23 +599,28 @@
 
     def get_registered_maps_as_objects_by_granularity(self, gran=None, dbif=None):
         """!Return all registered maps as ordered (by start_time) object list with
-           "gap" map objects (id==None) for temporal topological operations using the
-           granularity of the space time dataset as increment. Each list entry is a list of map objects
+           "gap" map objects (id==None) for temporal topological operations using 
+           the granularity of the space time dataset as increment. 
+           Each list entry is a list of map objects
            which are potentially located in the actual granule.
 
-           A valid temporal topology (no overlapping or inclusion allowed) is needed to get correct results.
+           A valid temporal topology (no overlapping or inclusion allowed) 
+           is needed to get correct results.
 
-           The dataset must have "interval" as temporal map type, so all maps have valid interval time.
+           The dataset must have "interval" as temporal map type, 
+           so all maps have valid interval time.
 
            Gaps between maps are identified as unregistered maps with id==None.
 
-           The objects are initialized with the id and the temporal extent (temporal type, start time, end time).
-           In case more map information are needed, use the select() method for each listed object.
+           The objects are initialized with the id and the temporal 
+           extent (temporal type, start time, end time).
+           In case more map information is needed, use the select() 
+           method for each listed object.
 
            @param gran: The granularity to be used
            @param dbif: The database interface to be used
 
-           In case nothing found None is returned
+           @return ordered object list, in case nothing found None is returned
         """
 
         dbif, connect = init_dbif(dbif)
@@ -611,13 +673,16 @@
 
            Gaps between maps are identified as maps with id==None
 
-           The objects are initialized with the id and the temporal extent (temporal type, start time, end time).
-           In case more map information are needed, use the select() method for each listed object.
+           The objects are initialized with the id and the 
+           temporal extent (temporal type, start time, end time).
+           In case more map information is needed, use the select() 
+           method for each listed object.
 
-           @param where: The SQL where statement to select a subset of the registered maps without "WHERE"
+           @param where: The SQL where statement to select a 
+                         subset of the registered maps without "WHERE"
            @param dbif: The database interface to be used
 
-           In case nothing found None is returned
+           @return ordered object list, in case nothing found None is returned
         """
 
         dbif, connect = init_dbif(dbif)
@@ -646,7 +711,8 @@
                         if self.is_time_absolute():
                             map.set_absolute_time(start, end)
                         elif self.is_time_relative():
-                            map.set_relative_time(start, end, self.get_relative_time_unit())
+                            map.set_relative_time(start, end, 
+                                                  self.get_relative_time_unit())
                         obj_list.append(copy.copy(map))
 
         if connect:
@@ -654,14 +720,20 @@
 
         return obj_list
 
-    def get_registered_maps_as_objects(self, where=None, order="start_time", dbif=None):
-        """!Return all registered maps as ordered object list for temporal topological operations
+    def get_registered_maps_as_objects(self, where=None, order="start_time", 
+                                       dbif=None):
+        """!Return all registered maps as ordered object list for 
+           temporal topological operations
 
-           The objects are initialized with the id and the temporal extent (temporal type, start time, end time).
-           In case more map information are needed, use the select() method for each listed object.
+           The objects are initialized with the id and the temporal extent 
+           (temporal type, start time, end time).
+           In case more map information is needed, use the select() 
+           method for each listed object.
 
-           @param where: The SQL where statement to select a subset of the registered maps without "WHERE"
-           @param order: The SQL order statement to be used to order the objects in the list without "ORDER BY"
+           @param where: The SQL where statement to select a subset of 
+                         the registered maps without "WHERE"
+           @param order: The SQL order statement to be used to order the 
+                         objects in the list without "ORDER BY"
            @param dbif: The database interface to be used
 
            In case nothing found None is returned
@@ -695,16 +767,20 @@
         return obj_list
 
     def get_registered_maps(self, columns=None, where=None, order=None, dbif=None):
-        """!Return sqlite rows of all registered maps.
+        """!Return SQL rows of all registered maps.
 
-           In case columns are not specified, each row includes all columns specified in the datatype specific view
+           In case columns are not specified, each row includes all columns 
+           specified in the datatype specific view.
 
            @param columns: Columns to be selected as SQL compliant string
-           @param where: The SQL where statement to select a subset of the registered maps without "WHERE"
-           @param order: The SQL order statement to be used to order the objects in the list without "ORDER BY"
+           @param where: The SQL where statement to select a subset 
+                         of the registered maps without "WHERE"
+           @param order: The SQL order statement to be used to order the 
+                         objects in the list without "ORDER BY"
            @param dbif: The database interface to be used
 
-           In case nothing found None is returned
+           @return SQL rows of all registered maps, 
+                   In case nothing found None is returned
         """
 
         dbif, connect = init_dbif(dbif)
@@ -721,9 +797,11 @@
                     None).get_type() + "_view_rel_time"
 
             if columns is not None:
-                sql = "SELECT %s FROM %s  WHERE %s.id IN (SELECT id FROM %s)" % (columns, map_view, map_view, self.get_map_register())
+                sql = "SELECT %s FROM %s  WHERE %s.id IN (SELECT id FROM %s)" %\
+                      (columns, map_view, map_view, self.get_map_register())
             else:
-                sql = "SELECT * FROM %s  WHERE %s.id IN (SELECT id FROM %s)" % (map_view, map_view, self.get_map_register())
+                sql = "SELECT * FROM %s  WHERE %s.id IN (SELECT id FROM %s)" % \
+                      (map_view, map_view, self.get_map_register())
 
             if where is not None:
                 sql += " AND (%s)" % (where.split(";")[0])
@@ -748,18 +826,24 @@
     def delete(self, dbif=None, execute=True):
         """!Delete a space time dataset from the temporal database
 
-           This method removes the space time dataset from the temporal database and drops its map register table
+           This method removes the space time dataset from the temporal 
+           database and drops its map register table
 
            @param dbif: The database interface to be used
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+           @param execute: If True the SQL DELETE and DROP table 
+                           statements will be executed.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
 
            @return The SQL statements if execute == False, else an empty string
         """
         # First we need to check if maps are registered in this dataset and
         # unregister them
 
-        core.verbose(_("Delete space time %s  dataset <%s> from temporal database") % (self.get_new_map_instance(ident=None).get_type(), self.get_id()))
+        core.verbose(_("Delete space time %s  dataset <%s> from temporal "
+                       "database") % \
+                     (self.get_new_map_instance(ident=None).get_type(), 
+                      self.get_id()))
 
         statement = ""
         dbif, connect = init_dbif(dbif)
@@ -809,8 +893,8 @@
             This method takes care of the registration of a map
             in a space time dataset.
 
-            In case the map is already registered this function will break with a warning
-            and return False
+            In case the map is already registered this function 
+            will break with a warning and return False.
 
            @param dbif: The database interface to be used
         """
@@ -818,11 +902,19 @@
 
         if map.is_in_db(dbif) == False:
             dbif.close()
-            core.fatal(_("Only maps with absolute or relative valid time can be registered"))
+            core.fatal(_("Only maps with absolute or relative valid time can "
+                         "be registered"))
         if map.get_layer():
-            core.verbose(_("Register %s map <%s> with layer %s in space time %s dataset <%s>") % (map.get_type(), map.get_map_id(), map.get_layer(), map.get_type(), self.get_id()))
+            core.verbose(_("Register %s map <%s> with layer %s in space "
+                           "time %s dataset <%s>") % (map.get_type(), 
+                                                      map.get_map_id(), 
+                                                      map.get_layer(), 
+                                                      map.get_type(), 
+                                                      self.get_id()))
         else:
-            core.verbose(_("Register %s map <%s> in space time %s dataset <%s>") % (map.get_type(), map.get_map_id(), map.get_type(), self.get_id()))
+            core.verbose(_("Register %s map <%s> in space time %s "
+                           "dataset <%s>") % (map.get_type(), map.get_map_id(),
+                                               map.get_type(), self.get_id()))
 
         # First select all data from the database
         map.select(dbif)
@@ -855,27 +947,39 @@
         # Check temporal types
         if stds_ttype != map_ttype:
             if map.get_layer():
-                core.fatal(_("Temporal type of space time dataset <%s> and map <%s> with layer %s are different") % (self.get_id(), map.get_map_id(), map.get_layer()))
+                core.fatal(_("Temporal type of space time dataset <%s> and "
+                             "map <%s> with layer %s are different") % \
+                           (self.get_id(), map.get_map_id(), map.get_layer()))
             else:
-                core.fatal(_("Temporal type of space time dataset <%s> and map <%s> are different") % (self.get_id(), map.get_map_id()))
+                core.fatal(_("Temporal type of space time dataset <%s> and "
+                             "map <%s> are different") % \
+                           (self.get_id(), map.get_map_id()))
 
-        # In case no map has been registered yet, set the relative time unit from the first map
-        if (self.metadata.get_number_of_maps() is None or self.metadata.get_number_of_maps() == 0) and \
+        # In case no map has been registered yet, set the 
+        # relative time unit from the first map
+        if (self.metadata.get_number_of_maps() is None or \
+            self.metadata.get_number_of_maps() == 0) and \
             self.map_counter == 0 and self.is_time_relative():
 
             self.set_relative_time_unit(map_rel_time_unit)
             statement += self.relative_time.get_update_all_statement_mogrified(
                 dbif)
-            core.verbose(_("Set temporal unit for space time %s dataset <%s> to %s") % (map.get_type(), self.get_id(), map_rel_time_unit))
+            core.verbose(_("Set temporal unit for space time %s dataset "
+                           "<%s> to %s") % (map.get_type(), self.get_id(), 
+                                            map_rel_time_unit))
 
         stds_rel_time_unit = self.get_relative_time_unit()
 
         # Check the relative time unit
         if self.is_time_relative() and (stds_rel_time_unit != map_rel_time_unit):
             if map.get_layer():
-                core.fatal(_("Relative time units of space time dataset <%s> and map <%s> with layer %s are different") % (self.get_id(), map.get_map_id(), map.get_layer()))
+                core.fatal(_("Relative time units of space time dataset "
+                             "<%s> and map <%s> with layer %s are different") %\
+                            (self.get_id(), map.get_map_id(), map.get_layer()))
             else:
-                core.fatal(_("Relative time units of space time dataset <%s> and map <%s> are different") % (self.get_id(), map.get_map_id()))
+                core.fatal(_("Relative time units of space time dataset "
+                             "<%s> and map <%s> are different") % \
+                           (self.get_id(), map.get_map_id()))
 
         #print "STDS register table", stds_register_table
 
@@ -904,7 +1008,9 @@
                     dbif.close()
 
                 if map.get_layer() is not None:
-                    core.warning(_("Map <%s> with layer %s is already registered.") % (map.get_map_id(), map.get_layer()))
+                    core.warning(_("Map <%s> with layer %s is already "
+                                   "registered.") % (map.get_map_id(), 
+                                                     map.get_layer()))
                 else:
                     core.warning(_("Map <%s> is already registered.")
                         % (map.get_map_id()))
@@ -922,7 +1028,9 @@
                 self.get_type() + "_register"
 
             # Read the SQL template
-            sql = open(os.path.join(sql_path, "map_stds_register_table_template.sql"), 'r').read()
+            sql = open(os.path.join(sql_path, 
+                                    "map_stds_register_table_template.sql"), 
+                                    'r').read()
             # Create the raster, raster3d and vector tables
             sql = sql.replace("GRASS_MAP", map.get_type())
             sql = sql.replace("MAP_NAME", map_name + "_" + map_mapset)
@@ -937,11 +1045,14 @@
             statement += map.metadata.get_update_statement_mogrified(dbif)
 
             if map.get_layer():
-                core.verbose(_("Created register table <%s> for %s map <%s> with layer %s") %
-                                (map_register_table, map.get_type(), map.get_map_id(), map.get_layer()))
+                core.verbose(_("Created register table <%s> for "
+                               "%s map <%s> with layer %s") %
+                                (map_register_table, map.get_type(), 
+                                 map.get_map_id(), map.get_layer()))
             else:
                 core.verbose(_("Created register table <%s> for %s map <%s>") %
-                                (map_register_table, map.get_type(), map.get_map_id()))
+                                (map_register_table, map.get_type(), 
+                                 map.get_map_id()))
 
         # We need to create the table and register it
         if stds_register_table is None:
@@ -949,7 +1060,9 @@
             stds_register_table = stds_name + "_" + \
                 stds_mapset + "_" + map.get_type() + "_register"
             # Read the SQL template
-            sql = open(os.path.join(sql_path, "stds_map_register_table_template.sql"), 'r').read()
+            sql = open(os.path.join(sql_path, 
+                                    "stds_map_register_table_template.sql"), 
+                                    'r').read()
             # Create the raster, raster3d and vector tables
             sql = sql.replace("GRASS_MAP", map.get_type())
             sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset)
@@ -961,7 +1074,8 @@
             self.set_map_register(stds_register_table)
             statement += self.metadata.get_update_statement_mogrified(dbif)
 
-            core.verbose(_("Created register table <%s> for space time %s  dataset <%s>") %
+            core.verbose(_("Created register table <%s> for space "
+                           "time %s  dataset <%s>") %
                           (stds_register_table, map.get_type(), self.get_id()))
 
         # We need to execute the statement at this time
@@ -1021,10 +1135,13 @@
 
            @param map: The map object to unregister
            @param dbif: The database interface to be used
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+           @param execute: If True the SQL DELETE and DROP table 
+                           statements will be executed.
+                           If False the prepared SQL statements are 
+                           returned and must be executed by the caller.
 
-           @return The SQL statements if execute == False, else an empty string, None in case of a failure
+           @return The SQL statements if execute == False, else an empty 
+                   string, None in case of a failure
         """
 
         statement = ""
@@ -1060,9 +1177,14 @@
             # Break if the map is not registered
             if row is None:
                 if map.get_layer() is not None:
-                    core.warning(_("Map <%s> with layer %s is not registered in space time dataset <%s>") % (map.get_map_id(), map.get_layer(), self.base.get_id()))
+                    core.warning(_("Map <%s> with layer %s is not registered "
+                                   "in space time dataset <%s>") % \
+                                 (map.get_map_id(), map.get_layer(), 
+                                  self.base.get_id()))
                 else:
-                    core.warning(_("Map <%s> is not registered in space time dataset <%s>") % (map.get_map_id(), self.base.get_id()))
+                    core.warning(_("Map <%s> is not registered in space "
+                                   "time dataset <%s>") % (map.get_map_id(), 
+                                                           self.base.get_id()))
                 if connect == True:
                     dbif.close()
                 return ""
@@ -1117,7 +1239,8 @@
 
            @param dbif: The database interface to be used
         """
-        core.verbose(_("Update metadata, spatial and temporal extent from all registered maps of <%s>") % (self.get_id()))
+        core.verbose(_("Update metadata, spatial and temporal extent from "
+                       "all registered maps of <%s>") % (self.get_id()))
 
         # Nothing to do if the register is not present
         if not self.get_map_register():
@@ -1139,7 +1262,9 @@
 
         # Update the spatial and temporal extent from registered maps
         # Read the SQL template
-        sql = open(os.path.join(sql_path, "update_stds_spatial_temporal_extent_template.sql"), 'r').read()
+        sql = open(os.path.join(sql_path, 
+                   "update_stds_spatial_temporal_extent_template.sql"), 
+                   'r').read()
         sql = sql.replace(
             "GRASS_MAP", self.get_new_map_instance(None).get_type())
         sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset)
@@ -1171,20 +1296,23 @@
         else:
             start_time, end_time, unit = self.get_relative_time()
 
-        # In case no end time is set, use the maximum start time of all registered maps as end time
+        # In case no end time is set, use the maximum start time of 
+        # all registered maps as end time
         if end_time is None:
             use_start_time = True
         else:
             # Check if the end time is smaller than the maximum start time
             if self.is_time_absolute():
-                sql = """SELECT max(start_time) FROM GRASS_MAP_absolute_time WHERE GRASS_MAP_absolute_time.id IN
+                sql = """SELECT max(start_time) FROM GRASS_MAP_absolute_time 
+                         WHERE GRASS_MAP_absolute_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register);"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
                     None).get_type())
                 sql = sql.replace("SPACETIME_NAME",
                     stds_name + "_" + stds_mapset)
             else:
-                sql = """SELECT max(start_time) FROM GRASS_MAP_relative_time WHERE GRASS_MAP_relative_time.id IN
+                sql = """SELECT max(start_time) FROM GRASS_MAP_relative_time 
+                         WHERE GRASS_MAP_relative_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register);"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
                     None).get_type())
@@ -1219,7 +1347,8 @@
         if use_start_time:
             if self.is_time_absolute():
                 sql = """UPDATE STDS_absolute_time SET end_time =
-               (SELECT max(start_time) FROM GRASS_MAP_absolute_time WHERE GRASS_MAP_absolute_time.id IN
+               (SELECT max(start_time) FROM GRASS_MAP_absolute_time WHERE 
+               GRASS_MAP_absolute_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register)
                ) WHERE id = 'SPACETIME_ID';"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
@@ -1230,7 +1359,8 @@
                 sql = sql.replace("STDS", self.get_type())
             elif self.is_time_relative():
                 sql = """UPDATE STDS_relative_time SET end_time =
-               (SELECT max(start_time) FROM GRASS_MAP_relative_time WHERE GRASS_MAP_relative_time.id IN
+               (SELECT max(start_time) FROM GRASS_MAP_relative_time WHERE 
+               GRASS_MAP_relative_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register)
                ) WHERE id = 'SPACETIME_ID';"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
@@ -1246,11 +1376,14 @@
         maps = self.get_registered_maps_as_objects(dbif=dbif)
         tlist = self.count_temporal_types(maps)
 
-        if tlist["interval"] > 0 and tlist["point"] == 0 and tlist["invalid"] == 0:
+        if tlist["interval"] > 0 and tlist["point"] == 0 and \
+           tlist["invalid"] == 0:
             map_time = "interval"
-        elif tlist["interval"] == 0 and tlist["point"] > 0 and tlist["invalid"] == 0:
+        elif tlist["interval"] == 0 and tlist["point"] > 0 and \
+             tlist["invalid"] == 0:
             map_time = "point"
-        elif tlist["interval"] > 0 and tlist["point"] > 0 and tlist["invalid"] == 0:
+        elif tlist["interval"] > 0 and tlist["point"] > 0 and \
+             tlist["invalid"] == 0:
             map_time = "mixed"
         else:
             map_time = "invalid"
@@ -1302,6 +1435,7 @@
         @param start: The start time
         @param end: The end time
         @param use_start: Select maps of which the start time is located in the selection granule
+                         @verbatim
                          map    :        s
                          granule:  s-----------------e
 
@@ -1310,36 +1444,51 @@
 
                          map    :        s--------e
                          granule:  s-----------------e
+                         @endverbatim
 
         @param use_during: during: Select maps which are temporal during the selection granule
+                         @verbatim
                          map    :     s-----------e
                          granule:  s-----------------e
+                         @endverbatim
 
         @param use_overlap: Select maps which temporal overlap the selection granule
+                         @verbatim
                          map    :     s-----------e
                          granule:        s-----------------e
 
                          map    :     s-----------e
                          granule:  s----------e
+                         @endverbatim
 
         @param use_contain: Select maps which temporally contain the selection granule
+                         @verbatim
                          map    :  s-----------------e
                          granule:     s-----------e
+                         @endverbatim
 
         @param use_equal: Select maps which temporally equal to the selection granule
+                         @verbatim
                          map    :  s-----------e
                          granule:  s-----------e
+                         @endverbatim
 
         @param use_follows: Select maps which temporally follow the selection granule
+                         @verbatim
                          map    :              s-----------e
                          granule:  s-----------e
+                         @endverbatim
 
         @param use_precedes: Select maps which temporally precedes the selection granule
+                         @verbatim
                          map    :  s-----------e
                          granule:              s-----------e
+                         @endverbatim
 
         Usage:
         
+        @code
+        
         >>> # Relative time
         >>> start = 1
         >>> end = 2
@@ -1405,6 +1554,8 @@
         ... use_start=True, use_during=True, use_overlap=True, use_contain=True,
         ... use_equal=True, use_follows=True, use_precedes=True)
         "((start_time >= '2001-01-01 12:30:00' and start_time < '2001-03-31 14:30:00')  OR ((start_time > '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time >= '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time > '2001-01-01 12:30:00' and end_time <= '2001-03-31 14:30:00')) OR ((start_time < '2001-01-01 12:30:00' and end_time > '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time < '2001-03-31 14:30:00' and start_time > '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00')) OR ((start_time < '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00') OR (start_time <= '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00') OR (start_time < '2001-01-01 12:30:00' and end_time >= '2001-03-31 14:30:00')) OR (start_time = '2001-01-01 12:30:00' and end_time = '2001-03-31 14:30:00') OR (start_time = '2001-03-31 14:30:00') OR (end_time = '2001-01-01 12:30:00'))"
+        
+        @endcode
         """
 
     where = "("

Modified: grass/trunk/lib/python/temporal/aggregation.py
===================================================================
--- grass/trunk/lib/python/temporal/aggregation.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/aggregation.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -9,7 +9,8 @@
 @code
 import grass.temporal as tgis
 
-tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end, count, method, register_null, dbif)
+tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end,
+    count, method, register_null, dbif)
 
 ...
 @endcode
@@ -27,6 +28,7 @@
 
 ###############################################################################
 
+
 def collect_map_names(sp, dbif, start, end, sampling):
     """!Gather all maps from dataset using a specific sample method
 
@@ -36,7 +38,7 @@
        @param end: The end time of the sample interval, may be relative or absolute
        @param sampling: The sampling methods to use
     """
-    
+
     use_start = False
     use_during = False
     use_overlap = False
@@ -75,8 +77,15 @@
         use_follows = False
         use_precedes = False
 
-    where = create_temporal_relation_sql_where_statement(start, end, use_start, use_during, use_overlap, use_contain, use_equal, use_follows, use_precedes)
-   
+    where = create_temporal_relation_sql_where_statement(start, end, 
+                                                         use_start, 
+                                                         use_during, 
+                                                         use_overlap, 
+                                                         use_contain, 
+                                                         use_equal, 
+                                                         use_follows, 
+                                                         use_precedes)
+
     rows = sp.get_registered_maps("id", where, "start_time", dbif)
 
     if not rows:
@@ -86,13 +95,15 @@
     for row in rows:
         names.append(row["id"])
 
-    return names    
+    return names
 
 ###############################################################################
 
-def aggregate_raster_maps(inputs, base, start, end, count, method, register_null, dbif):
+
+def aggregate_raster_maps(inputs, base, start, end, count, method, 
+                          register_null, dbif):
     """!Aggregate a list of raster input maps with r.series
-       
+
        @param inputs: The names of the raster maps to be aggregated
        @param base: The basename of the new created raster maps
        @param start: The start time of the sample interval, may be relative or absolute
@@ -103,13 +114,11 @@
        @param dbif: The temporal database interface to use
     """
 
-    core.verbose(_("Aggregate %s raster maps") %(len(inputs)))
+    core.verbose(_("Aggregate %s raster maps") % (len(inputs)))
     output = "%s_%i" % (base, count)
-    
+
     mapset = libgis.G_mapset()
-
     map_id = output + "@" + mapset
-
     new_map = raster_dataset(map_id)
 
     # Check if new map is in the temporal database
@@ -122,7 +131,8 @@
             core.error(_("Raster map <%s> is already in temporal database, use overwrite flag to overwrite"))
             return
 
-    core.verbose(_("Compute aggregation of maps between %s - %s" % (str(start), str(end))))
+    core.verbose(_("Compute aggregation of maps between %s - %s" % (
+        str(start), str(end))))
 
     # Create the r.series input file
     filename = core.tempfile(True)
@@ -134,20 +144,21 @@
 
     file.close()
     # Run r.series
-    ret = core.run_command("r.series", flags="z", file=filename, output=output, overwrite=core.overwrite(), method=method)
+    ret = core.run_command("r.series", flags="z", file=filename,
+                           output=output, overwrite=core.overwrite(), 
+                           method=method)
 
     if ret != 0:
         dbif.close()
         core.fatal(_("Error while r.series computation"))
-        
 
     # Read the raster map data
     new_map.load()
-    
+
     # In case of a null map continue, do not register null maps
-    if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
+    if new_map.metadata.get_min() is None and new_map.metadata.get_max() is None:
         if not register_null:
             core.run_command("g.remove", rast=output)
             return None
-    
+
     return new_map

Modified: grass/trunk/lib/python/temporal/base.py
===================================================================
--- grass/trunk/lib/python/temporal/base.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/base.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -5,12 +5,14 @@
 Temporal GIS base classes to be used in other
 Python temporal gis packages.
 
-This packages includes all base classes to store basic information like id, name,
-mapset creation and modification time as well as sql serialization and de-serialization
-and the sql database interface.
+This package includes all base classes to store basic information 
+like id, name, mapset creation and modification time as well as sql 
+serialization and de-serialization and the sql database interface.
 
 Usage:
 
+ at code
+
 >>> import grass.temporal as tgis
 >>> rbase = tgis.RasterBase(ident="soil at PERMANENT")
 >>> vbase = tgis.VectorBase(ident="soil:1 at PERMANENT")
@@ -19,7 +21,9 @@
 >>> stvdsbase = tgis.STVDSBase(ident="soil at PERMANENT")
 >>> str3dsbase = tgis.STR3DSBase(ident="soil at PERMANENT")
 
-(C) 2008-2011 by the GRASS Development Team
+ at endcode
+
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -38,12 +42,15 @@
         self.D = {}
 
     def serialize(self, type, table, where=None):
-        """!Convert the internal dictionary into a string of semicolon separated SQL statements
-            The keys are the column names and the values are the row entries
+        """!Convert the internal dictionary into a string of semicolon 
+            separated SQL statements. The keys are the column names and 
+            the values are the row entries
             
-            >>> import grass.temporal as tgis
-            >>> from datetime import datetime, date, time, timedelta
-            >>> t = tgis.DictSQLSerializer()
+            Usage:
+            
+            \code
+            
+            >>> t = DictSQLSerializer()
             >>> t.D["id"] = "soil at PERMANENT"
             >>> t.D["name"] = "soil"
             >>> t.D["mapset"] = "PERMANENT"
@@ -63,6 +70,8 @@
             @table The name of the table to select, insert or update
             @where The optional where statement
             @return a tuple containing the SQL string and the arguments
+            
+            \endcode
         """
 
         sql = ""
@@ -166,7 +175,8 @@
         return sql, tuple(args)
 
     def deserialize(self, row):
-        """!Convert the content of the dbmi dictionary like row into the internal dictionary
+        """!Convert the content of the dbmi dictionary like row into the 
+           internal dictionary
 
            @param row: The dictionary like row to store in the internal dict
         """
@@ -188,10 +198,10 @@
 class SQLDatabaseInterface(DictSQLSerializer):
     """!This class represents the SQL database interface
 
-       Functions to insert, select and update the internal structure of this class
-       in the temporal database are implemented.
-       This is the base class for raster, raster3d, vector and space time datasets
-       data management classes:
+       Functions to insert, select and update the internal 
+       structure of this class in the temporal database are implemented.
+       This is the base class for raster, raster3d, vector and 
+       space time datasets data management classes:
        * Identification information (base)
        * Spatial extent
        * Temporal extent
@@ -199,9 +209,9 @@
        
        Usage:
        
-        >>> import grass.temporal as tgis
-        >>> from datetime import datetime, date, time, timedelta
-        >>> t = tgis.SQLDatabaseInterface("raster", "soil at PERMANENT")
+       \code
+       
+        >>> t = SQLDatabaseInterface("raster", "soil at PERMANENT")
         >>> t.D["name"] = "soil"
         >>> t.D["mapset"] = "PERMANENT"
         >>> t.D["creator"] = "soeren"
@@ -226,12 +236,15 @@
         ("UPDATE raster SET  creation_time = ?  ,mapset = ?  ,name = ?  ,creator = ? WHERE id = 'soil at PERMANENT';\\n", (datetime.datetime(2001, 1, 1, 0, 0), 'PERMANENT', 'soil', 'soeren'))
         >>> t.get_update_all_statement_mogrified()
         "UPDATE raster SET  creation_time = '2001-01-01 00:00:00'  ,mapset = 'PERMANENT'  ,name = 'soil'  ,creator = 'soeren' WHERE id = 'soil at PERMANENT';\\n"
+        
+        \endcode
     """
     def __init__(self, table=None, ident=None):
         """!Constructor of this class
 
            @param table: The name of the table
-           @param ident: The identifier (primary key) of this object in the database table
+           @param ident: The identifier (primary key) of this 
+                         object in the database table
         """
         DictSQLSerializer.__init__(self)
 
@@ -239,17 +252,20 @@
         self.ident = ident
 
     def get_table_name(self):
-        """!Return the name of the table in which the internal data are inserted, updated or selected"""
+        """!Return the name of the table in which the internal 
+           data are inserted, updated or selected"""
         return self.table
 
     def get_delete_statement(self):
         """!Return the delete string"""
-        return "DELETE FROM " + self.get_table_name() + " WHERE id = \'" + str(self.ident) + "\';\n"
+        return "DELETE FROM " + self.get_table_name() + \
+               " WHERE id = \'" + str(self.ident) + "\';\n"
 
     def delete(self, dbif=None):
         """!Delete the entry of this object from the temporal database
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         sql = self.get_delete_statement()
         #print sql
@@ -264,12 +280,14 @@
 
     def get_is_in_db_statement(self):
         """Return the selection string"""
-        return "SELECT id FROM " + self.get_table_name() + " WHERE id = \'" + str(self.ident) + "\';\n"
+        return "SELECT id FROM " + self.get_table_name() + \
+               " WHERE id = \'" + str(self.ident) + "\';\n"
 
     def is_in_db(self, dbif=None):
         """!Check if this object is present in the temporal database
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
 
         sql = self.get_is_in_db_statement()
@@ -292,13 +310,16 @@
         return True
 
     def get_select_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("SELECT", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument list in 
+           database specific style"""
+        return self.serialize("SELECT", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_select_statement_mogrified(self, dbif=None):
         """!Return the select statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -309,7 +330,8 @@
         """!Select the content from the temporal database and store it
            in the internal dictionary structure
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         sql, args = self.get_select_statement()
         #print sql
@@ -344,13 +366,15 @@
         return True
 
     def get_insert_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
+        """!Return the sql statement and the argument 
+           list in database specific style"""
         return self.serialize("INSERT", self.get_table_name())
 
     def get_insert_statement_mogrified(self, dbif=None):
         """!Return the insert statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -361,7 +385,8 @@
         """!Serialize the content of this object and store it in the temporal
            database using the internal identifier
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         sql, args = self.get_insert_statement()
         #print sql
@@ -376,13 +401,16 @@
             dbif.close()
 
     def get_update_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("UPDATE", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument list 
+           in database specific style"""
+        return self.serialize("UPDATE", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_update_statement_mogrified(self, dbif=None):
         """!Return the update statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -395,7 +423,8 @@
 
            Only object entries which are exists (not None) are updated
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if self.ident is None:
             raise IOError("Missing identifer")
@@ -413,13 +442,16 @@
             dbif.close()
 
     def get_update_all_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("UPDATE ALL", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument 
+           list in database specific style"""
+        return self.serialize("UPDATE ALL", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_update_all_statement_mogrified(self, dbif=None):
         """!Return the update all statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -427,10 +459,11 @@
         return dbif.mogrify_sql_statement(self.get_update_all_statement())
 
     def update_all(self, dbif=None):
-        """!Serialize the content of this object, including None objects, and update it in the temporal
-           database using the internal identifier
+        """!Serialize the content of this object, including None objects, 
+        and update it in the temporal database using the internal identifier
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if self.ident is None:
             raise IOError("Missing identifer")
@@ -455,10 +488,10 @@
         basic identification information
         
         Usage:
+        
+        \code
 
-        >>> import grass.temporal as tgis
-        >>> from datetime import datetime, date, time, timedelta
-        >>> t = tgis.DatasetBase("raster", "soil at PERMANENT", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
+        >>> t = DatasetBase("raster", "soil at PERMANENT", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
         >>> t.id
         'soil at PERMANENT'
         >>> t.name
@@ -486,15 +519,19 @@
         creator=soeren
         creation_time=2001-01-01 00:00:00
         temporal_type=absolute
+        
+        \endcode
     """
     
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
                  creator=None, ctime=None,ttype=None):
         """!Constructor
         
-            @param table: The name of the temporal database table that should be used to store the values
-            @param ident: The unique identifier must be a combination of the dataset name, 
-                          layer name and the mapset name at mapset or name:1 at mapset
+            @param table: The name of the temporal database table 
+                          that should be used to store the values
+            @param ident: The unique identifier must be a combination of 
+                          the dataset name, layer name and the mapset 
+                          name at mapset or name:1 at mapset
                           used as as primary key in the temporal database
             @param name: The name of the map or dataset
             @param mapset: The name of the mapset 
@@ -525,8 +562,9 @@
     def set_id(self, ident):
         """!Convenient method to set the unique identifier (primary key)
 
-           @param ident: The unique identifier must be a combination of the dataset name, 
-                         layer name and the mapset name at mapset or name:1 at mapset
+           @param ident: The unique identifier must be a combination 
+                         of the dataset name, layer name and the mapset 
+                         name at mapset or name:1 at mapset
         """
         self.ident = ident
         self.D["id"] = ident
@@ -575,7 +613,8 @@
         self.D["creator"] = creator
 
     def set_ctime(self, ctime=None):
-        """!Set the creation time of the dataset, if nothing set the current time is used
+        """!Set the creation time of the dataset, 
+           if nothing set the current time is used
 
            @param ctime: The current time of type datetime
         """
@@ -617,13 +656,15 @@
             return None
 
     def get_map_id(self):
-        """!Convenient method to get the unique map identifier without layer information
+        """!Convenient method to get the unique map identifier 
+           without layer information
 
            @param return the name of the vector map as name at mapset
         """
         if self.id.find(":") >= 0:
             # Remove the layer identifier from the id
-            return iself.d.split("@")[0].split(":")[0] + "@" + self.id.split("@")[1]
+                    return self.id.split("@")[0].split(":")[0] + "@" + \
+                   self.id.split("@")[1]
         else:
             return self.id
 
@@ -782,9 +823,9 @@
        
     Usage:
 
-    >>> import grass.temporal as tgis
-    >>> from datetime import datetime, date, time, timedelta
-    >>> t = tgis.STDSBase("stds", "soil at PERMANENT", semantic_type="average", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
+    \code
+    
+    >>> t = STDSBase("stds", "soil at PERMANENT", semantic_type="average", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
     >>> t.semantic_type
     'average'
     >>> t.print_info()
@@ -804,6 +845,8 @@
     creation_time=2001-01-01 00:00:00
     temporal_type=absolute
     semantic_type=average
+    
+    \endcode
     """
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
                  semantic_type=None, creator=None, ctime=None,

Modified: grass/trunk/lib/python/temporal/core.py
===================================================================
--- grass/trunk/lib/python/temporal/core.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/core.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -9,6 +9,8 @@
 
 Usage:
 
+\code
+
 >>> import grass.temporal as tgis
 >>> # Create the temporal database
 >>> tgis.create_temporal_database()
@@ -18,10 +20,13 @@
 >>> # Execute a SQL statement
 >>> dbif.execute_transaction("SELECT datetime(0, 'unixepoch', 'localtime');")
 >>> # Mogrify an SQL statement
->>> dbif.mogrify_sql_statement(["SELECT name from raster_base where name = ?", ("precipitation",)])
+>>> dbif.mogrify_sql_statement(["SELECT name from raster_base where name = ?", 
+... ("precipitation",)])
 "SELECT name from raster_base where name = 'precipitation'"
 >>> dbif.close()
 
+\endcode
+
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -47,7 +52,8 @@
         # Needed for dictionary like cursors
         import psycopg2.extras
     else:
-        core.fatal(_("Unable to initialize the temporal DBMI interface. Use t.connect to specify the driver and the database string"))
+        core.fatal(_("Unable to initialize the temporal DBMI interface. Use "
+                     "t.connect to specify the driver and the database string"))
 else:
     # Use the default sqlite variable
     core.run_command("t.connect", flags="d")
@@ -67,13 +73,17 @@
                 "$LOCATION_NAME", grassenv["LOCATION_NAME"])
             return string
         else:
-            core.fatal(_("Unable to initialize the temporal GIS DBMI interface. Use t.connect to specify the driver and the database string"))
+            core.fatal(_("Unable to initialize the temporal GIS DBMI "
+                         "interface. Use t.connect to specify the driver "
+                         "and the database string"))
     elif dbmi.__name__ == "psycopg2":
         if "database" in kv:
             string = kv["database"]
             return string
         else:
-            core.fatal(_("Unable to initialize the temporal GIS DBMI interface. Use t.connect to specify the driver and the database string"))
+            core.fatal(_("Unable to initialize the temporal GIS DBMI "
+                         "interface. Use t.connect to specify the driver "
+                         "and the database string"))
             return "dbname=grass_test user=soeren password=abcdefgh"
 
 ###############################################################################
@@ -88,10 +98,12 @@
 
 
 def create_temporal_database():
-    """!This function creates the grass location database structure for raster, vector and raster3d maps
-       as well as for the space-time datasets strds, str3ds and stvds
+    """!This function creates the grass location database structure for raster, 
+       vector and raster3d maps as well as for the space-time datasets strds, 
+       str3ds and stvds
 
-       This functions must be called before any spatio-temporal processing can be started
+       This function must be called before any spatio-temporal processing 
+       can be started
     """
 
     database = get_temporal_dbmi_init_string()
@@ -108,7 +120,8 @@
         connection = dbmi.connect(database)
         cursor = connection.cursor()
         # Check for raster_base table
-        cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_name=%s)", ('raster_base',))
+        cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables "
+                       "WHERE table_name=%s)", ('raster_base',))
         db_exists = cursor.fetchone()[0]
         connection.commit()
         cursor.close()
@@ -214,7 +227,8 @@
         init = get_temporal_dbmi_init_string()
         #print "Connect to",  self.database
         if dbmi.__name__ == "sqlite3":
-            self.connection = dbmi.connect(init, detect_types=dbmi.PARSE_DECLTYPES | dbmi.PARSE_COLNAMES)
+            self.connection = dbmi.connect(init, 
+                    detect_types=dbmi.PARSE_DECLTYPES | dbmi.PARSE_COLNAMES)
             self.connection.row_factory = dbmi.Row
             self.connection.isolation_level = None
             self.cursor = self.connection.cursor()
@@ -277,8 +291,8 @@
                         break
 
                     if args[count] is None:
-                        statement = "%sNULL%s" % (statement[0:
-                                                            pos], statement[pos + 1:])
+                        statement = "%sNULL%s" % (statement[0:pos], 
+                                                  statement[pos + 1:])
                     elif isinstance(args[count], (int, long)):
                         statement = "%s%d%s" % (statement[0:pos], args[count],
                                                 statement[pos + 1:])
@@ -286,8 +300,11 @@
                         statement = "%s%f%s" % (statement[0:pos], args[count],
                                                 statement[pos + 1:])
                     else:
-                        # Default is a string, this works for datetime objects too
-                        statement = "%s\'%s\'%s" % (statement[0:pos], str(args[count]), statement[pos + 1:])
+                        # Default is a string, this works for datetime 
+                        # objects too
+                        statement = "%s\'%s\'%s" % (statement[0:pos], 
+                                                    str(args[count]), 
+                                                    statement[pos + 1:])
                     count += 1
 
                 return statement
@@ -301,7 +318,7 @@
             @param statement The executable SQL statement or SQL script
         """
         connect = False
-        if self.connected == False:
+        if not self.connected:
             self.connect()
             connect = True
 
@@ -309,6 +326,7 @@
         sql_script += "BEGIN TRANSACTION;\n"
         sql_script += statement
         sql_script += "END TRANSACTION;"
+        
         try:
             if dbmi.__name__ == "sqlite3":
                 self.cursor.executescript(statement)
@@ -316,9 +334,10 @@
                 self.cursor.execute(statement)
             self.connection.commit()
         except:
-            if connect == True:
+            if connect:
                 self.close()
-            core.error(_("Unable to execute transaction:\n %s") % (statement))
+            core.error(_("Unable to execute transaction:\n %(sql)s" % \
+                         {"sql":statement}))
             raise
 
         if connect:
@@ -327,13 +346,17 @@
 ###############################################################################
 
 def init_dbif(dbif):
-    """!This method checks if the database interface connection exists, if not a new one
-        will be created, connected and True will be returned
+    """!This method checks if the database interface connection exists, 
+        if not a new one will be created, connected and True will be returned
 
         Usage code sample:
+        \code
+        
         dbif, connect = tgis.init_dbif(dbif)
         if connect:
             dbif.close()
+        
+        \endcode
     """
     if dbif is None:
         dbif = SQLDatabaseInterfaceConnection()

Modified: grass/trunk/lib/python/temporal/datetime_math.py
===================================================================
--- grass/trunk/lib/python/temporal/datetime_math.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/datetime_math.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -2,17 +2,8 @@
 
 @brief GRASS Python scripting module (temporal GIS functions)
 
-Temporal GIS datetime math functions to be used in Python scripts.
+Temporal GIS datetime math functions to be used in library functions and modules.
 
-Usage:
-
- at code
-import grass.temporal as tgis
-
-tgis.increment_datetime_by_string(mydate, "3 month, 2 hours")
-...
- at endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -26,10 +17,11 @@
 from dateutil import parser
 
 DAY_IN_SECONDS = 86400
-SECOND_AS_DAY  = 1.1574074074074073e-05
+SECOND_AS_DAY = 1.1574074074074073e-05
 
 ###############################################################################
 
+
 def relative_time_to_time_delta(value):
     """!Convert the double value representing days
        into a timedelta object.
@@ -43,6 +35,7 @@
 
 ###############################################################################
 
+
 def time_delta_to_relative_time(delta):
     """!Convert the time delta into a
        double value, representing days.
@@ -52,17 +45,69 @@
 
 ###############################################################################
 
-def increment_datetime_by_string(mydate, increment, mult = 1):
-    """!Return a new datetime object incremented with the provided 
+
+def increment_datetime_by_string(mydate, increment, mult=1):
+    """!Return a new datetime object incremented with the provided
        relative dates specified as string.
-       Additional a multiplier can be specified to multiply the increment 
+       Additionally, a multiplier can be specified to multiply the increment
        before adding to the provided datetime object.
+       
+       Usage:
+       
+       @code
+       
+        >>> dt = datetime(2001, 9, 1, 0, 0, 0)
+        >>> string = "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2003, 2, 18, 12, 5)
 
+        >>> dt = datetime(2001, 11, 1, 0, 0, 0)
+        >>> string = "1 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 12, 1, 0, 0)
+
+        >>> dt = datetime(2001, 11, 1, 0, 0, 0)
+        >>> string = "13 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2002, 12, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "72 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2007, 1, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "72 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2007, 1, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "5 minutes"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 1, 0, 5)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "49 hours"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 3, 1, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "3600 seconds"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 1, 1, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "30 days"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 31, 0, 0)
+        
+        @endcode
+       
        @param mydate A datetime object to incremented
        @param increment A string providing increment information:
-                  The string may include comma separated values of type seconds, 
+                  The string may include comma separated values of type seconds,
                   minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00 
+                  Example: Increment the datetime 2001-01-01 00:00:00
                   with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
                   will result in the datetime 2003-02-18 12:05:00
        @param mult A multiplier, default is 1
@@ -83,12 +128,11 @@
         incparts = increment.split(",")
         for incpart in incparts:
             inclist.append(incpart.strip().split(" "))
-            
 
         for inc in inclist:
-	    if len(inc) < 2:
-		core.error(_("Wrong increment format: %s") % (increment))
-		return None
+            if len(inc) < 2:
+                core.error(_("Wrong increment format: %s") % (increment))
+                return None
             if inc[1].find("seconds") >= 0:
                 seconds = mult * int(inc[0])
             elif inc[1].find("minutes") >= 0:
@@ -108,14 +152,17 @@
                 return None
 
         return increment_datetime(mydate, years, months, weeks, days, hours, minutes, seconds)
-    
+
     return mydate
 
 ###############################################################################
 
-def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0):
-    """!Return a new datetime object incremented with the provided relative dates and times"""
 
+def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, 
+                       minutes=0, seconds=0):
+    """!Return a new datetime object incremented with the provided 
+       relative dates and times"""
+
     tdelta_seconds = timedelta(seconds=seconds)
     tdelta_minutes = timedelta(minutes=minutes)
     tdelta_hours = timedelta(hours=hours)
@@ -124,14 +171,13 @@
     tdelta_months = timedelta(0)
     tdelta_years = timedelta(0)
 
-
     if months > 0:
         # Compute the actual number of days in the month to add as timedelta
         year = mydate.year
         month = mydate.month
 
         all_months = int(months) + int(month)
-        years_to_add = int(all_months/12.001)
+        years_to_add = int(all_months / 12.001)
         residual_months = all_months - (years_to_add * 12)
 
         # Make a deep copy of the datetime object
@@ -141,7 +187,7 @@
         if residual_months == 0:
             residual_months = 1
 
-        dt1 = dt1.replace(year = year + years_to_add, month = residual_months)
+        dt1 = dt1.replace(year=year + years_to_add, month=residual_months)
         tdelta_months = dt1 - mydate
 
     if years > 0:
@@ -152,12 +198,60 @@
         tdelta_years = dt1 - mydate
 
     return mydate + tdelta_seconds + tdelta_minutes + tdelta_hours + \
-                    tdelta_days + tdelta_weeks + tdelta_months + tdelta_years
+        tdelta_days + tdelta_weeks + tdelta_months + tdelta_years
 
 ###############################################################################
 
+
 def adjust_datetime_to_granularity(mydate, granularity):
-    """!Mofiy the datetime object to fit the given granularity    """
+    """!Modify the datetime object to fit the given granularity    
+    
+        * Years will start at the first of January
+        * Months will start at the first day of the month
+        * Days will start at the first hour of the day
+        * Hours will start at the first minute of an hour
+        * Minutes will start at the first second of a minute
+        
+        Usage:
+        
+        @code
+        
+        >>> dt = datetime(2001, 8, 8, 12,30,30)
+        >>> adjust_datetime_to_granularity(dt, "5 seconds")
+        datetime.datetime(2001, 8, 8, 12, 30, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "20 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "20 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 hours")
+        datetime.datetime(2001, 8, 8, 12, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "5 days")
+        datetime.datetime(2001, 8, 8, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 weeks")
+        datetime.datetime(2001, 8, 6, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "6 months")
+        datetime.datetime(2001, 8, 1, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 years")
+        datetime.datetime(2001, 1, 1, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 years, 3 months, 5 days, 3 hours, 3 minutes, 2 seconds")
+        datetime.datetime(2001, 8, 8, 12, 30, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 months, 5 days, 3 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 weeks, 5 days")
+        datetime.datetime(2001, 8, 8, 0, 0)
+        
+        @endcode
+    """
 
     if granularity:
 
@@ -203,30 +297,30 @@
                 return None
 
         if has_seconds:
-            pass          
-        elif has_minutes: # Start at 0 seconds
+            pass
+        elif has_minutes:  # Start at 0 seconds
             seconds = 0
-        elif has_hours: # Start at 0 minutes and seconds
+        elif has_hours:  # Start at 0 minutes and seconds
             seconds = 0
             minutes = 0
-        elif has_days: # Start at 0 hours, minutes and seconds
+        elif has_days:  # Start at 0 hours, minutes and seconds
             seconds = 0
             minutes = 0
             hours = 0
-        elif has_weeks: # Start at the first day of the week (Monday) at 00:00:00
+        elif has_weeks:  # Start at the first day of the week (Monday) at 00:00:00
             seconds = 0
             minutes = 0
             hours = 0
             if days > weekday:
-                days = days - weekday # this needs to be fixed
+                days = days - weekday  # this needs to be fixed
             else:
-                days = days + weekday # this needs to be fixed
-        elif has_months: # Start at the first day of the month at 00:00:00
+                days = days + weekday  # this needs to be fixed
+        elif has_months:  # Start at the first day of the month at 00:00:00
             seconds = 0
             minutes = 0
             hours = 0
             days = 1
-        elif has_years: # Start at the first day of the first month at 00:00:00
+        elif has_years:  # Start at the first day of the first month at 00:00:00
             seconds = 0
             minutes = 0
             hours = 0
@@ -234,16 +328,142 @@
             months = 1
 
         dt = copy.copy(mydate)
-        result = dt.replace(year=years, month=months, day=days, hour=hours, minute=minutes, second=seconds)
-        core.verbose(_("Adjust datetime from %s to %s with granularity %s") % (dt, result, granularity))
+        return dt.replace(year=years, month=months, day=days,
+                            hour=hours, minute=minutes, second=seconds)
 
-        return result
-
 ###############################################################################
 
+
 def compute_datetime_delta(start, end):
-    """!Return a dictionary with the accumulated delta in year, month, day, hour, minute and second
-    
+    """!Return a dictionary with the accumulated delta in year, month, day, 
+       hour, minute and second
+       
+        Usage:
+        
+        @code
+        
+        >>> start = datetime(2001, 1, 1, 00,00,00)
+        >>> end = datetime(2001, 1, 1, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,14)
+        >>> end = datetime(2001, 1, 1, 00,00,44)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 30, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,44)
+        >>> end = datetime(2001, 1, 1, 00,01,14)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 30, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 1}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,30)
+        >>> end = datetime(2001, 1, 1, 00,05,30)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 300, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 5}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,00)
+        >>> end = datetime(2001, 1, 1, 00,01,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 1}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 01,45,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 60}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 01,15,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 30}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 12,15,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 12, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 690}
+        
+        >>> start = datetime(2011,10,31, 00,00,00)
+        >>> end = datetime(2011,10,31, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,10,31, 00,00,00)
+        >>> end = datetime(2011,11,01, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 25, 'second': 0, 'max_days': 1, 'year': 0, 'day': 1, 'minute': 0}
+        
+        >>> start = datetime(2011,10,31, 12,00,00)
+        >>> end = datetime(2011,11,01, 06,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 18, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,11,01, 00,00,00)
+        >>> end = datetime(2011,12,01, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 721, 'month': 1, 'second': 0, 'max_days': 30, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,11,01, 00,00,00)
+        >>> end = datetime(2011,11,05, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 4, 'year': 0, 'day': 4, 'minute': 0}
+        
+        >>> start = datetime(2011,10,06, 00,00,00)
+        >>> end = datetime(2011,11,05, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 30, 'year': 0, 'day': 30, 'minute': 0}
+        
+        >>> start = datetime(2011,12,02, 00,00,00)
+        >>> end = datetime(2012,01,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 30, 'year': 1, 'day': 30, 'minute': 0}
+        
+        >>> start = datetime(2011,01,01, 00,00,00)
+        >>> end = datetime(2011,02,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 1, 'second': 0, 'max_days': 31, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,12,01, 00,00,00)
+        >>> end = datetime(2012,01,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 1, 'second': 0, 'max_days': 31, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,12,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 6, 'second': 0, 'max_days': 183, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2021,06,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 120, 'second': 0, 'max_days': 3653, 'year': 10, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,30,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 527790}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,00,05)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 31665605, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,30,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 527070}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,00,05)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 12, 'second': 31622405, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        @endcode
+        
        @return A dictionary with year, month, day, hour, minute and second as keys()
     """
     comp = {}
@@ -281,11 +501,11 @@
     else:
         d = end.hour - start.hour
         if d < 0:
-            d = d + 24  + 24 * day_diff
+            d = d + 24 + 24 * day_diff
         else:
             d = d + 24 * day_diff
         comp["hour"] = d
-    
+
     # Minutes
     if start.minute == 0 and end.minute == 0:
         comp["minute"] = 0
@@ -298,7 +518,7 @@
                 d = d + 24 * 60 * day_diff
         elif d == 0:
             if comp["hour"]:
-                d = 60* comp["hour"]
+                d = 60 * comp["hour"]
             else:
                 d = 24 * 60 * day_diff
 
@@ -311,14 +531,14 @@
         d = end.second - start.second
         if d != 0:
             if comp["minute"]:
-                d = d + 60* comp["minute"]
+                d = d + 60 * comp["minute"]
             elif comp["hour"]:
-                d = d + 3600* comp["hour"]
+                d = d + 3600 * comp["hour"]
             else:
                 d = d + 24 * 60 * 60 * day_diff
         elif d == 0:
             if comp["minute"]:
-                d = 60* comp["minute"]
+                d = 60 * comp["minute"]
             elif comp["hour"]:
                 d = 3600 * comp["hour"]
             else:
@@ -329,12 +549,15 @@
 
 ###############################################################################
 
+
 def string_to_datetime(time_string):
-    """!Convert a string into a datetime object using the dateutil parser. Return None in case of failure"""
+    """!Convert a string into a datetime object using the dateutil parser. 
+       Return None in case of failure"""
 
     # BC is not supported
     if time_string.find("bc") > 0:
-        core.error("Dates Before Christ are not supported in the temporal database")
+        core.error("Dates Before Christ are not supported "
+                   "in the temporal database")
         return None
 
     try:
@@ -345,17 +568,27 @@
 
 ###############################################################################
 
+
 def datetime_to_grass_datetime_string(dt):
     """!Convert a python datetime object into a GRASS datetime string"""
 
     # GRASS datetime month names
-    month_names  = ["", "jan","feb","mar","apr","may","jun","jul","aug","sep","oct","nov","dec"]
+    month_names = ["", "jan", "feb", "mar", "apr", "may", "jun",
+                   "jul", "aug", "sep", "oct", "nov", "dec"]
 
     # Check for time zone info in the datetime object
-    if dt.tzinfo != None:
-        string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i"%(dt.day, month_names[dt.month], dt.year, \
-                 dt.hour, dt.minute, dt.second, dt.tzinfo._offset.seconds/60)
+    if dt.tzinfo is not None:
+        string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i" % (dt.day, 
+                 month_names[dt.month], dt.year,
+                 dt.hour, dt.minute, dt.second, dt.tzinfo._offset.seconds / 60)
     else:
-        string = "%.2i %s %.4i %.2i:%.2i:%.2i"%(dt.day, month_names[dt.month], dt.year, dt.hour, dt.minute, dt.second)
+        string = "%.2i %s %.4i %.2i:%.2i:%.2i" % (dt.day, month_names[
+            dt.month], dt.year, dt.hour, dt.minute, dt.second)
 
     return string
+
+###############################################################################
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
\ No newline at end of file

Modified: grass/trunk/lib/python/temporal/extract.py
===================================================================
--- grass/trunk/lib/python/temporal/extract.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/extract.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -4,12 +4,6 @@
 
 Temporal GIS related functions to be used in Python scripts.
 
-Usage:
-
- at code
-import grass.temporal as tgis
- at endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -23,54 +17,61 @@
 
 ############################################################################
 
-def extract_dataset(input, output, type, where, expression, base, nprocs=1, register_null=False, layer=1, vtype="point,line,boundary,centroid,area,face"):
+
+def extract_dataset(input, output, type, where, expression, base, nprocs=1,
+                    register_null=False, layer=1,
+                    vtype="point,line,boundary,centroid,area,face"):
     """!Extract a subset of a space time raster, raster3d or vector dataset
-    
+
        A mapcalc expression can be provided to process the temporal extracted maps.
        Mapcalc expressions are supported for raster and raster3d maps.
-       
-       @param input The name of the input space time raster/raster3d dataset 
+
+       @param input The name of the input space time raster/raster3d dataset
        @param output The name of the extracted new space time raster/raster3d dataset
        @param type The type of the dataset: "raster", "raster3d" or vector
        @param where The temporal SQL WHERE statement for subset extraction
        @param expression The r(3).mapcalc expression or the v.extract where statement
-       @param base The base name of the new created maps in case a mapclac expression is provided 
+       @param base The base name of the newly created maps in case a mapcalc
+              expression is provided
        @param nprocs The number of parallel processes to be used for mapcalc processing
-       @param register_null Set this number True to register empty maps (only raster and raster3d maps)
-       @param layer The vector layer number to be used when no timestamped layer is present, default is 1
-       @param vtype The feature type to be extracted for vector maps, default is point,line,boundary,centroid,area and face
+       @param register_null Set this number True to register empty maps
+             (only raster and raster3d maps)
+       @param layer The vector layer number to be used when no timestamped
+              layer is present, default is 1
+       @param vtype The feature type to be extracted for vector maps, default
+              is point,line,boundary,centroid,area and face
     """
 
     # Check the parameters
 
     if expression and not base:
         core.fatal(_("You need to specify the base name of new created maps"))
-    
-    mapset =  core.gisenv()["MAPSET"]
 
+    mapset = core.gisenv()["MAPSET"]
+
     if input.find("@") >= 0:
         id = input
     else:
         id = input + "@" + mapset
 
     if type == "raster":
-	sp = space_time_raster_dataset(id)
+        sp = space_time_raster_dataset(id)
     elif type == "raster3d":
-	sp = space_time_raster3d_dataset(id)
+        sp = space_time_raster3d_dataset(id)
     elif type == "vector":
-	sp = space_time_vector_dataset(id)
-	
+        sp = space_time_vector_dataset(id)
+
     dummy = sp.get_new_map_instance(None)
-	
+
     dbif = ()
     dbif.connect()
-    
-    if sp.is_in_db(dbif) == False:
-	dbif.close()
+
+    if not sp.is_in_db(dbif):
+        dbif.close()
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
 
     if expression and not base:
-	dbif.close()
+        dbif.close()
         core.fatal(_("Please specify base="))
 
     sp.select(dbif)
@@ -82,192 +83,214 @@
 
     # The new space time dataset
     new_sp = sp.get_new_instance(out_id)
-	
+
     if new_sp.is_in_db():
-        if core.overwrite() == False:
-	    dbif.close()
-            core.fatal(_("Space time %s dataset <%s> is already in database, use overwrite flag to overwrite") % (type, out_id))
+        if not core.overwrite():
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> is already in database,"
+                         " use overwrite flag to overwrite") % (type, out_id))
     if type == "vector":
-	rows = sp.get_registered_maps("id,name,mapset,layer", where, "start_time", dbif)
+        rows = sp.get_registered_maps(
+            "id,name,mapset,layer", where, "start_time", dbif)
     else:
-	rows = sp.get_registered_maps("id", where, "start_time", dbif)
+        rows = sp.get_registered_maps("id", where, "start_time", dbif)
 
     new_maps = {}
     if rows:
-	num_rows = len(rows)
-	
-	core.percent(0, num_rows, 1)
-	
-	# Run the mapcalc expression
+        num_rows = len(rows)
+
+        core.percent(0, num_rows, 1)
+
+        # Run the mapcalc expression
         if expression:
-	    count = 0
-	    proc_count = 0
-	    proc_list = []
-	    
-	    for row in rows:
-		count += 1
-		
-		core.percent(count, num_rows, 1)
-		
-		map_name = "%s_%i" % (base, count)
-		
-		# We need to modify the r(3).mapcalc expression
-		if type != "vector":
-		    expr = "%s = %s" % (map_name, expression)
-		    
-		    expr = expr.replace(sp.base.get_map_id(), row["id"])
-		    expr = expr.replace(sp.base.get_name(), row["id"])
-		    
-		    # We need to build the id
-		    map_id = dummy.build_id(map_name, mapset)
-		else:
-		    map_id = dummy.build_id(map_name, mapset, row["layer"])
+            count = 0
+            proc_count = 0
+            proc_list = []
 
-		new_map = sp.get_new_map_instance(map_id)
+            for row in rows:
+                count += 1
 
-		# Check if new map is in the temporal database
-		if new_map.is_in_db(dbif):
-		    if core.overwrite() == True:
-			# Remove the existing temporal database entry
-			new_map.delete(dbif)
-			new_map = sp.get_new_map_instance(map_id)
-		    else:
-			core.error(_("Map <%s> is already in temporal database, use overwrite flag to overwrite")%(new_map.get_map_id()))
-			continue
-		
-		# Add process to the process list
-		if type == "raster":
-		    core.verbose(_("Apply r.mapcalc expression: \"%s\"") % expr)
-		    proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
-		elif type == "raster3d":
-		    core.verbose(_("Apply r3.mapcalc expression: \"%s\"") % expr)
-		    proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
-		elif type == "vector":
-		    core.verbose(_("Apply v.extract where statement: \"%s\"") % expression)
-		    if row["layer"]:
-			proc_list.append(Process(target=run_vector_extraction, args=(row["name"] + "@" + row["mapset"], map_name, row["layer"], vtype, expression)))
-		    else:
-			proc_list.append(Process(target=run_vector_extraction, args=(row["name"] + "@" + row["mapset"], map_name, layer, vtype, expression)))
-		
-		proc_list[proc_count].start()
-		proc_count += 1
-		
-		# Join processes if the maximum number of processes are reached or the end of the
-		# loop is reached
-		if proc_count == nprocs or proc_count == num_rows:
-		    proc_count = 0
-		    exitcodes = 0
-		    for proc in proc_list:
-			proc.join()
-			exitcodes += proc.exitcode
-			
-		    if exitcodes != 0:
-			dbif.close()
-			core.fatal(_("Error while computation"))
-			
-		    # Empty process list
-		    proc_list = []
-		    
-		# Store the new maps
-		new_maps[row["id"]] = new_map
-	
-	core.percent(0, num_rows, 1)
-	
-	# Insert the new space time dataset
-	if new_sp.is_in_db(dbif):
-	    if core.overwrite() == True:
-		new_sp.delete(dbif)
-		new_sp = sp.get_new_instance(out_id)
+                core.percent(count, num_rows, 1)
 
-	temporal_type, semantic_type, title, description = sp.get_initial_values()
-	new_sp.set_initial_values(temporal_type, semantic_type, title, description)
-	new_sp.insert(dbif)
-	
-	# collect empty maps to remove them
-	empty_maps = []
-	
-	# Register the maps in the database
+                map_name = "%s_%i" % (base, count)
+
+                # We need to modify the r(3).mapcalc expression
+                if type != "vector":
+                    expr = "%s = %s" % (map_name, expression)
+
+                    expr = expr.replace(sp.base.get_map_id(), row["id"])
+                    expr = expr.replace(sp.base.get_name(), row["id"])
+
+                    # We need to build the id
+                    map_id = dummy.build_id(map_name, mapset)
+                else:
+                    map_id = dummy.build_id(map_name, mapset, row["layer"])
+
+                new_map = sp.get_new_map_instance(map_id)
+
+                # Check if new map is in the temporal database
+                if new_map.is_in_db(dbif):
+                    if core.overwrite():
+                        # Remove the existing temporal database entry
+                        new_map.delete(dbif)
+                        new_map = sp.get_new_map_instance(map_id)
+                    else:
+                        core.error(_("Map <%s> is already in temporal database,"
+                                     " use overwrite flag to overwrite") %
+                                    (new_map.get_map_id()))
+                        continue
+
+                # Add process to the process list
+                if type == "raster":
+                    core.verbose(_("Apply r.mapcalc expression: \"%s\"")
+                                 % expr)
+                    proc_list.append(Process(target=run_mapcalc2d,
+                                             args=(expr,)))
+                elif type == "raster3d":
+                    core.verbose(_("Apply r3.mapcalc expression: \"%s\"")
+                                 % expr)
+                    proc_list.append(Process(target=run_mapcalc3d,
+                                             args=(expr,)))
+                elif type == "vector":
+                    core.verbose(_("Apply v.extract where statement: \"%s\"")
+                                 % expression)
+                    if row["layer"]:
+                        proc_list.append(Process(target=run_vector_extraction,
+                                                 args=(row["name"] + "@" + row["mapset"],
+                                                 map_name, row["layer"], 
+                                                 vtype, expression)))
+                    else:
+                        proc_list.append(Process(target=run_vector_extraction,
+                                                 args=(row["name"] + "@" + row["mapset"],
+                                                 map_name, layer, vtype, 
+                                                 expression)))
+
+                proc_list[proc_count].start()
+                proc_count += 1
+
+                # Join processes if the maximum number of processes is
+                # reached or the end of the loop is reached
+                if proc_count == nprocs or proc_count == num_rows:
+                    proc_count = 0
+                    exitcodes = 0
+                    for proc in proc_list:
+                        proc.join()
+                        exitcodes += proc.exitcode
+
+                    if exitcodes != 0:
+                        dbif.close()
+                        core.fatal(_("Error while computation"))
+
+                    # Empty process list
+                    proc_list = []
+
+                # Store the new maps
+                new_maps[row["id"]] = new_map
+
+        core.percent(0, num_rows, 1)
+
+        # Insert the new space time dataset
+        if new_sp.is_in_db(dbif):
+            if core.overwrite():
+                new_sp.delete(dbif)
+                new_sp = sp.get_new_instance(out_id)
+
+        temporal_type, semantic_type, title, description = sp.get_initial_values()
+        new_sp.set_initial_values(
+            temporal_type, semantic_type, title, description)
+        new_sp.insert(dbif)
+
+        # collect empty maps to remove them
+        empty_maps = []
+
+        # Register the maps in the database
         count = 0
         for row in rows:
             count += 1
-	    
-	    core.percent(count, num_rows, 1)
 
+            core.percent(count, num_rows, 1)
+
             old_map = sp.get_new_map_instance(row["id"])
             old_map.select(dbif)
-            
+
             if expression:
-		# Register the new maps
-		if new_maps.has_key(row["id"]):
-		    new_map = new_maps[row["id"]]
+                # Register the new maps
+                if row["id"] in new_maps:
+                    new_map = new_maps[row["id"]]
 
-		    # Read the raster map data
-		    new_map.load()
-		    
-		    # In case of a empty map continue, do not register empty maps
-		    if type == "raster" or type == "raster3d":
-			if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
-			    if not register_null:
-				empty_maps.append(new_map)
-				continue
-		    elif type == "vector":
-			if new_map.metadata.get_primitives() == 0 or new_map.metadata.get_primitives() == None:
-			    if not register_null:
-				empty_maps.append(new_map)
-				continue
+                    # Read the raster map data
+                    new_map.load()
 
-		    # Set the time stamp
-		    if old_map.is_time_absolute():
-			start, end, tz = old_map.get_absolute_time()
-			new_map.set_absolute_time(start, end, tz)
-		    else:
-			start, end = old_map.get_relative_time()
-			new_map.set_relative_time(start, end)
+                    # In case of an empty map continue, do not register empty maps
+                    if type == "raster" or type == "raster3d":
+                        if new_map.metadata.get_min() is None and \
+                            new_map.metadata.get_max() is None:
+                            if not register_null:
+                                empty_maps.append(new_map)
+                                continue
+                    elif type == "vector":
+                        if new_map.metadata.get_primitives() == 0 or \
+                           new_map.metadata.get_primitives() is None:
+                            if not register_null:
+                                empty_maps.append(new_map)
+                                continue
 
-		    # Insert map in temporal database
-		    new_map.insert(dbif)
+                    # Set the time stamp
+                    if old_map.is_time_absolute():
+                        start, end, tz = old_map.get_absolute_time()
+                        new_map.set_absolute_time(start, end, tz)
+                    else:
+                        start, end = old_map.get_relative_time()
+                        new_map.set_relative_time(start, end)
 
-		    new_sp.register_map(new_map, dbif)
-	    else:
-		new_sp.register_map(old_map, dbif)          
-                
+                    # Insert map in temporal database
+                    new_map.insert(dbif)
+
+                    new_sp.register_map(new_map, dbif)
+            else:
+                new_sp.register_map(old_map, dbif)
+
         # Update the spatio-temporal extent and the metadata table entries
         new_sp.update_from_registered_maps(dbif)
-	
-	core.percent(num_rows, num_rows, 1)
-	
-	# Remove empty maps
-	if len(empty_maps) > 0:
-	    names = ""
-	    count = 0
-	    for map in empty_maps:
-		if count == 0:
-		    names += "%s"%(map.get_name())
-		else:
-		    names += ",%s"%(map.get_name())
-		count += 1
-	    if type == "raster":
-		core.run_command("g.remove", rast=names, quiet=True)
-	    elif type == "raster3d":
-		core.run_command("g.remove", rast3d=names, quiet=True)
-	    elif type == "vector":
-		core.run_command("g.remove", vect=names, quiet=True)
-        
+
+        core.percent(num_rows, num_rows, 1)
+
+        # Remove empty maps
+        if len(empty_maps) > 0:
+            names = ""
+            count = 0
+            for map in empty_maps:
+                if count == 0:
+                    names += "%s" % (map.get_name())
+                else:
+                    names += ",%s" % (map.get_name())
+                count += 1
+            if type == "raster":
+                core.run_command("g.remove", rast=names, quiet=True)
+            elif type == "raster3d":
+                core.run_command("g.remove", rast3d=names, quiet=True)
+            elif type == "vector":
+                core.run_command("g.remove", vect=names, quiet=True)
+
     dbif.close()
 
 ###############################################################################
 
+
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("r.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r.mapcalc", expression=expr,
+                            overwrite=core.overwrite(), quiet=True)
 
 
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
-    return core.run_command("r3.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
-    
+    return core.run_command("r3.mapcalc", expression=expr,
+                            overwrite=core.overwrite(), quiet=True)
 
+
 def run_vector_extraction(input, output, layer, type, where):
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("v.extract", input=input, output=output, layer=layer, type=type, where=where, overwrite=core.overwrite(), quiet=True)
-
+    return core.run_command("v.extract", input=input, output=output,
+                            layer=layer, type=type, where=where,
+                            overwrite=core.overwrite(), quiet=True)

Modified: grass/trunk/lib/python/temporal/mapcalc.py
===================================================================
--- grass/trunk/lib/python/temporal/mapcalc.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/mapcalc.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -4,12 +4,6 @@
 
 Temporal GIS related functions to be used in Python scripts.
 
-Usage:
-
- at code
-import grass.temporal as tgis
- at endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -23,87 +17,96 @@
 
 ############################################################################
 
-def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs=1, register_null=False, spatial=False):
-    """!Perform map-calculations of maps from different space time raster/raster3d datasets, using
-       a specific sampling method to select temporal related maps.
-    
-       A mapcalc expression can be provided to process the temporal extracted maps.
+
+def dataset_mapcalculator(inputs, output, type, expression, base, method, 
+                          nprocs=1, register_null=False, spatial=False):
+    """!Perform map-calculations of maps from different space time 
+       raster/raster3d datasets, using a specific sampling method 
+       to select temporal related maps.
+
+       A mapcalc expression can be provided to process the temporal 
+       extracted maps.
        Mapcalc expressions are supported for raster and raster3d maps.
-       
-       @param input The name of the input space time raster/raster3d dataset 
-       @param output The name of the extracted new space time raster/raster3d dataset
+
+       @param input The name of the input space time raster/raster3d dataset
+       @param output The name of the extracted new space time raster(3d) dataset
        @param type The type of the dataset: "raster" or "raster3d"
        @param method The method to be used for temporal sampling
        @param expression The r(3).mapcalc expression
-       @param base The base name of the new created maps in case a mapclac expression is provided 
-       @param nprocs The number of parallel processes to be used for mapcalc processing
+       @param base The base name of the new created maps in case a
+              mapcalc expression is provided
+       @param nprocs The number of parallel processes to be used for 
+              mapcalc processing
        @param register_null Set this number True to register empty maps
        @param spatial Check spatial overlap
     """
-    
+
     # We need a database interface for fast computation
     dbif = ()
     dbif.connect()
 
-    mapset =  core.gisenv()["MAPSET"]
-    
+    mapset = core.gisenv()["MAPSET"]
+
     input_name_list = inputs.split(",")
-    
+
     # Process the first input
     if input_name_list[0].find("@") >= 0:
-	id = input_name_list[0]
+        id = input_name_list[0]
     else:
-	id = input_name_list[0] + "@" + mapset
-	
+        id = input_name_list[0] + "@" + mapset
+
     if type == "raster":
-	first_input = space_time_raster_dataset(id)
+        first_input = space_time_raster_dataset(id)
     else:
-	first_input = space_time_raster3d_dataset(id)
-    
-    if first_input.is_in_db(dbif) == False:
-	dbif.close()
+        first_input = space_time_raster3d_dataset(id)
+
+    if not first_input.is_in_db(dbif):
+        dbif.close()
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
 
     # Fill the object with data from the temporal database
     first_input.select(dbif)
-    
-    # All additional inputs in reverse sorted order to avoid wrong name substitution
+
+    # All additional inputs in reverse sorted order to avoid 
+    # wrong name substitution
     input_name_list = input_name_list[1:]
     input_name_list.sort()
     input_name_list.reverse()
     input_list = []
-        
+
     for input in input_name_list:
 
-	if input.find("@") >= 0:
-	    id = input
-	else:
-	    id = input + "@" + mapset
-	    
-	sp = first_input.get_new_instance(id)
-	
-	if sp.is_in_db(dbif) == False:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> not found in temporal database") % (type, id))
+        if input.find("@") >= 0:
+            id = input
+        else:
+            id = input + "@" + mapset
 
-	sp.select(dbif)
-	
-	input_list.append(copy.copy(sp))
+        sp = first_input.get_new_instance(id)
 
+        if not sp.is_in_db(dbif):
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> not "
+                         "found in temporal database") % (type, id))
+
+        sp.select(dbif)
+
+        input_list.append(copy.copy(sp))
+
     # Create the new space time dataset
     if output.find("@") >= 0:
         out_id = output
     else:
         out_id = output + "@" + mapset
-        
+
     new_sp = first_input.get_new_instance(out_id)
-    
+
     # Check if in database
     if new_sp.is_in_db(dbif):
-        if core.overwrite() == False:
-	    dbif.close()
-            core.fatal(_("Space time %s dataset <%s> is already in database, use overwrite flag to overwrite") % (type, out_id))
- 
+        if not core.overwrite():
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> is already in database, "
+                         "use overwrite flag to overwrite") % (type, out_id))
+
     # Sample all inputs by the first input and create a sample matrix
     if spatial:
         core.message(_("Start spatio-temporal sampling"))
@@ -114,217 +117,226 @@
     sample_map_list = []
     # First entry is the first dataset id
     id_list.append(first_input.get_name())
-    
+
     if len(input_list) > 0:
-	has_samples = False
-	for dataset in input_list:
-	    list = dataset.sample_by_dataset(stds=first_input, method=method, spatial=spatial, dbif=dbif)
-	    
-	    # In case samples are not found
-	    if not list and len(list) == 0:
-		dbif.close()
-		core.message(_("No samples found for map calculation"))
-		return 0
-	    
-	    # The fist entries are the samples
-	    map_name_list = []
-	    if has_samples == False:
-		for entry in list:
-		    granule = entry["granule"]
-		    # Do not consider gaps
-		    if granule.get_id() == None:
-			continue
-		    sample_map_list.append(granule)
-		    map_name_list.append(granule.get_name())
-		# Attach the map names
-		map_matrix.append(copy.copy(map_name_list))
-		has_samples = True
-		
-	    map_name_list = []
-	    for entry in list:
-		maplist = entry["samples"]
-		granule = entry["granule"]
-		
-		# Do not consider gaps in the sampler
-		if granule.get_id() == None:
-		    continue
-		
-		if len(maplist) > 1:
-		    core.warning(_("Found more than a single map in a sample granule. "\
-		    "Only the first map is used for computation. "\
-		    "Use t.rast.aggregate.ds to create synchronous raster datasets."))
-		
-		# Store all maps! This includes non existent maps, identified by id == None 
-		map_name_list.append(maplist[0].get_name())
-	    
-	    # Attach the map names
-	    map_matrix.append(copy.copy(map_name_list))
+        has_samples = False
+        for dataset in input_list:
+            list = dataset.sample_by_dataset(stds=first_input,
+                                             method=method, spatial=spatial, 
+                                             dbif=dbif)
 
-	    id_list.append(dataset.get_name())
+            # In case samples are not found
+            if not list and len(list) == 0:
+                dbif.close()
+                core.message(_("No samples found for map calculation"))
+                return 0
+
+            # The first entries are the samples
+            map_name_list = []
+            if not has_samples:
+                for entry in list:
+                    granule = entry["granule"]
+                    # Do not consider gaps
+                    if granule.get_id() is None:
+                        continue
+                    sample_map_list.append(granule)
+                    map_name_list.append(granule.get_name())
+                # Attach the map names
+                map_matrix.append(copy.copy(map_name_list))
+                has_samples = True
+
+            map_name_list = []
+            for entry in list:
+                maplist = entry["samples"]
+                granule = entry["granule"]
+
+                # Do not consider gaps in the sampler
+                if granule.get_id() is None:
+                    continue
+
+                if len(maplist) > 1:
+                    core.warning(_("Found more than a single map in a sample "
+                                   "granule. Only the first map is used for "
+                                   "computation. Use t.rast.aggregate.ds to "
+                                   "create synchronous raster datasets."))
+
+                # Store all maps! This includes non existent maps, 
+                # identified by id == None
+                map_name_list.append(maplist[0].get_name())
+
+            # Attach the map names
+            map_matrix.append(copy.copy(map_name_list))
+
+            id_list.append(dataset.get_name())
     else:
-	list = first_input.get_registered_maps_as_objects(dbif=dbif)
-	
-	if list == None:
-	    dbif.close()
+        list = first_input.get_registered_maps_as_objects(dbif=dbif)
+
+        if list is None:
+            dbif.close()
             core.message(_("No maps in input dataset"))
             return 0
-	
-	map_name_list = []
-	for map in list:
-	    map_name_list.append(map.get_name())
-	    sample_map_list.append(map)
-	
-	# Attach the map names
-	map_matrix.append(copy.copy(map_name_list))
-   
+
+        map_name_list = []
+        for map in list:
+            map_name_list.append(map.get_name())
+            sample_map_list.append(map)
+
+        # Attach the map names
+        map_matrix.append(copy.copy(map_name_list))
+
     # Needed for map registration
     map_list = []
-	
+
     if len(map_matrix) > 0:
-	
-	core.message(_("Start mapcalc computation"))
-	    
-	count = 0
-	# Get the number of samples
-	num = len(map_matrix[0])
-	
-	# Parallel processing
+
+        core.message(_("Start mapcalc computation"))
+
+        count = 0
+        # Get the number of samples
+        num = len(map_matrix[0])
+
+        # Parallel processing
         proc_list = []
         proc_count = 0
-	
-	# For all samples
+
+        # For all samples
         for i in range(num):
-            
+
             count += 1
-	    core.percent(count, num, 1)
+            core.percent(count, num, 1)
 
-	    # Create the r.mapcalc statement for the current time step
-	    map_name = "%s_%i" % (base, count)   
-	    expr = "%s = %s" % (map_name, expression)
-            
+            # Create the r.mapcalc statement for the current time step
+            map_name = "%s_%i" % (base, count)
+            expr = "%s = %s" % (map_name, expression)
+
             # Check that all maps are in the sample
             valid_maps = True
-            # Replace all dataset names with their map names of the current time step
+            # Replace all dataset names with their map names of the 
+            # current time step
             for j in range(len(map_matrix)):
-		if map_matrix[j][i] == None:
-		    valid_maps = False
-		    break
-		# Substitute the dataset name with the map name
-		expr = expr.replace(id_list[j], map_matrix[j][i])
+                if map_matrix[j][i] is None:
+                    valid_maps = False
+                    break
+                # Substitute the dataset name with the map name
+                expr = expr.replace(id_list[j], map_matrix[j][i])
 
-	    # Proceed with the next sample
-	    if valid_maps == False:
-		continue
-		
-	    # Create the new map id and check if the map is already in the database
-	    map_id = map_name + "@" + mapset
+            # Proceed with the next sample
+            if not valid_maps:
+                continue
 
-	    new_map = first_input.get_new_map_instance(map_id)
+            # Create the new map id and check if the map is already 
+            # in the database
+            map_id = map_name + "@" + mapset
 
-	    # Check if new map is in the temporal database
-	    if new_map.is_in_db(dbif):
-		if core.overwrite() == True:
-		    # Remove the existing temporal database entry
-		    new_map.delete(dbif)
-		    new_map = first_input.get_new_map_instance(map_id)
-		else:
-		    core.error(_("Map <%s> is already in temporal database, use overwrite flag to overwrite"))
-		    continue
+            new_map = first_input.get_new_map_instance(map_id)
 
-	    # Set the time stamp
-	    if sample_map_list[i].is_time_absolute():
-		start, end, tz = sample_map_list[i].get_absolute_time()
-		new_map.set_absolute_time(start, end, tz)
-	    else:
-		start, end = sample_map_list[i].get_relative_time()
-		new_map.set_relative_time(start, end)
-	    
-	    map_list.append(new_map)
-	    
-	    # Start the parallel r.mapcalc computation
-	    core.verbose(_("Apply mapcalc expression: \"%s\"") % expr)
+            # Check if new map is in the temporal database
+            if new_map.is_in_db(dbif):
+                if core.overwrite():
+                    # Remove the existing temporal database entry
+                    new_map.delete(dbif)
+                    new_map = first_input.get_new_map_instance(map_id)
+                else:
+                    core.error(_("Map <%s> is already in temporal database, "
+                                 "use overwrite flag to overwrite"))
+                    continue
 
-	    if type == "raster":
-		proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
-	    else:
-		proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
-	    proc_list[proc_count].start()
-	    proc_count += 1
-	    
-	    if proc_count == nprocs or proc_count == num:
-		proc_count = 0
-		exitcodes = 0
-		for proc in proc_list:
-		    proc.join()
-		    exitcodes += proc.exitcode
-		    
-		if exitcodes != 0:
-		    dbif.close()
-		    core.fatal(_("Error while mapcalc computation"))
-		    
-		# Empty process list
-		proc_list = []
-		
-	# Register the new maps in the output space time dataset
-	core.message(_("Start map registration in temporal database"))
-	    
-	# Overwrite an existing dataset if requested
-	if new_sp.is_in_db(dbif):
-	    if core.overwrite() == True:
-		new_sp.delete(dbif)
-		new_sp = first_input.get_new_instance(out_id)
-		
-	# Copy the ids from the first input
-	temporal_type, semantic_type, title, description = first_input.get_initial_values()
-	new_sp.set_initial_values(temporal_type, semantic_type, title, description)
-	# Insert the dataset in the temporal database
-	new_sp.insert(dbif)
-    
-	count = 0
-	
-	# collect empty maps to remove them
-	empty_maps = []
-	
-	# Insert maps in the temporal database and in the new space time dataset
-	for new_map in map_list:
+            # Set the time stamp
+            if sample_map_list[i].is_time_absolute():
+                start, end, tz = sample_map_list[i].get_absolute_time()
+                new_map.set_absolute_time(start, end, tz)
+            else:
+                start, end = sample_map_list[i].get_relative_time()
+                new_map.set_relative_time(start, end)
 
+            map_list.append(new_map)
+
+            # Start the parallel r.mapcalc computation
+            core.verbose(_("Apply mapcalc expression: \"%s\"") % expr)
+
+            if type == "raster":
+                proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
+            else:
+                proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
+            proc_list[proc_count].start()
+            proc_count += 1
+
+            if proc_count == nprocs or proc_count == num:
+                proc_count = 0
+                exitcodes = 0
+                for proc in proc_list:
+                    proc.join()
+                    exitcodes += proc.exitcode
+
+                if exitcodes != 0:
+                    dbif.close()
+                    core.fatal(_("Error while mapcalc computation"))
+
+                # Empty process list
+                proc_list = []
+
+        # Register the new maps in the output space time dataset
+        core.message(_("Start map registration in temporal database"))
+
+        # Overwrite an existing dataset if requested
+        if new_sp.is_in_db(dbif):
+            if core.overwrite():
+                new_sp.delete(dbif)
+                new_sp = first_input.get_new_instance(out_id)
+
+        # Copy the ids from the first input
+        temporal_type, semantic_type, title, description = first_input.get_initial_values()
+        new_sp.set_initial_values(
+            temporal_type, semantic_type, title, description)
+        # Insert the dataset in the temporal database
+        new_sp.insert(dbif)
+
+        count = 0
+
+        # collect empty maps to remove them
+        empty_maps = []
+
+        # Insert maps in the temporal database and in the new space time dataset
+        for new_map in map_list:
+
             count += 1
-	    core.percent(count, num, 1)
-	    
-	    # Read the map data
-	    new_map.load()
-	    
-	    # In case of a null map continue, do not register null maps
-	    if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
-		if not register_null:
-		    empty_maps.append(new_map)
-		    continue
+            core.percent(count, num, 1)
 
-	    # Insert map in temporal database
-	    new_map.insert(dbif)
+            # Read the map data
+            new_map.load()
 
-	    new_sp.register_map(new_map, dbif)
+            # In case of a null map continue, do not register null maps
+            if new_map.metadata.get_min() is None and \
+               new_map.metadata.get_max() is None:
+                if not register_null:
+                    empty_maps.append(new_map)
+                    continue
 
+            # Insert map in temporal database
+            new_map.insert(dbif)
+
+            new_sp.register_map(new_map, dbif)
+
         # Update the spatio-temporal extent and the metadata table entries
         new_sp.update_from_registered_maps(dbif)
-		
-	core.percent(1, 1, 1)
 
-	# Remove empty maps
-	if len(empty_maps) > 0:
-	    names = ""
-	    count = 0
-	    for map in empty_maps:
-		if count == 0:
-		    names += "%s"%(map.get_name())
-		else:
-		    names += ",%s"%(map.get_name())
-		count += 1
-	    if type == "raster":
-		core.run_command("g.remove", rast=names, quiet=True)
-	    elif type == "raster3d":
-		core.run_command("g.remove", rast3d=names, quiet=True)
-        
+        core.percent(1, 1, 1)
+
+        # Remove empty maps
+        if len(empty_maps) > 0:
+            names = ""
+            count = 0
+            for map in empty_maps:
+                if count == 0:
+                    names += "%s" % (map.get_name())
+                else:
+                    names += ",%s" % (map.get_name())
+                count += 1
+            if type == "raster":
+                core.run_command("g.remove", rast=names, quiet=True)
+            elif type == "raster3d":
+                core.run_command("g.remove", rast3d=names, quiet=True)
+
     dbif.close()
 
 
@@ -332,10 +344,13 @@
 
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("r.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r.mapcalc", expression=expr, 
+                            overwrite=core.overwrite(), quiet=True)
 
 ###############################################################################
 
+
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
-    return core.run_command("r3.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r3.mapcalc", expression=expr, 
+                            overwrite=core.overwrite(), quiet=True)

Modified: grass/trunk/lib/python/temporal/metadata.py
===================================================================
--- grass/trunk/lib/python/temporal/metadata.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/metadata.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -6,6 +6,8 @@
 
 Usage:
 
+ at code
+
 >>> import grass.temporal as tgis
 >>> meta = tgis.RasterMetadata()
 >>> meta = tgis.Raster3DMetadata()
@@ -14,6 +16,8 @@
 >>> meta = tgis.STR3DSMetadata()
 >>> meta = tgis.STVDSMetadata()
 
+ at endcode
+
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -31,8 +35,9 @@
     
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.RasterMetadataBase(table="metadata", ident="soil at PERMANENT",
+        @code
+        
+        >>> meta = RasterMetadataBase(table="metadata", ident="soil at PERMANENT",
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... ewres=0.1, min=0, max=100)
         >>> meta.datatype
@@ -69,7 +74,8 @@
         ewres=0.1
         min=0.0
         max=100.0
-    
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, datatype=None, cols=None, 
 		rows=None, number_of_cells=None, nsres=None, ewres=None, 
@@ -267,9 +273,10 @@
         register table is stored.
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.RasterMetadata(ident="soil at PERMANENT",
+        >>> meta = RasterMetadata(ident="soil at PERMANENT",
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... ewres=0.1, min=0, max=100)
         >>> meta.datatype
@@ -310,6 +317,8 @@
         min=0.0
         max=100.0
         strds_register=None
+        
+        @endcode
     """
     def __init__(self, ident=None, strds_register=None, datatype=None, 
 		 cols=None, rows=None, number_of_cells=None, nsres=None, 
@@ -363,9 +372,10 @@
         raster dataset register table is stored.
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.Raster3DMetadata(ident="soil at PERMANENT",
+        >>> meta = Raster3DMetadata(ident="soil at PERMANENT",
         ... datatype="FCELL", cols=100, rows=100, depths=100,
         ... number_of_cells=1000000, nsres=0.1, ewres=0.1, tbres=0.1,
         ... min=0, max=100)
@@ -415,6 +425,8 @@
         str3ds_register=None
         depths=100
         tbres=0.1
+        
+        @endcode
     """
     def __init__(self, ident=None, str3ds_register=None, datatype=None, 
 		 cols=None, rows=None, depths=None, number_of_cells=None, 
@@ -507,8 +519,9 @@
         raster dataset register table is stored.
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
         >>> meta = VectorMetadata(ident="lidar at PERMANENT", is_3d=True, 
         ... number_of_points=1, number_of_lines=2, number_of_boundaries=3,
         ... number_of_centroids=4, number_of_faces=5, number_of_kernels=6, 
@@ -573,6 +586,8 @@
         islands=10
         holes=11
         volumes=12
+        
+        @endcode
     """
     def __init__(
         self, ident=None, stvds_register=None, is_3d=False, 
@@ -851,13 +866,15 @@
 
 
 class STDSMetadataBase(SQLDatabaseInterface):
-    """!This is the space time dataset metadata base class for strds, stvds and str3ds datasets
+    """!This is the space time dataset metadata base class for 
+       strds, stvds and str3ds datasets
        setting/getting the id, the title and the description
        
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STDSMetadataBase(ident="soils at PERMANENT",
+        @code
+        
+        >>> meta = STDSMetadataBase(ident="soils at PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils at PERMANENT'
@@ -874,6 +891,8 @@
          | Soils 1950 - 2010
         >>> meta.print_shell_info()
         number_of_maps=None
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, title=None, description=None):
 
@@ -956,16 +975,18 @@
 
 
 class STDSRasterMetadataBase(STDSMetadataBase):
-    """!This is the space time dataset metadata base class for strds and str3ds datasets
+    """!This is the space time dataset metadata base 
+       class for strds and str3ds datasets
 
        Most of the metadata values are set by triggers in the database when
-       new raster or voxel maps are added. Therefor only some set- an many get-functions
-       are available.
+       new raster or voxel maps are added. Therefore only some 
+       set- and many get-functions are available.
        
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STDSRasterMetadataBase(ident="soils at PERMANENT",
+        @code
+        
+        >>> meta = STDSRasterMetadataBase(ident="soils at PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils at PERMANENT'
@@ -1006,6 +1027,8 @@
         min_max=None
         max_min=None
         max_max=None
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, title=None, description=None):
 
@@ -1147,8 +1170,9 @@
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STRDSMetadata(ident="soils at PERMANENT",
+        @code
+        
+        >>> meta = STRDSMetadata(ident="soils at PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils at PERMANENT'
@@ -1193,6 +1217,8 @@
         max_min=None
         max_max=None
         raster_register=None
+        
+        @endcode
     """
     def __init__(self, ident=None, raster_register=None, title=None, description=None):
 
@@ -1241,8 +1267,9 @@
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STR3DSMetadata(ident="soils at PERMANENT",
+        @code
+        
+        >>> meta = STR3DSMetadata(ident="soils at PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils at PERMANENT'
@@ -1293,6 +1320,8 @@
         tbres_min=None
         tbres_max=None
         raster3d_register=None
+        
+        @endcode
         """
     def __init__(self, ident=None, raster3d_register=None, title=None, description=None):
 
@@ -1366,9 +1395,12 @@
        Most of the metadata values are set by triggers in the database when
        new vector maps are added. Therefor only some set- an many get-functions
        are available.
-       
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STVDSMetadata(ident="lidars at PERMANENT",
+        
+        Usage:
+        
+        @code
+        
+        >>> meta = STVDSMetadata(ident="lidars at PERMANENT",
         ... title="LIDARS", description="LIDARS 2008 - 2010")
         >>> meta.id
         'lidars at PERMANENT'
@@ -1424,6 +1456,8 @@
         islands=None
         holes=None
         volumes=None
+        
+        @endcode
     """
     def __init__(
         self, ident=None, vector_register=None, title=None, description=None):
@@ -1600,18 +1634,18 @@
         STDSMetadataBase.print_info(self)
         print " | Vector register table:...... " + str(
             self.get_vector_register())
-        print " | Number of points ........... " + str(self.get_number_of_points())
-        print " | Number of lines ............ " + str(self.get_number_of_lines())
-        print " | Number of boundaries ....... " + str(self.get_number_of_boundaries())
-        print " | Number of centroids ........ " + str(self.get_number_of_centroids())
-        print " | Number of faces ............ " + str(self.get_number_of_faces())
-        print " | Number of kernels .......... " + str(self.get_number_of_kernels())
-        print " | Number of primitives ....... " + str(self.get_number_of_primitives())
-        print " | Number of nodes ............ " + str(self.get_number_of_nodes())
-        print " | Number of areas ............ " + str(self.get_number_of_areas())
-        print " | Number of islands .......... " + str(self.get_number_of_islands())
-        print " | Number of holes ............ " + str(self.get_number_of_holes())
-        print " | Number of volumes .......... " + str(self.get_number_of_volumes())
+        print " | Number of points ........... " + str(self.number_of_points)
+        print " | Number of lines ............ " + str(self.number_of_lines)
+        print " | Number of boundaries ....... " + str(self.number_of_boundaries)
+        print " | Number of centroids ........ " + str(self.number_of_centroids)
+        print " | Number of faces ............ " + str(self.number_of_faces)
+        print " | Number of kernels .......... " + str(self.number_of_kernels)
+        print " | Number of primitives ....... " + str(self.number_of_primitives)
+        print " | Number of nodes ............ " + str(self.number_of_nodes)
+        print " | Number of areas ............ " + str(self.number_of_areas)
+        print " | Number of islands .......... " + str(self.number_of_islands)
+        print " | Number of holes ............ " + str(self.number_of_holes)
+        print " | Number of volumes .......... " + str(self.number_of_volumes)
 
     def print_shell_info(self):
         """!Print information about this class in shell style"""

Modified: grass/trunk/lib/python/temporal/space_time_datasets.py
===================================================================
--- grass/trunk/lib/python/temporal/space_time_datasets.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/space_time_datasets.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -4,21 +4,20 @@
 
 Temporal GIS related functions to be used in Python scripts.
 
-Usage:
-
- at code
-import grass.temporal as tgis
-
-strds = tgis.space_time_raster_dataset("soils_1950_2010")
-
-...
- at endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
 
+
+>>> import grass.script as grass
+
+>>> grass.run_command("r3.mapcalc", overwrite=True, expression="str3ds_map_test_case = 1")
+0
+>>> grass.run_command("v.random", overwrite=True, output="stvds_map_test_case", 
+... n=100, zmin=0, zmax=100, flags="z", column="elevation")
+0
+
 @author Soeren Gebbert
 """
 import getpass
@@ -29,216 +28,300 @@
 import grass.lib.raster3d as libraster3d
 import grass.script.array as garray
 
-from datetime_math import *
-from abstract_map_dataset import *
 from abstract_space_time_dataset import *
 
 
 ###############################################################################
 
-class raster_dataset(AbstractMapDataset):
+class RasterDataset(AbstractMapDataset):
     """!Raster dataset class
 
        This class provides functions to select, update, insert or delete raster
        map information and valid time stamps into the SQL temporal database.
+       
+       Usage:
+        
+        @code
+        
+        >>> import grass.script as grass
+        >>> grass.use_temp_region()
+        >>> grass.run_command("g.region", n=80.0, s=0.0, e=120.0, w=0.0, 
+        ... t=1.0, b=0.0, res=10.0)
+        0
+        >>> grass.run_command("r.mapcalc", overwrite=True, 
+        ... expression="strds_map_test_case = 1")
+        0
+        >>> mapset = grass.gisenv()["MAPSET"]
+        >>> name = "strds_map_test_case"
+        >>> identifier = "%s@%s" % (name, mapset)
+        >>> rmap = RasterDataset(identifier)
+        >>> rmap.set_absolute_time(start_time=datetime(2001,1,1), 
+        ...                        end_time=datetime(2012,1,1))
+        >>> rmap.map_exists()
+        True
+        >>> rmap.load()
+        >>> rmap.spatial_extent.print_info()
+         +-------------------- Spatial extent ----------------------------------------+
+         | North:...................... 80.0
+         | South:...................... 0.0
+         | East:.. .................... 120.0
+         | West:....................... 0.0
+         | Top:........................ 0.0
+         | Bottom:..................... 0.0
+        >>> rmap.absolute_time.print_info()
+         +-------------------- Absolute time -----------------------------------------+
+         | Start time:................. 2001-01-01 00:00:00
+         | End time:................... 2012-01-01 00:00:00
+        >>> rmap.metadata.print_info()
+         +-------------------- Metadata information ----------------------------------+
+         | Datatype:................... CELL
+         | Number of columns:.......... 8
+         | Number of rows:............. 12
+         | Number of cells:............ 96
+         | North-South resolution:..... 10.0
+         | East-west resolution:....... 10.0
+         | Minimum value:.............. 1.0
+         | Maximum value:.............. 1.0
+         | STRDS register table ....... None
+         
+        >>> newmap = rmap.get_new_instance("new at PERMANENT")
+        >>> isinstance(newmap, RasterDataset)
+        True
+        >>> newstrds = rmap.get_new_stds_instance("new at PERMANENT")
+        >>> isinstance(newstrds, SpaceTimeRasterDataset)
+        True
+        >>> rmap.get_type()
+        'raster'
+        >>> rmap.get_stds_register()
+        >>> rmap.get_absolute_time()
+        (datetime.datetime(2001, 1, 1, 0, 0), datetime.datetime(2012, 1, 1, 0, 0), None)
+        >>> rmap.get_valid_time()
+        (datetime.datetime(2001, 1, 1, 0, 0), datetime.datetime(2012, 1, 1, 0, 0))
+        >>> rmap.get_name()
+        'strds_map_test_case'
+        >>> rmap.get_mapset() == mapset
+        True
+        >>> rmap.get_temporal_type()
+        'absolute'
+        >>> rmap.get_spatial_extent()
+        (80.0, 0.0, 120.0, 0.0, 0.0, 0.0)
+        >>> rmap.is_time_absolute()
+        True
+        >>> rmap.is_time_relative()
+        False
+        
+        >>> grass.run_command("g.remove", rast=name)
+        0
+        >>> grass.del_temp_region()
+        
+        @endcode
     """
     def __init__(self, ident):
-	AbstractMapDataset.__init__(self)
-	self.reset(ident)
+        AbstractMapDataset.__init__(self)
+        self.reset(ident)
 
     def get_type(self):
-        return "raster"
+        return 'raster'
 
     def get_new_instance(self, ident):
         """!Return a new instance with the type of this class"""
-        return raster_dataset(ident)
+        return RasterDataset(ident)
 
     def get_new_stds_instance(self, ident):
-        """!Return a new space time dataset instance in which maps are stored with the type of this class"""
-        return space_time_raster_dataset(ident)
+        """!Return a new space time dataset instance in which maps 
+        are stored with the type of this class"""
+        return SpaceTimeRasterDataset(ident)
 
     def get_stds_register(self):
-        """!Return the space time dataset register table name in which stds are listed in which this map is registered"""
+        """!Return the space time dataset register table name in which stds 
+        are listed in which this map is registered"""
         return self.metadata.get_strds_register()
 
     def set_stds_register(self, name):
-        """!Set the space time dataset register table name in which stds are listed in which this map is registered"""
+        """!Set the space time dataset register table name in which stds 
+        are listed in which this map is registered"""
         self.metadata.set_strds_register(name)
 
     def spatial_overlapping(self, dataset):
         """!Return True if the spatial extents 2d overlap"""
-        
+
         return self.spatial_extent.overlapping_2d(dataset.spatial_extent)
 
     def spatial_relation(self, dataset):
-        """Return the two dimensional spatial relation"""
-        
+        """!Return the two dimensional spatial relation"""
+
         return self.spatial_extent.spatial_relation_2d(dataset.spatial_extent)
-        
+
     def get_np_array(self):
-	"""Return this raster map as memmap numpy style array to access the raster
-	   values in numpy style without loading the whole map in the RAM. 
-	   
-	   In case this raster map does exists in the grass spatial database, the map
-	   will be exported using r.out.bin to a temporary location and assigned to the
-	   memmap object that is returned by this function.
-	   
-	   In case the raster map does not exists, an empty temporary binary file will be created
-	   and assigned to the memap object.
-	   
-	   You need to call the write function to write the memmap array back into grass.
-	"""
-	
-	a = garray.array()
-	
-	if self.map_exists():
-	    a.read(self.get_map_id())
-	
-	return a
-	
+        """!Return this raster map as memmap numpy style array to access the raster
+           values in numpy style without loading the whole map in the RAM.
+
+           In case this raster map does exist in the grass spatial database, 
+           the map will be exported using r.out.bin to a temporary location 
+           and assigned to the memmap object that is returned by this function.
+
+           In case the raster map does not exist, an empty temporary 
+           binary file will be created and assigned to the memmap object.
+
+           You need to call the write function to write the memmap 
+           array back into grass.
+        """
+
+        a = garray.array()
+
+        if self.map_exists():
+            a.read(self.get_map_id())
+
+        return a
+
     def reset(self, ident):
-	"""!Reset the internal structure and set the identifier"""
-	self.base = RasterBase(ident=ident)
-	self.absolute_time = RasterAbslouteTime(ident=ident)
-	self.relative_time = RasterRelativeTime(ident=ident)
-	self.spatial_extent = RasterSpatialExtent(ident=ident)
-	self.metadata = RasterMetadata(ident=ident)
-		
+        """!Reset the internal structure and set the identifier"""
+        self.base = RasterBase(ident=ident)
+        self.absolute_time = RasterAbsoluteTime(ident=ident)
+        self.relative_time = RasterRelativeTime(ident=ident)
+        self.spatial_extent = RasterSpatialExtent(ident=ident)
+        self.metadata = RasterMetadata(ident=ident)
+
     def has_grass_timestamp(self):
-        """!Check if a grass file bsased time stamp exists for this map. 
+        """!Check if a grass file based time stamp exists for this map.
         """
         if G_has_raster_timestamp(self.get_name(), self.get_mapset()):
-	    return True
-	else:
-	    return False
- 
+            return True
+        else:
+            return False
+
     def write_timestamp_to_grass(self):
-        """!Write the timestamp of this map into the map metadata in the grass file system based spatial
-           database. 
-           
+        """!Write the timestamp of this map into the map metadata in 
+           the grass file system based spatial database.
+
            Internally the libgis API functions are used for writing
         """
-        
-	ts = libgis.TimeStamp()
 
-	libgis.G_scan_timestamp(byref(ts), self._convert_timestamp())
-	check = libgis.G_write_raster_timestamp(self.get_name(), byref(ts))
-	
-	if check == -1:
-		core.error(_("Unable to create timestamp file for raster map <%s>"%(self.get_map_id())))
-		
-	if check == -2:
-		core.error(_("Invalid datetime in timestamp for raster map <%s>"%(self.get_map_id())))
-			
-    
+        ts = libgis.TimeStamp()
+
+        libgis.G_scan_timestamp(byref(ts), self._convert_timestamp())
+        check = libgis.G_write_raster_timestamp(self.get_name(), byref(ts))
+
+        if check == -1:
+            core.error(_("Unable to create timestamp file "
+                         "for raster map <%s>" % (self.get_map_id())))
+
+        if check == -2:
+            core.error(_("Invalid datetime in timestamp for raster map <%s>" %
+                         (self.get_map_id())))
+
     def remove_timestamp_from_grass(self):
-        """!Remove the timestamp from the grass file system based spatial database
-        
+        """!Remove the timestamp from the grass file system based 
+           spatial database
+
            Internally the libgis API functions are used for removal
         """
         check = libgis.G_remove_raster_timestamp(self.get_name())
-        
+
         if check == -1:
-            core.error(_("Unable to remove timestamp for raster map <%s>"%(self.get_name())))
-	
+            core.error(_("Unable to remove timestamp for raster map <%s>" %
+                         (self.get_name())))
+
     def map_exists(self):
         """!Return True in case the map exists in the grass spatial database
-        
+
            @return True if map exists, False otherwise
-        """        
+        """
         mapset = libgis.G_find_raster(self.get_name(), self.get_mapset())
-        
+
         if not mapset:
             return False
-	
-	return True
-        
+
+        return True
+
     def read_info(self):
-        """!Read the raster map info from the file system and store the content 
+        """!Read the raster map info from the file system and store the content
            into a dictionary
-           
+
            This method uses the ctypes interface to the gis and raster libraries
            to read the map metadata information
         """
-        
+
         kvp = {}
-        
+
         name = self.get_name()
         mapset = self.get_mapset()
-        
+
         if not self.map_exists():
-	  core.fatal(_("Raster map <%s> not found" % name))
-        
+            core.fatal(_("Raster map <%s> not found" % name))
+
         # Read the region information
         region = libgis.Cell_head()
-	libraster.Rast_get_cellhd(name, mapset, byref(region))
-	
-	kvp["north"] = region.north
-	kvp["south"] = region.south
-	kvp["east"] = region.east
-	kvp["west"] = region.west
-	kvp["nsres"] = region.ns_res
-	kvp["ewres"] = region.ew_res
-	kvp["rows"] = region.cols
-	kvp["cols"] = region.rows
-	
-	maptype = libraster.Rast_map_type(name, mapset)
-  
-	if maptype == libraster.DCELL_TYPE:
-	    kvp["datatype"] = "DCELL"
+        libraster.Rast_get_cellhd(name, mapset, byref(region))
+
+        kvp["north"] = region.north
+        kvp["south"] = region.south
+        kvp["east"] = region.east
+        kvp["west"] = region.west
+        kvp["nsres"] = region.ns_res
+        kvp["ewres"] = region.ew_res
+        kvp["rows"] = region.cols
+        kvp["cols"] = region.rows
+
+        maptype = libraster.Rast_map_type(name, mapset)
+
+        if maptype == libraster.DCELL_TYPE:
+            kvp["datatype"] = "DCELL"
         elif maptype == libraster.FCELL_TYPE:
-	    kvp["datatype"] = "FCELL"
+            kvp["datatype"] = "FCELL"
         elif maptype == libraster.CELL_TYPE:
-	    kvp["datatype"] = "CELL"
-	    
-	# Read range
-	if libraster.Rast_map_is_fp(name, mapset):
-	    range = libraster.FPRange()
-	    libraster.Rast_init_fp_range (byref(range))
-	    ret = libraster.Rast_read_fp_range(name, mapset, byref(range))
-	    if ret < 0:
-		core.fatal(_("Unable to read range file"))
-	    if ret == 2:
-		kvp["min"] = None
-		kvp["max"] = None
-	    else:
-		min = libgis.DCELL()
-		max = libgis.DCELL()
-		libraster.Rast_get_fp_range_min_max(byref(range), byref(min), byref(max))
-		kvp["min"] = min.value
-		kvp["max"] = max.value
-	else:
-	    range = libraster.Range()
-	    libraster.Rast_init_range (byref(range))
-	    ret = libraster.Rast_read_range(name, mapset, byref(range))
-	    if ret < 0:
-		core.fatal(_("Unable to read range file"))
-	    if ret == 2:
-		kvp["min"] = None
-		kvp["max"] = None
-	    else:
-		min = libgis.CELL()
-		max = libgis.CELL()
-		libraster.Rast_get_range_min_max(byref(range), byref(min), byref(max))
-		kvp["min"] = min.value
-		kvp["max"] = max.value
-	
-	return kvp
+            kvp["datatype"] = "CELL"
 
+        # Read range
+        if libraster.Rast_map_is_fp(name, mapset):
+            range = libraster.FPRange()
+            libraster.Rast_init_fp_range(byref(range))
+            ret = libraster.Rast_read_fp_range(name, mapset, byref(range))
+            if ret < 0:
+                core.fatal(_("Unable to read range file"))
+            if ret == 2:
+                kvp["min"] = None
+                kvp["max"] = None
+            else:
+                min = libgis.DCELL()
+                max = libgis.DCELL()
+                libraster.Rast_get_fp_range_min_max(
+                    byref(range), byref(min), byref(max))
+                kvp["min"] = min.value
+                kvp["max"] = max.value
+        else:
+            range = libraster.Range()
+            libraster.Rast_init_range(byref(range))
+            ret = libraster.Rast_read_range(name, mapset, byref(range))
+            if ret < 0:
+                core.fatal(_("Unable to read range file"))
+            if ret == 2:
+                kvp["min"] = None
+                kvp["max"] = None
+            else:
+                min = libgis.CELL()
+                max = libgis.CELL()
+                libraster.Rast_get_range_min_max(
+                    byref(range), byref(min), byref(max))
+                kvp["min"] = min.value
+                kvp["max"] = max.value
+
+        return kvp
+
     def load(self):
-        """!Load all info from an existing raster map into the internal structure"""
+        """!Load all info from an existing raster map into the internal
+           structure"""
 
-
         # Fill base information
         self.base.set_creator(str(getpass.getuser()))
 
         # Get the data from an existing raster map
         kvp = self.read_info()
-        
+
         # Fill spatial extent
 
-        self.set_spatial_extent(north=kvp["north"], south=kvp["south"], \
-                                east=kvp["east"],   west=kvp["west"])
+        self.set_spatial_extent(north=kvp["north"], south=kvp["south"],
+                                east=kvp["east"], west=kvp["west"])
 
         # Fill metadata
 
@@ -259,38 +342,41 @@
 
 ###############################################################################
 
-class raster3d_dataset(AbstractMapDataset):
+class Raster3DDataset(AbstractMapDataset):
     """!Raster3d dataset class
 
        This class provides functions to select, update, insert or delete raster3d
        map information and valid time stamps into the SQL temporal database.
     """
     def __init__(self, ident):
-	AbstractMapDataset.__init__(self)
-	self.reset(ident)
+        AbstractMapDataset.__init__(self)
+        self.reset(ident)
 
     def get_type(self):
         return "raster3d"
 
     def get_new_instance(self, ident):
         """!Return a new instance with the type of this class"""
-        return raster3d_dataset(ident)
+        return Raster3DDataset(ident)
 
     def get_new_stds_instance(self, ident):
-        """!Return a new space time dataset instance in which maps are stored with the type of this class"""
-        return space_time_raster3d_dataset(ident)
+        """!Return a new space time dataset instance in which maps 
+        are stored with the type of this class"""
+        return SpaceTimeRaster3DDataset(ident)
 
     def get_stds_register(self):
-        """!Return the space time dataset register table name in which stds are listed in which this map is registered"""
+        """!Return the space time dataset register table name in 
+        which stds are listed in which this map is registered"""
         return self.metadata.get_str3ds_register()
 
     def set_stds_register(self, name):
-        """!Set the space time dataset register table name in which stds are listed in which this map is registered"""
+        """!Set the space time dataset register table name in 
+        which stds are listed in which this map is registered"""
         self.metadata.set_str3ds_register(name)
 
     def spatial_overlapping(self, dataset):
         """!Return True if the spatial extents overlap"""
-        
+
         if self.get_type() == dataset.get_type() or dataset.get_type() == "str3ds":
             return self.spatial_extent.overlapping(dataset.spatial_extent)
         else:
@@ -298,140 +384,146 @@
 
     def spatial_relation(self, dataset):
         """!Return the two or three dimensional spatial relation"""
-        
+
         if self.get_type() == dataset.get_type() or dataset.get_type() == "str3ds":
             return self.spatial_extent.spatial_relation(dataset.spatial_extent)
         else:
             return self.spatial_extent.spatial_relation_2d(dataset.spatial_extent)
-        
+
     def reset(self, ident):
-	"""!Reset the internal structure and set the identifier"""
-	self.base = Raster3DBase(ident=ident)
-	self.absolute_time = Raster3DAbslouteTime(ident=ident)
-	self.relative_time = Raster3DRelativeTime(ident=ident)
-	self.spatial_extent = Raster3DSpatialExtent(ident=ident)
-	self.metadata = Raster3DMetadata(ident=ident)
+        """!Reset the internal structure and set the identifier"""
+        self.base = Raster3DBase(ident=ident)
+        self.absolute_time = Raster3DAbsoluteTime(ident=ident)
+        self.relative_time = Raster3DRelativeTime(ident=ident)
+        self.spatial_extent = Raster3DSpatialExtent(ident=ident)
+        self.metadata = Raster3DMetadata(ident=ident)
 
     def has_grass_timestamp(self):
-        """!Check if a grass file bsased time stamp exists for this map. 
+        """!Check if a grass file based time stamp exists for this map.
         """
         if G_has_raster3d_timestamp(self.get_name(), self.get_mapset()):
-	    return True
-	else:
-	    return False
- 
+            return True
+        else:
+            return False
+
     def write_timestamp_to_grass(self):
-        """!Write the timestamp of this map into the map metadata in the grass file system based spatial
-           database. 
-           
+        """!Write the timestamp of this map into the map metadata 
+        in the grass file system based spatial database.
+
            Internally the libgis API functions are used for writing
         """
-        
-	ts = libgis.TimeStamp()
 
-	libgis.G_scan_timestamp(byref(ts), self._convert_timestamp())
-	check = libgis.G_write_raster3d_timestamp(self.get_name(), byref(ts))
-	
-	if check == -1:
-		core.error(_("Unable to create timestamp file for raster3d map <%s>"%(self.get_map_id())))
-		
-	if check == -2:
-		core.error(_("Invalid datetime in timestamp for raster3d map <%s>"%(self.get_map_id())))
-			
-    
+        ts = libgis.TimeStamp()
+
+        libgis.G_scan_timestamp(byref(ts), self._convert_timestamp())
+        check = libgis.G_write_raster3d_timestamp(self.get_name(), byref(ts))
+
+        if check == -1:
+            core.error(_("Unable to create timestamp file "
+                         "for raster3d map <%s>" % (self.get_map_id())))
+
+        if check == -2:
+            core.error(_("Invalid datetime in timestamp "
+                         "for raster3d map <%s>" % (self.get_map_id())))
+
     def remove_timestamp_from_grass(self):
         """!Remove the timestamp from the grass file system based spatial database
-        
+
            Internally the libgis API functions are used for removal
         """
         check = libgis.G_remove_raster3d_timestamp(self.get_name())
-        
+
         if check == -1:
-            core.error(_("Unable to remove timestamp for raster3d map <%s>"%(self.get_name())))
-	
+            core.error(_("Unable to remove timestamp for raster3d map <%s>" %
+                         (self.get_name())))
+
     def map_exists(self):
         """!Return True in case the map exists in the grass spatial database
-        
+
            @return True if map exists, False otherwise
-        """        
+        """
         mapset = libgis.G_find_raster3d(self.get_name(), self.get_mapset())
-        
+
         if not mapset:
             return False
-	
-	return True
-        
+
+        return True
+
     def read_info(self):
-        """!Read the raster3d map info from the file system and store the content 
+        """!Read the raster3d map info from the file system and store the content
            into a dictionary
-           
+
            This method uses the ctypes interface to the gis and raster3d libraries
            to read the map metadata information
         """
-        
+
         kvp = {}
-        
+
         name = self.get_name()
         mapset = self.get_mapset()
-        
+
         if not self.map_exists():
-	  core.fatal(_("Raster3d map <%s> not found" % name))
-        
+            core.fatal(_("Raster3d map <%s> not found" % name))
+
         # Read the region information
         region = libraster3d.RASTER3D_Region()
-	libraster3d.Rast3d_read_region_map(name, mapset, byref(region))
-	
-	kvp["north"] = region.north
-	kvp["south"] = region.south
-	kvp["east"] = region.east
-	kvp["west"] = region.west
-	kvp["nsres"] = region.ns_res
-	kvp["ewres"] = region.ew_res
-	kvp["tbres"] = region.tb_res
-	kvp["rows"] = region.cols
-	kvp["cols"] = region.rows
-	kvp["depths"] = region.depths
-	kvp["top"] = region.top
-	kvp["bottom"] = region.bottom
-	
-	# We need to open the map, this function returns a void pointer
-	# but we may need the correct type which is RASTER3D_Map, hence the casting
-	g3map = cast(libraster3d.Rast3d_open_cell_old(name, mapset, \
-	        libraster3d.RASTER3D_DEFAULT_WINDOW, libraster3d.RASTER3D_TILE_SAME_AS_FILE, \
-	        libraster3d.RASTER3D_NO_CACHE), POINTER(libraster3d.RASTER3D_Map))
-	        
-	if not g3map:
-	    core.fatal(_("Unable to open 3D raster map <%s>"%(name)));
+        libraster3d.Rast3d_read_region_map(name, mapset, byref(region))
 
-	maptype = libraster3d.Rast3d_file_type_map(g3map)
-  
-	if maptype == libraster.DCELL_TYPE:
-	    kvp["datatype"] = "DCELL"
+        kvp["north"] = region.north
+        kvp["south"] = region.south
+        kvp["east"] = region.east
+        kvp["west"] = region.west
+        kvp["nsres"] = region.ns_res
+        kvp["ewres"] = region.ew_res
+        kvp["tbres"] = region.tb_res
+        kvp["rows"] = region.cols
+        kvp["cols"] = region.rows
+        kvp["depths"] = region.depths
+        kvp["top"] = region.top
+        kvp["bottom"] = region.bottom
+
+        # We need to open the map, this function returns a void pointer
+        # but we may need the correct type which is RASTER3D_Map, hence 
+        # the casting
+        g3map = cast(libraster3d.Rast3d_open_cell_old(name, mapset,
+                     libraster3d.RASTER3D_DEFAULT_WINDOW, 
+                     libraster3d.RASTER3D_TILE_SAME_AS_FILE,
+                     libraster3d.RASTER3D_NO_CACHE), 
+                     POINTER(libraster3d.RASTER3D_Map))
+
+        if not g3map:
+            core.fatal(_("Unable to open 3D raster map <%s>" % (name)))
+
+        maptype = libraster3d.Rast3d_file_type_map(g3map)
+
+        if maptype == libraster.DCELL_TYPE:
+            kvp["datatype"] = "DCELL"
         elif maptype == libraster.FCELL_TYPE:
-	    kvp["datatype"] = "FCELL"
-	
-	# Read range
-	min = libgis.DCELL()
-	max = libgis.DCELL()
-	ret = libraster3d.Rast3d_range_load(g3map)
-	if not ret:
-	    core.fatal(_("Unable to load range of 3D raster map <%s>"%(name)));
-	libraster3d.Rast3d_range_min_max(g3map, byref(min), byref(max))
-	
-	if min.value != min.value:
-	    kvp["min"] = None
-	else:
-	    kvp["min"] = float(min.value)
-	if max.value != max.value:
-	    kvp["max"] = None
-	else:
-	    kvp["max"] = float(max.value)
-	
-	if not libraster3d.Rast3d_close(g3map):
-	    G_fatal_error(_("Unable to close 3D raster map <%s>"%(name)))
-	
-	return kvp
-        
+            kvp["datatype"] = "FCELL"
+
+        # Read range
+        min = libgis.DCELL()
+        max = libgis.DCELL()
+        ret = libraster3d.Rast3d_range_load(g3map)
+        if not ret:
+            core.fatal(_("Unable to load range of 3D raster map <%s>" %
+                         (name)))
+        libraster3d.Rast3d_range_min_max(g3map, byref(min), byref(max))
+
+        if min.value != min.value:
+            kvp["min"] = None
+        else:
+            kvp["min"] = float(min.value)
+        if max.value != max.value:
+            kvp["max"] = None
+        else:
+            kvp["max"] = float(max.value)
+
+        if not libraster3d.Rast3d_close(g3map):
+            G_fatal_error(_("Unable to close 3D raster map <%s>" % (name)))
+
+        return kvp
+
     def load(self):
         """!Load all info from an existing raster3d map into the internal structure"""
 
@@ -442,9 +534,9 @@
 
         # Get the data from an existing raster map
         kvp = self.read_info()
-        
-        self.set_spatial_extent(north=kvp["north"], south=kvp["south"], \
-                                east=kvp["east"],   west=kvp["west"],\
+
+        self.set_spatial_extent(north=kvp["north"], south=kvp["south"],
+                                east=kvp["east"], west=kvp["west"],
                                 top=kvp["top"], bottom=kvp["bottom"])
 
         # Fill metadata
@@ -470,33 +562,36 @@
 
 ###############################################################################
 
-class vector_dataset(AbstractMapDataset):
+class VectorDataset(AbstractMapDataset):
     """!Vector dataset class
 
        This class provides functions to select, update, insert or delete vector
        map information and valid time stamps into the SQL temporal database.
     """
     def __init__(self, ident):
-	AbstractMapDataset.__init__(self)
-	self.reset(ident)
+        AbstractMapDataset.__init__(self)
+        self.reset(ident)
 
     def get_type(self):
         return "vector"
 
     def get_new_instance(self, ident):
         """!Return a new instance with the type of this class"""
-        return vector_dataset(ident)
+        return VectorDataset(ident)
 
     def get_new_stds_instance(self, ident):
-        """!Return a new space time dataset instance in which maps are stored with the type of this class"""
-        return space_time_vector_dataset(ident)
+        """!Return a new space time dataset instance in which maps 
+        are stored with the type of this class"""
+        return SpaceTimeVectorDataset(ident)
 
     def get_stds_register(self):
-        """!Return the space time dataset register table name in which stds are listed in which this map is registered"""
+        """!Return the space time dataset register table name in 
+        which stds are listed in which this map is registered"""
         return self.metadata.get_stvds_register()
 
     def set_stds_register(self, name):
-        """!Set the space time dataset register table name in which stds are listed in which this map is registered"""
+        """!Set the space time dataset register table name in 
+        which stds are listed in which this map is registered"""
         self.metadata.set_stvds_register(name)
 
     def get_layer(self):
@@ -505,172 +600,188 @@
 
     def spatial_overlapping(self, dataset):
         """!Return True if the spatial extents 2d overlap"""
-        
+
         return self.spatial_extent.overlapping_2d(dataset.spatial_extent)
 
     def spatial_relation(self, dataset):
         """!Return the two dimensional spatial relation"""
-        
+
         return self.spatial_extent.spatial_relation_2d(dataset.spatial_extent)
-	
+
     def reset(self, ident):
-	"""!Reset the internal structure and set the identifier"""
-	self.base = VectorBase(ident=ident)
-	self.absolute_time = VectorAbslouteTime(ident=ident)
-	self.relative_time = VectorRelativeTime(ident=ident)
-	self.spatial_extent = VectorSpatialExtent(ident=ident)
-	self.metadata = VectorMetadata(ident=ident)
+        """!Reset the internal structure and set the identifier"""
+        self.base = VectorBase(ident=ident)
+        self.absolute_time = VectorAbsoluteTime(ident=ident)
+        self.relative_time = VectorRelativeTime(ident=ident)
+        self.spatial_extent = VectorSpatialExtent(ident=ident)
+        self.metadata = VectorMetadata(ident=ident)
 
     def has_grass_timestamp(self):
-        """!Check if a grass file bsased time stamp exists for this map. 
+        """!Check if a grass file based time stamp exists for this map.
         """
-        if G_has_raster_timestamp(self.get_name(), self.get_layer(), self.get_mapset()):
-	    return True
-	else:
-	    return False
- 
+        if G_has_raster_timestamp(self.get_name(), self.get_layer(), 
+                                  self.get_mapset()):
+            return True
+        else:
+            return False
+
     def write_timestamp_to_grass(self):
-        """!Write the timestamp of this map into the map metadata in the grass file system based spatial
-           database. 
-           
+        """!Write the timestamp of this map into the map metadata in 
+           the grass file system based spatial database.
+
            Internally the libgis API functions are used for writing
         """
-        
-	ts = libgis.TimeStamp()
 
-	libgis.G_scan_timestamp(byref(ts), self._convert_timestamp())
-	check = libgis.G_write_vector_timestamp(self.get_name(), self.get_layer(), byref(ts))
-	
-	if check == -1:
-		core.error(_("Unable to create timestamp file for vector map <%s>"%(self.get_map_id())))
-		
-	if check == -2:
-		core.error(_("Invalid datetime in timestamp for vector map <%s>"%(self.get_map_id())))
-			
-    
+        ts = libgis.TimeStamp()
+
+        libgis.G_scan_timestamp(byref(ts), self._convert_timestamp())
+        check = libgis.G_write_vector_timestamp(
+            self.get_name(), self.get_layer(), byref(ts))
+
+        if check == -1:
+            core.error(_("Unable to create timestamp file "
+                         "for vector map <%s>" % (self.get_map_id())))
+
+        if check == -2:
+            core.error(_("Invalid datetime in timestamp for vector map <%s>" %
+                         (self.get_map_id())))
+
     def remove_timestamp_from_grass(self):
-        """!Remove the timestamp from the grass file system based spatial database
-        
+        """!Remove the timestamp from the grass file system based spatial 
+           database
+
            Internally the libgis API functions are used for removal
         """
-        check = libgis.G_remove_vector_timestamp(self.get_name(), self.get_layer())
-        
+        check = libgis.G_remove_vector_timestamp(
+            self.get_name(), self.get_layer())
+
         if check == -1:
-            core.error(_("Unable to remove timestamp for vector map <%s>"%(self.get_name())))
-	
+            core.error(_("Unable to remove timestamp for vector map <%s>" %
+                         (self.get_name())))
+
     def map_exists(self):
         """!Return True in case the map exists in the grass spatial database
-        
+
            @return True if map exists, False otherwise
-        """        
+        """
         mapset = libgis.G_find_vector(self.get_name(), self.get_mapset())
-        
+
         if not mapset:
             return False
-	
-	return True
-        
+
+        return True
+
     def read_info(self):
-        """!Read the vector map info from the file system and store the content 
+        """!Read the vector map info from the file system and store the content
            into a dictionary
-           
+
            This method uses the ctypes interface to the vector libraries
            to read the map metadata information
         """
-        
+
         kvp = {}
-        
+
         name = self.get_name()
         mapset = self.get_mapset()
-        
+
         if not self.map_exists():
-	  core.fatal(_("Vector map <%s> not found" % name))
-	
-	# The vector map structure
-	Map = libvector.Map_info()
-	
+            core.fatal(_("Vector map <%s> not found" % name))
+
+        # The vector map structure
+        Map = libvector.Map_info()
+
         # We open the maps always in topology mode first
         libvector.Vect_set_open_level(2)
         with_topo = True
-        
+
         # Code lend from v.info main.c
         if libvector.Vect_open_old_head2(byref(Map), name, mapset, "1") < 2:
-	    # force level 1, open fully
-	    # NOTE: number of points, lines, boundaries, centroids, faces, kernels is still available
-	    libvector.Vect_set_open_level(1) # no topology
-	    with_topo = False
-	    core.message(_("Open map without topology support"))
-	    if libvector.Vect_open_old2(byref(Map), name, mapset, "1") < 1:
-		core.fatal(_("Unable to open vector map <%s>"%(libvector.Vect_get_full_name(byref(Map)))))
+            # force level 1, open fully
+            # NOTE: number of points, lines, boundaries, centroids, 
+            # faces, kernels is still available
+            libvector.Vect_set_open_level(1)  # no topology
+            with_topo = False
+            core.message(_("Open map without topology support"))
+            if libvector.Vect_open_old2(byref(Map), name, mapset, "1") < 1:
+                core.fatal(_("Unable to open vector map <%s>" % 
+                             (libvector.Vect_get_full_name(byref(Map)))))
 
         # Read the extent information
         bbox = libvector.bound_box()
-        libvector.Vect_get_map_box(byref(Map), byref(bbox));
-	
-	kvp["north"] = bbox.N
-	kvp["south"] = bbox.S
-	kvp["east"] = bbox.E
-	kvp["west"] = bbox.W
-	kvp["top"] = bbox.T
-	kvp["bottom"] = bbox.B
-	
-	kvp["is_3d"] = bool(libvector.Vect_is_3d(byref(Map)))
-	
-	# Read number of features
-	if with_topo:
-	    kvp["points"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_POINT)
-	    kvp["lines"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_LINE)
-	    kvp["boundaries"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_BOUNDARY)
-	    kvp["centroids"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_CENTROID)
-	    kvp["faces"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_FACE)
-	    kvp["kernels"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_KERNEL)
-	
-	    # Summarize the primitives
-	    kvp["primitives"] = kvp["points"] + kvp["lines"] + kvp["boundaries"] + kvp["centroids"]
-	    if kvp["is_3d"]:
-	        kvp["primitives"] += kvp["faces"] + kvp["kernels"]
+        libvector.Vect_get_map_box(byref(Map), byref(bbox))
 
-	    # Read topology information
-	    kvp["nodes"] = libvector.Vect_get_num_nodes(byref(Map))
-	    kvp["areas"] = libvector.Vect_get_num_areas(byref(Map))
-	    kvp["islands"] = libvector.Vect_get_num_islands(byref(Map))
-	    kvp["holes"] = libvector.Vect_get_num_holes(byref(Map))
-	    kvp["volumes"] = libvector.Vect_get_num_primitives(byref(Map), libvector.GV_VOLUME)
-	else:
-	    kvp["points"] = None
-	    kvp["lines"] = None
-	    kvp["boundaries"] = None
-	    kvp["centroids"] = None
-	    kvp["faces"] = None
-	    kvp["kernels"] = None
-	    kvp["primitives"] = None
-	    kvp["nodes"] = None
-	    kvp["areas"] = None
-	    kvp["islands"] = None
-	    kvp["holes"] = None
-	    kvp["volumes"] = None
-	
-	libvector.Vect_close(byref(Map))
-	
-	return kvp
-	
+        kvp["north"] = bbox.N
+        kvp["south"] = bbox.S
+        kvp["east"] = bbox.E
+        kvp["west"] = bbox.W
+        kvp["top"] = bbox.T
+        kvp["bottom"] = bbox.B
+
+        kvp["is_3d"] = bool(libvector.Vect_is_3d(byref(Map)))
+
+        # Read number of features
+        if with_topo:
+            kvp["points"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_POINT)
+            kvp["lines"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_LINE)
+            kvp["boundaries"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_BOUNDARY)
+            kvp["centroids"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_CENTROID)
+            kvp["faces"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_FACE)
+            kvp["kernels"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_KERNEL)
+
+            # Summarize the primitives
+            kvp["primitives"] = kvp["points"] + kvp["lines"] + \
+                kvp["boundaries"] + kvp["centroids"]
+            if kvp["is_3d"]:
+                kvp["primitives"] += kvp["faces"] + kvp["kernels"]
+
+            # Read topology information
+            kvp["nodes"] = libvector.Vect_get_num_nodes(byref(Map))
+            kvp["areas"] = libvector.Vect_get_num_areas(byref(Map))
+            kvp["islands"] = libvector.Vect_get_num_islands(byref(Map))
+            kvp["holes"] = libvector.Vect_get_num_holes(byref(Map))
+            kvp["volumes"] = libvector.Vect_get_num_primitives(
+                byref(Map), libvector.GV_VOLUME)
+        else:
+            kvp["points"] = None
+            kvp["lines"] = None
+            kvp["boundaries"] = None
+            kvp["centroids"] = None
+            kvp["faces"] = None
+            kvp["kernels"] = None
+            kvp["primitives"] = None
+            kvp["nodes"] = None
+            kvp["areas"] = None
+            kvp["islands"] = None
+            kvp["holes"] = None
+            kvp["volumes"] = None
+
+        libvector.Vect_close(byref(Map))
+
+        return kvp
+
     def load(self):
-        """!Load all info from an existing vector map into the internal structure"""
+        """!Load all info from an existing vector map into the internal 
+        structure"""
 
-
         # Fill base information
         self.base.set_creator(str(getpass.getuser()))
 
         # Get the data from an existing raster map
         kvp = self.read_info()
-        
+
         # Fill spatial extent
-        self.set_spatial_extent(north=kvp["north"], south=kvp["south"], \
-                                east=kvp["east"],   west=kvp["west"],\
+        self.set_spatial_extent(north=kvp["north"], south=kvp["south"],
+                                east=kvp["east"], west=kvp["west"],
                                 top=kvp["top"], bottom=kvp["bottom"])
 
-	# Fill metadata
-	self.metadata.set_3d_info(kvp["is_3d"])
+        # Fill metadata
+        self.metadata.set_3d_info(kvp["is_3d"])
         self.metadata.set_points(kvp["points"])
         self.metadata.set_lines(kvp["lines"])
         self.metadata.set_boundaries(kvp["boundaries"])
@@ -686,7 +797,7 @@
 
 ###############################################################################
 
-class space_time_raster_dataset(AbstractSpaceTimeDataset):
+class SpaceTimeRasterDataset(AbstractSpaceTimeDataset):
     """!Space time raster dataset class
     """
     def __init__(self, ident):
@@ -697,11 +808,11 @@
 
     def get_new_instance(self, ident):
         """!Return a new instance with the type of this class"""
-        return space_time_raster_dataset(ident)
+        return SpaceTimeRasterDataset(ident)
 
     def get_new_map_instance(self, ident):
         """!Return a new instance of a map dataset which is associated with the type of this class"""
-        return raster_dataset(ident)
+        return RasterDataset(ident)
 
     def get_map_register(self):
         """!Return the name of the map register table"""
@@ -713,27 +824,27 @@
 
     def spatial_overlapping(self, dataset):
         """!Return True if the spatial extents 2d overlap"""
-        
+
         return self.spatial_extent.overlapping_2d(dataset.spatial_extent)
 
     def spatial_relation(self, dataset):
         """!Return the two dimensional spatial relation"""
-        
+
         return self.spatial_extent.spatial_relation_2d(dataset.spatial_extent)
-	
+
     def reset(self, ident):
 
-	"""!Reset the internal structure and set the identifier"""
-	self.base = STRDSBase(ident=ident)
+        """!Reset the internal structure and set the identifier"""
+        self.base = STRDSBase(ident=ident)
         self.base.set_creator(str(getpass.getuser()))
-        self.absolute_time = STRDSAbslouteTime(ident=ident)
+        self.absolute_time = STRDSAbsoluteTime(ident=ident)
         self.relative_time = STRDSRelativeTime(ident=ident)
-	self.spatial_extent = STRDSSpatialExtent(ident=ident)
-	self.metadata = STRDSMetadata(ident=ident)
+        self.spatial_extent = STRDSSpatialExtent(ident=ident)
+        self.metadata = STRDSMetadata(ident=ident)
 
 ###############################################################################
 
-class space_time_raster3d_dataset(AbstractSpaceTimeDataset):
+class SpaceTimeRaster3DDataset(AbstractSpaceTimeDataset):
     """!Space time raster3d dataset class
     """
 
@@ -745,11 +856,12 @@
 
     def get_new_instance(self, ident):
         """!Return a new instance with the type of this class"""
-        return space_time_raster3d_dataset(ident)
+        return SpaceTimeRaster3DDataset(ident)
 
     def get_new_map_instance(self, ident):
-        """!Return a new instance of a map dataset which is associated with the type of this class"""
-        return raster3d_dataset(ident)
+        """!Return a new instance of a map dataset which is associated 
+        with the type of this class"""
+        return Raster3DDataset(ident)
 
     def get_map_register(self):
         """!Return the name of the map register table"""
@@ -761,7 +873,7 @@
 
     def spatial_overlapping(self, dataset):
         """!Return True if the spatial extents overlap"""
-        
+
         if self.get_type() == dataset.get_type() or dataset.get_type() == "str3ds":
             return self.spatial_extent.overlapping(dataset.spatial_extent)
         else:
@@ -769,25 +881,27 @@
 
     def spatial_relation(self, dataset):
         """!Return the two or three dimensional spatial relation"""
-        
-        if self.get_type() == dataset.get_type() or dataset.get_type() == "str3ds":
+
+        if self.get_type() == dataset.get_type() or \
+           dataset.get_type() == "str3ds":
             return self.spatial_extent.spatial_relation(dataset.spatial_extent)
         else:
             return self.spatial_extent.spatial_relation_2d(dataset.spatial_extent)
-        
+
     def reset(self, ident):
 
-	"""!Reset the internal structure and set the identifier"""
-	self.base = STR3DSBase(ident=ident)
+        """!Reset the internal structure and set the identifier"""
+        self.base = STR3DSBase(ident=ident)
         self.base.set_creator(str(getpass.getuser()))
-        self.absolute_time = STR3DSAbslouteTime(ident=ident)
+        self.absolute_time = STR3DSAbsoluteTime(ident=ident)
         self.relative_time = STR3DSRelativeTime(ident=ident)
-	self.spatial_extent = STR3DSSpatialExtent(ident=ident)
-	self.metadata = STR3DSMetadata(ident=ident)
+        self.spatial_extent = STR3DSSpatialExtent(ident=ident)
+        self.metadata = STR3DSMetadata(ident=ident)
 
 ###############################################################################
 
-class space_time_vector_dataset(AbstractSpaceTimeDataset):
+
+class SpaceTimeVectorDataset(AbstractSpaceTimeDataset):
     """!Space time vector dataset class
     """
 
@@ -799,11 +913,12 @@
 
     def get_new_instance(self, ident):
         """!Return a new instance with the type of this class"""
-        return space_time_vector_dataset(ident)
+        return SpaceTimeVectorDataset(ident)
 
     def get_new_map_instance(self, ident):
-        """!Return a new instance of a map dataset which is associated with the type of this class"""
-        return vector_dataset(ident)
+        """!Return a new instance of a map dataset which is associated 
+        with the type of this class"""
+        return VectorDataset(ident)
 
     def get_map_register(self):
         """!Return the name of the map register table"""
@@ -815,28 +930,30 @@
 
     def spatial_overlapping(self, dataset):
         """!Return True if the spatial extents 2d overlap"""
-        
+
         return self.spatial_extent.overlapping_2d(dataset.spatial_extent)
 
     def spatial_relation(self, dataset):
         """!Return the two dimensional spatial relation"""
-        
+
         return self.spatial_extent.spatial_relation_2d(dataset.spatial_extent)
 
     def reset(self, ident):
 
-	"""!Reset the internal structure and set the identifier"""
-	self.base = STVDSBase(ident=ident)
+        """!Reset the internal structure and set the identifier"""
+        self.base = STVDSBase(ident=ident)
         self.base.set_creator(str(getpass.getuser()))
-        self.absolute_time = STVDSAbslouteTime(ident=ident)
+        self.absolute_time = STVDSAbsoluteTime(ident=ident)
         self.relative_time = STVDSRelativeTime(ident=ident)
-	self.spatial_extent = STVDSSpatialExtent(ident=ident)
-	self.metadata = STVDSMetadata(ident=ident)
+        self.spatial_extent = STVDSSpatialExtent(ident=ident)
+        self.metadata = STVDSMetadata(ident=ident)
 
 ###############################################################################
 
+
 class AbstractDatasetComparisonKeyStartTime(object):
-    """!This comparison key can be used to sort lists of abstract datasets by start time
+    """!This comparison key can be used to sort lists of abstract datasets 
+       by start time
 
         Example:
 
@@ -882,8 +999,10 @@
 
 ###############################################################################
 
+
 class AbstractDatasetComparisonKeyEndTime(object):
-    """!This comparison key can be used to sort lists of abstract datasets by end time
+    """!This comparison key can be used to sort lists of abstract datasets 
+       by end time
 
         Example:
 
@@ -927,3 +1046,8 @@
         startB, endB = other.obj.get_valid_time()
         return endA != endB
 
+###############################################################################
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
\ No newline at end of file

Modified: grass/trunk/lib/python/temporal/space_time_datasets_tools.py
===================================================================
--- grass/trunk/lib/python/temporal/space_time_datasets_tools.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/space_time_datasets_tools.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -23,14 +23,16 @@
 """
 
 from space_time_datasets import *
- 
+
 ###############################################################################
 
-def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=None, \
-                                        end=None, unit=None, increment=None, dbif = None, \
-                                        interval=False, fs="|"):
-    """!Use this method to register maps in space time datasets. This function is generic and
 
+def register_maps_in_space_time_dataset(
+    type, name, maps=None, file=None, start=None,
+    end=None, unit=None, increment=None, dbif=None,
+        interval=False, fs="|"):
+    """!Use this method to register maps in space time datasets. 
+
        Additionally a start time string and an increment string can be specified
        to assign a time interval automatically to the maps.
 
@@ -40,86 +42,100 @@
        @param type: The type of the maps rast, rast3d or vect
        @param name: The name of the space time dataset
        @param maps: A comma separated list of map names
-       @param file: Input file one map with start and optional end time, one per line
-       @param start: The start date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param end: The end date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param unit: The unit of the relative time: years, months, days, hours, minutes, seconds
-       @param increment: Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative: 1.0)
+       @param file: Input file one map with start and optional end time, 
+                    one per line
+       @param start: The start date and time of the first raster map
+                    (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                    format relative is integer 5)
+       @param end: The end date and time of the first raster map
+                   (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                   format relative is integer 5)
+       @param unit: The unit of the relative time: years, months, days,
+                    hours, minutes, seconds
+       @param increment: Time increment between maps for time stamp creation
+                         (format absolute: NNN seconds, minutes, hours, days,
+                         weeks, months, years; format relative: 1.0)
        @param dbif: The database interface to be used
-       @param interval: If True, time intervals are created in case the start time and an increment is provided
+       @param interval: If True, time intervals are created in case the start
+                        time and an increment is provided
        @param fs: Field separator used in input file
     """
 
     start_time_in_file = False
     end_time_in_file = False
-    
+
     if maps and file:
-        core.fatal(_("%s= and %s= are mutually exclusive") % ("input","file"))
+        core.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))
 
     if end and increment:
-        core.fatal(_("%s= and %s= are mutually exclusive") % ("end","increment"))
+        core.fatal(_("%s= and %s= are mutually exclusive") % (
+            "end", "increment"))
 
     if end and not start:
-        core.fatal(_("Please specify %s= and %s=") % ("start_time","end_time"))
+        core.fatal(_("Please specify %s= and %s=") % ("start_time",
+                                                      "end_time"))
 
     if not maps and not file:
-        core.fatal(_("Please specify %s= or %s=") % ("input","file"))
+        core.fatal(_("Please specify %s= or %s=") % ("input", "file"))
 
     # We may need the mapset
-    mapset =  core.gisenv()["MAPSET"]
-    
+    mapset = core.gisenv()["MAPSET"]
+
     # The name of the space time dataset is optional
     if name:
-	# Check if the dataset name contains the mapset as well
-	if name.find("@") < 0:
-	    id = name + "@" + mapset
-	else:
-	    id = name
+        # Check if the dataset name contains the mapset as well
+        if name.find("@") < 0:
+            id = name + "@" + mapset
+        else:
+            id = name
 
-	if type == "rast" or type == "raster":
-	    sp = dataset_factory("strds", id)
-	elif type == "rast3d":
-	    sp = dataset_factory("str3ds", id)
-	elif type == "vect" or type == "vector":
-	    sp = dataset_factory("stvds", id)
-	else:
-	    core.fatal(_("Unkown map type: %s")%(type))
+        if type == "rast" or type == "raster":
+            sp = dataset_factory("strds", id)
+        elif type == "rast3d":
+            sp = dataset_factory("str3ds", id)
+        elif type == "vect" or type == "vector":
+            sp = dataset_factory("stvds", id)
+        else:
+            core.fatal(_("Unknown map type: %s") % (type))
 
-        
     dbif, connect = init_dbif(None)
 
     if name:
-	# Read content from temporal database
-	sp.select(dbif)
+        # Read content from temporal database
+        sp.select(dbif)
 
-	if sp.is_in_db(dbif) == False:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> no found") % (sp.get_new_map_instance(None).get_type(), name))
+        if not sp.is_in_db(dbif):
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> not found") %
+                       (sp.get_new_map_instance(None).get_type(), name))
 
-	if sp.is_time_relative() and not unit:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> with relative time found, but no relative unit set for %s maps") % (sp.get_new_map_instance(None).get_type(), name, sp.get_new_map_instance(None).get_type()))
-    
+        if sp.is_time_relative() and not unit:
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> with relative time found, "
+                         "but no relative unit set for %s maps") %
+                       (sp.get_new_map_instance(None).get_type(),
+                        name, sp.get_new_map_instance(None).get_type()))
+
     # We need a dummy map object to build the map ids
     dummy = dataset_factory(type, None)
-        
+
     maplist = []
-    
+
     # Map names as comma separated string
     if maps:
         if maps.find(",") < 0:
-            maplist = [maps,]
+            maplist = [maps, ]
         else:
             maplist = maps.split(",")
 
-	# Build the map list again with the ids
-	for count in range(len(maplist)):
-	    row = {}
-	    mapid = dummy.build_id(maplist[count], mapset, None)
-		
-	    row["id"] = mapid
+        # Build the map list again with the ids
+        for count in range(len(maplist)):
+            row = {}
+            mapid = dummy.build_id(maplist[count], mapset, None)
+
+            row["id"] = mapid
             maplist[count] = row
-            
+
     # Read the map list from file
     if file:
         fd = open(file, "r")
@@ -145,69 +161,73 @@
 
             mapname = line_list[0].strip()
             row = {}
-            
-	    if start_time_in_file and  end_time_in_file:
-	        row["start"] = line_list[1].strip()
-	        row["end"] = line_list[2].strip()
 
-	    if start_time_in_file and  not end_time_in_file:
-	        row["start"] = line_list[1].strip()
-	    
-	    row["id"] = dummy.build_id(mapname, mapset)
+            if start_time_in_file and end_time_in_file:
+                row["start"] = line_list[1].strip()
+                row["end"] = line_list[2].strip()
 
+            if start_time_in_file and not end_time_in_file:
+                row["start"] = line_list[1].strip()
+
+            row["id"] = dummy.build_id(mapname, mapset)
+
             maplist.append(row)
-    
+
     num_maps = len(maplist)
     map_object_list = []
     statement = ""
-    
+
     core.message(_("Gathering map information"))
-    
+
     for count in range(len(maplist)):
-	core.percent(count, num_maps, 1)
+        core.percent(count, num_maps, 1)
 
         # Get a new instance of the map type
         map = dataset_factory(type, maplist[count]["id"])
 
         # Use the time data from file
-        if maplist[count].has_key("start"):
+        if "start" in maplist[count]:
             start = maplist[count]["start"]
-        if maplist[count].has_key("end"):
+        if "end" in maplist[count]:
             end = maplist[count]["end"]
-            
+
         is_in_db = False
 
         # Put the map into the database
-        if map.is_in_db(dbif) == False:
+        if not map.is_in_db(dbif):
             is_in_db = False
             # Break in case no valid time is provided
-            if start == "" or start == None:
+            if start == "" or start is None:
                 dbif.close()
                 if map.get_layer():
-		    core.fatal(_("Unable to register %s map <%s> with layer %s. The map has no valid time and the start time is not set.") % \
-				(map.get_type(), map.get_map_id(), map.get_layer() ))
-		else:
-		    core.fatal(_("Unable to register %s map <%s>. The map has no valid time and the start time is not set.") % \
-				(map.get_type(), map.get_map_id() ))
-	    
-	    if unit:
+                    core.fatal(_("Unable to register %s map <%s> with layer %s. "
+                                 "The map has no valid time and the start time is not set.") %
+                               (map.get_type(), map.get_map_id(), map.get_layer()))
+                else:
+                    core.fatal(_("Unable to register %s map <%s>. The map has no valid"
+                                 " time and the start time is not set.") %
+                               (map.get_type(), map.get_map_id()))
+
+            if unit:
                 map.set_time_to_relative()
             else:
                 map.set_time_to_absolute()
- 
+
         else:
             is_in_db = True
-            if core.overwrite == False:
-		continue
+            if not core.overwrite:
+                continue
             map.select(dbif)
             if name and map.get_temporal_type() != sp.get_temporal_type():
                 dbif.close()
                 if map.get_layer():
-		    core.fatal(_("Unable to register %s map <%s> with layer. The temporal types are different.") %  \
-		                 (map.get_type(), map.get_map_id(), map.get_layer()))
-		else:
-		    core.fatal(_("Unable to register %s map <%s>. The temporal types are different.") %  \
-		                 (map.get_type(), map.get_map_id()))
+                    core.fatal(_("Unable to register %s map <%s> with layer. "
+                                 "The temporal types are different.") %
+                               (map.get_type(), map.get_map_id(), map.get_layer()))
+                else:
+                    core.fatal(_("Unable to register %s map <%s>. "
+                                 "The temporal types are different.") %
+                               (map.get_type(), map.get_map_id()))
 
         # Load the data from the grass file database
         map.load()
@@ -217,21 +237,25 @@
             # In case the time is in the input file we ignore the increment counter
             if start_time_in_file:
                 count = 1
-            assign_valid_time_to_map(ttype=map.get_temporal_type(), map=map, start=start, end=end, unit=unit, increment=increment, mult=count, interval=interval)
+            assign_valid_time_to_map(ttype=map.get_temporal_type(),
+                                     map=map, start=start, end=end, unit=unit,
+                                     increment=increment, mult=count,
+                                     interval=interval)
 
         if is_in_db:
-           #  Gather the SQL update statement
-           statement += map.update_all(dbif=dbif, execute=False)
+            #  Gather the SQL update statement
+            statement += map.update_all(dbif=dbif, execute=False)
         else:
-           #  Gather the SQL insert statement
-           statement += map.insert(dbif=dbif, execute=False)
+            #  Gather the SQL insert statement
+            statement += map.insert(dbif=dbif, execute=False)
 
         # Sqlite3 performs better for huge datasets when committing in small chunks
         if dbmi.__name__ == "sqlite3":
             if count % 100 == 0:
-                if statement != None and statement != "":
-                    core.message(_("Registering maps in the temporal database"))
-		    dbif.execute_transaction(statement)
+                if statement is not None and statement != "":
+                    core.message(_("Registering maps in the temporal database")
+                                 )
+                    dbif.execute_transaction(statement)
                     statement = ""
 
         # Store the maps in a list to register in a space time dataset
@@ -240,7 +264,7 @@
 
     core.percent(num_maps, num_maps, 1)
 
-    if statement != None and statement != "":
+    if statement is not None and statement != "":
         core.message(_("Register maps in the temporal database"))
         dbif.execute_transaction(statement)
 
@@ -251,63 +275,80 @@
         num_maps = len(map_object_list)
         core.message(_("Register maps in the space time raster dataset"))
         for map in map_object_list:
-	    core.percent(count, num_maps, 1)
-	    sp.register_map(map=map, dbif=dbif)
+            core.percent(count, num_maps, 1)
+            sp.register_map(map=map, dbif=dbif)
             count += 1
-        
+
     # Update the space time tables
     if name:
         core.message(_("Update space time raster dataset"))
-	sp.update_from_registered_maps(dbif)
+        sp.update_from_registered_maps(dbif)
 
     if connect == True:
         dbif.close()
 
     core.percent(num_maps, num_maps, 1)
-        
 
+
 ###############################################################################
 
 def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None, mult=1, interval=False):
     """!Assign the valid time to a map dataset
 
-       @param ttype: The temporal type which should be assigned and which the time format is of
+       @param ttype: The temporal type which should be assigned
+                     and which the time format is of
        @param map: A map dataset object derived from abstract_map_dataset
-       @param start: The start date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param end: The end date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param unit: The unit of the relative time: years, months, days, hours, minutes, seconds
-       @param increment: Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative is integer 1)
+       @param start: The start date and time of the first raster map
+                     (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                     format relative is integer 5)
+       @param end: The end date and time of the first raster map
+                   (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                   format relative is integer 5)
+       @param unit: The unit of the relative time: years, months,
+                    days, hours, minutes, seconds
+       @param increment: Time increment between maps for time stamp creation
+                        (format absolute: NNN seconds, minutes, hours, days,
+                        weeks, months, years; format relative is integer 1)
       @param mult: A multiplier for the increment
-       @param interval: If True, time intervals are created in case the start time and an increment is provided
+       @param interval: If True, time intervals are created in case the start
+                        time and an increment is provided
     """
 
     if ttype == "absolute":
         start_time = string_to_datetime(start)
-        if start_time == None:
+        if start_time is None:
             dbif.close()
-            core.fatal(_("Unable to convert string \"%s\"into a datetime object")%(start))
+            core.fatal(_("Unable to convert string \"%s\" into a "
+                         "datetime object") % (start))
         end_time = None
 
         if end:
             end_time = string_to_datetime(end)
-            if end_time == None:
+            if end_time is None:
                 dbif.close()
-                core.fatal(_("Unable to convert string \"%s\"into a datetime object")%(end))
+                core.fatal(_("Unable to convert string \"%s\" into a "
+                             "datetime object") % (end))
 
         # Add the increment
         if increment:
-            start_time = increment_datetime_by_string(start_time, increment, mult)
-            if start_time == None:
-		core.fatal(_("Error in increment computation"))
+            start_time = increment_datetime_by_string(
+                start_time, increment, mult)
+            if start_time is None:
+                core.fatal(_("Error in increment computation"))
             if interval:
-                end_time = increment_datetime_by_string(start_time, increment, 1)
-		if end_time == None:
-		    core.fatal(_("Error in increment computation"))
-	if map.get_layer():
-	    core.verbose(_("Set absolute valid time for map <%s> with layer %s to %s - %s") % (map.get_map_id(), map.get_layer(), str(start_time), str(end_time)))
+                end_time = increment_datetime_by_string(
+                    start_time, increment, 1)
+                if end_time is None:
+                    core.fatal(_("Error in increment computation"))
+        if map.get_layer():
+            core.verbose(_("Set absolute valid time for map <%(id)s> with "
+                           "layer %(layer)s to %(start)s - %(end)s") %
+                         {'id': map.get_map_id(), 'layer': map.get_layer(),
+                          'start': str(start_time), 'end': str(end_time)})
         else:
-	    core.verbose(_("Set absolute valid time for map <%s> to %s - %s") % (map.get_map_id(), str(start_time), str(end_time)))
-        
+            core.verbose(_("Set absolute valid time for map <%s> to %s - %s") %
+                         (map.get_map_id(), str(start_time), str(end_time)))
+
         map.set_absolute_time(start_time, end_time, None)
     else:
         start_time = int(start)
@@ -321,19 +362,26 @@
             if interval:
                 end_time = start_time + int(increment)
 
-	if map.get_layer():
-	    core.verbose(_("Set relative valid time for map <%s> with layer %s to %i - %s with unit %s") % (map.get_map_id(), map.get_layer(), start_time,  str(end_time), unit))
+        if map.get_layer():
+            core.verbose(_("Set relative valid time for map <%s> with layer %s "
+                           "to %i - %s with unit %s") %
+                         (map.get_map_id(), map.get_layer(), start_time,
+                          str(end_time), unit))
         else:
-	    core.verbose(_("Set relative valid time for map <%s> to %i - %s with unit %s") % (map.get_map_id(), start_time,  str(end_time), unit))
-	    
+            core.verbose(_("Set relative valid time for map <%s> to %i - %s "
+                           "with unit %s") % (map.get_map_id(), start_time,
+                                              str(end_time), unit))
+
         map.set_relative_time(start_time, end_time, unit)
 
 ###############################################################################
 
+
 def dataset_factory(type, id):
     """!A factory functions to create space time or map datasets
-    
-       @param type: the dataset type: rast or raster, rast3d, vect or vector, strds, str3ds, stvds
+
+       @param type: the dataset type: rast or raster, rast3d,
+                    vect or vector, strds, str3ds, stvds
        @param id: The id of the dataset ("name at mapset")
     """
     if type == "strds":
@@ -346,7 +394,7 @@
         sp = raster_dataset(id)
     elif type == "rast3d":
         sp = raster3d_dataset(id)
-    elif type == "vect" or  type == "vector":
+    elif type == "vect" or type == "vector":
         sp = vector_dataset(id)
     else:
         core.error(_("Unknown dataset type: %s") % type)
@@ -356,24 +404,32 @@
 
 ###############################################################################
 
+
 def list_maps_of_stds(type, input, columns, order, where, separator, method, header):
     """! List the maps of a space time dataset using different methods
 
         @param type: The type of the maps raster, raster3d or vector
         @param input: Name of a space time raster dataset
-        @param columns: A comma separated list of columns to be printed to stdout 
-        @param order: A comma separated list of columns to order the space time dataset by category 
-        @param where: A where statement for selected listing without "WHERE" e.g: start_time < "2001-01-01" and end_time > "2001-01-01"
+        @param columns: A comma separated list of columns to be printed to stdout
+        @param order: A comma separated list of columns to order the
+                      space time dataset by category
+        @param where: A where statement for selected listing without "WHERE"
+                      e.g: start_time < "2001-01-01" and end_time > "2001-01-01"
         @param separator: The field separator character between the columns
-        @param method: String identifier to select a method out of cols,comma,delta or deltagaps
+        @param method: String identifier to select a method out of cols,
+                       comma,delta or deltagaps
             * "cols": Print preselected columns specified by columns
             * "comma": Print the map ids (name at mapset) as comma separated string
-            * "delta": Print the map ids (name at mapset) with start time, end time, relative length of intervals and the relative distance to the begin
-            * "deltagaps": Same as "delta" with additional listing of gaps. Gaps can be simply identified as the id is "None"
-            * "gran": List map using the granularity of the space time dataset, columns are identical to deltagaps 
-        @param header: Set True to print column names 
+            * "delta": Print the map ids (name at mapset) with start time,
+                       end time, relative length of intervals and the relative
+                       distance to the begin
+            * "deltagaps": Same as "delta" with additional listing of gaps.
+                           Gaps can be simply identified as the id is "None"
+            * "gran": List map using the granularity of the space time dataset,
+                      columns are identical to deltagaps
+        @param header: Set True to print column names
     """
-    mapset =  core.gisenv()["MAPSET"]
+    mapset = core.gisenv()["MAPSET"]
 
     if input.find("@") >= 0:
         id = input
@@ -381,21 +437,21 @@
         id = input + "@" + mapset
 
     sp = dataset_factory(type, id)
-    
-    if sp.is_in_db() == False:
+
+    if not sp.is_in_db():
         core.fatal(_("Dataset <%s> not found in temporal database") % (id))
 
     sp.select()
 
-    if separator == None or separator == "":
+    if separator is None or separator == "":
         separator = "\t"
-           
+
     # This method expects a list of objects for gap detection
     if method == "delta" or method == "deltagaps" or method == "gran":
-	if type == "stvds":
-	    columns = "id,name,layer,mapset,start_time,end_time"
-	else:
-	    columns = "id,name,mapset,start_time,end_time"
+        if type == "stvds":
+            columns = "id,name,layer,mapset,start_time,end_time"
+        else:
+            columns = "id,name,mapset,start_time,end_time"
         if method == "deltagaps":
             maps = sp.get_registered_maps_as_objects_with_gaps(where, None)
         elif method == "delta":
@@ -405,15 +461,15 @@
 
         if header:
             string = ""
-	    string += "%s%s" % ("id", separator)
-	    string += "%s%s" % ("name", separator)
+            string += "%s%s" % ("id", separator)
+            string += "%s%s" % ("name", separator)
             if type == "stvds":
-		string += "%s%s" % ("layer", separator)
-	    string += "%s%s" % ("mapset", separator)
+                string += "%s%s" % ("layer", separator)
+            string += "%s%s" % ("mapset", separator)
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("interval_length", separator)
-            string += "%s"   % ("distance_from_begin")
+            string += "%s" % ("distance_from_begin")
 
         if maps and len(maps) > 0:
 
@@ -431,7 +487,7 @@
 
                 start, end = map.get_valid_time()
                 if end:
-                    delta = end -start
+                    delta = end - start
                 else:
                     delta = None
                 delta_first = start - first_time
@@ -444,13 +500,13 @@
                 string = ""
                 string += "%s%s" % (map.get_id(), separator)
                 string += "%s%s" % (map.get_name(), separator)
-		if type == "stvds":
-		    string += "%s%s" % (map.get_layer(), separator)
+                if type == "stvds":
+                    string += "%s%s" % (map.get_layer(), separator)
                 string += "%s%s" % (map.get_mapset(), separator)
                 string += "%s%s" % (start, separator)
                 string += "%s%s" % (end, separator)
                 string += "%s%s" % (delta, separator)
-                string += "%s"   % (delta_first)
+                string += "%s" % (delta_first)
                 print string
 
     else:
@@ -497,30 +553,35 @@
                         else:
                             output += str(col)
                         count += 1
-                        
+
                     print output
 
 ###############################################################################
 
+
 def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, separator, method, spatial=False):
-    """! Sample the input space time datasets with a sample space time dataset and print the result to stdout
+    """!Sample the input space time datasets with a sample 
+       space time dataset and print the result to stdout
 
-        In case multiple maps are located in the current granule, the map names are separated by comma.
-        
-        In case a layer is present, the names map ids are extended in this form: name:layer at mapset 
+        In case multiple maps are located in the current granule, 
+        the map names are separated by comma.
 
+        In case a layer is present, the names map ids are extended 
+        in this form: name:layer at mapset
+
         Attention: Do not use the comma as separator
 
         @param intype:  Type of the input space time dataset (strds, stvds or str3ds)
        @param sampletype: Type of the sample space time dataset (strds, stvds or str3ds)
        @param inputs: Comma separated names of the input space time datasets
         @param sampler: Name of a space time dataset used for temporal sampling
-        @param header: Set True to print column names 
+        @param header: Set True to print column names
         @param separator: The field separator character between the columns
-        @param method: The method to be used for temporal sampling (start,during,contain,overlap,equal)
+        @param method: The method to be used for temporal sampling 
+                       (start,during,contain,overlap,equal)
         @param spatial: Perform spatial overlapping check
     """
-    mapset =  core.gisenv()["MAPSET"]
+    mapset = core.gisenv()["MAPSET"]
 
     input_list = inputs.split(",")
     sts = []
@@ -554,9 +615,9 @@
 
     sst.select(dbif)
 
-    if separator == None or separator == "" or separator.find(",") >= 0:
+    if separator is None or separator == "" or separator.find(",") >= 0:
         separator = " | "
-       
+
     mapmatrizes = []
     for st in sts:
         mapmatrix = st.sample_by_dataset(sst, method, spatial, dbif)
@@ -573,7 +634,7 @@
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("interval_length", separator)
-            string += "%s"   % ("distance_from_begin")
+            string += "%s" % ("distance_from_begin")
 
         first_time, dummy = mapmatrizes[0][0]["granule"].get_valid_time()
 
@@ -590,7 +651,7 @@
                         mapnames += ",%s" % str(sample.get_id())
                     count += 1
                 mapname_list.append(mapnames)
-                
+
             entry = mapmatrizes[0][i]
             map = entry["granule"]
 
@@ -613,8 +674,7 @@
             string += "%s%s" % (start, separator)
             string += "%s%s" % (end, separator)
             string += "%s%s" % (delta, separator)
-            string += "%s"   % (delta_first)
+            string += "%s" % (delta_first)
             print string
 
     dbif.close()
-

Modified: grass/trunk/lib/python/temporal/spatial_extent.py
===================================================================
--- grass/trunk/lib/python/temporal/spatial_extent.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/spatial_extent.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -6,6 +6,8 @@
 
 Usage:
 
+ at code
+
 >>> import grass.temporal as tgis
 >>> extent = tgis.RasterSpatialExtent( 
 ... ident="raster at PERMANENT", north=90, south=90, east=180, west=180,
@@ -26,6 +28,7 @@
 ... ident="stvds at PERMANENT", north=90, south=90, east=180, west=180,
 ... top=100, bottom=-20)
 
+ at endcode
 
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -43,8 +46,11 @@
         This class implements a three dimensional axis aligned bounding box
         and functions to compute topological relationships
         
-        >>> import grass.temporal as tgis
-        >>> extent = tgis.SpatialExtent(table="raster_spatial_extent", 
+        Usage:
+        
+        @code
+        
+        >>> extent = SpatialExtent(table="raster_spatial_extent", 
         ... ident="soil at PERMANENT", north=90, south=90, east=180, west=180,
         ... top=100, bottom=-20)
         >>> extent.id
@@ -76,6 +82,8 @@
         west=180.0
         top=100.0
         bottom=-20.0
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, north=None, south=None, 
                  east=None, west=None, top=None, bottom=None, proj="XY"):
@@ -98,7 +106,8 @@
         """
 
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute overlapping_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "overlapping_2d for spatial extents"))
             return False
 
         N = extent.get_north()
@@ -142,11 +151,14 @@
            
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.overlapping(B)
         True
+        
+        @endcode
         """
 
         if not self.overlapping_2d(extent):
@@ -164,8 +176,8 @@
         return True
 
     def intersect_2d(self, extent):
-        """!Return the two dimensional intersection as spatial_extent object or None
-           in case no intersection was found.
+        """!Return the two dimensional intersection as spatial_extent 
+           object or None in case no intersection was found.
         """
 
         if not self.overlapping_2d(extent):
@@ -211,14 +223,15 @@
         return new
 
     def intersect(self, extent):
-        """!Return the three dimensional intersection as spatial_extent object or None
-        in case no intersection was found.
+        """!Return the three dimensional intersection as spatial_extent 
+        object or None in case no intersection was found.
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -228,7 +241,7 @@
          | West:....................... 10.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -238,7 +251,7 @@
          | West:....................... 10.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -248,7 +261,7 @@
          | West:....................... 30.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -258,7 +271,7 @@
          | West:....................... 30.0
          | Top:........................ 50.0
          | Bottom:..................... -30.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -268,6 +281,8 @@
          | West:....................... 30.0
          | Top:........................ 30.0
          | Bottom:..................... -30.0
+         
+         @endcode
         """
 
         if not self.overlapping(extent):
@@ -295,15 +310,19 @@
         return new
 
     def is_in_2d(self, extent):
-        """Check two dimensional if the self is located in extent
-
+        """!Check two dimensional if self is located in extent
+        
+        @verbatim
          _____
         |A _  |
         | |_| |
         |_____|B 
+        
+        @endverbatim
         """
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute is_in_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "is_in_2d for spatial extents"))
             return False
 
         eN = extent.get_north()
@@ -338,17 +357,20 @@
         return True
 
     def is_in(self, extent):
-        """Check three dimensional if the self is located in extent 
+        """!Check three dimensional if the self is located in extent 
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=79, south=21, east=59, west=11, bottom=-49, top=49)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=79, south=21, east=59, west=11, bottom=-49, top=49)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.is_in(B)
         True
         >>> B.is_in(A)
         False
+        
+        @endcode
         """
         if not self.is_in_2d(extent):
             return False
@@ -367,18 +389,19 @@
         return True
 
     def contain_2d(self, extent):
-        """Check two dimensional if self contains extent """
+        """!Check two dimensional if self contains extent """
         return extent.is_in_2d(self)
 
     def contain(self, extent):
-        """Check three dimensional if self contains extent """
+        """!Check three dimensional if self contains extent """
         return extent.is_in(self)
 
     def equivalent_2d(self, extent):
-        """Check two dimensional if self is equivalent to extent """
+        """!Check two dimensional if self is equivalent to extent """
 
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute equivalent_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "equivalent_2d for spatial extents"))
             return False
 
         eN = extent.get_north()
@@ -413,7 +436,7 @@
         return True
 
     def equivalent(self, extent):
-        """Check three dimensional if self is equivalent to extent """
+        """!Check three dimensional if self is equivalent to extent """
 
         if not self.equivalent_2d(extent):
             return False
@@ -432,7 +455,9 @@
         return True
 
     def cover_2d(self, extent):
-        """Return True if two dimensional self covers extent
+        """!Return True if two dimensional self covers extent
+           
+           @verbatim
             _____    _____    _____    _____
            |A  __|  |__  A|  |A | B|  |B | A|
            |  |B |  | B|  |  |  |__|  |__|  |
@@ -447,6 +472,8 @@
            |A|B  |  |_____|A |A|B|A|  |_____|A
            | |   |  |B    |  | | | |  |_____|B
            |_|___|  |_____|  |_|_|_|  |_____|A
+           
+           @endverbatim
 
            The following cases are excluded:
            * contain
@@ -513,7 +540,7 @@
         return True
 
     def cover(self, extent):
-        """Return True if three dimensional self covers extent
+        """!Return True if three dimensional self covers extent
 
            The following cases are excluded:
            * contain
@@ -598,22 +625,27 @@
         return True
 
     def covered_2d(self, extent):
-        """Check two dimensional if self is covered by  extent """
+        """!Check two dimensional if self is covered by extent """
 
         return extent.cover_2d(self)
 
     def covered(self, extent):
-        """Check three dimensional if self is covered by extent """
+        """!Check three dimensional if self is covered by extent """
 
         return extent.cover(self)
 
     def overlap_2d(self, extent):
-        """Return True if the two dimensional extents overlap. Code is lend from wind_overlap.c in lib/gis
+        """!Return True if the two dimensional extents overlap. Code is 
+           borrowed from wind_overlap.c in lib/gis
+           
+           @verbatim
             _____
            |A  __|__
            |  |  | B|
            |__|__|  |
               |_____|
+              
+           @endverbatim
 
            The following cases are excluded:
            * contain
@@ -668,7 +700,7 @@
         return True
 
     def overlap(self, extent):
-        """Return True if the three dimensional extents overlap
+        """!Return True if the three dimensional extents overlap
 
            The following cases are excluded:
            * contain
@@ -731,22 +763,26 @@
         return True
 
     def meet_2d(self, extent):
-        """ Check if self and extent meet each other in two dimensions
+        """!Check if self and extent meet each other in two dimensions
+        
+        @verbatim
           _____ _____    _____ _____
          |  A  |  B  |  |  B  |  A  |
          |_____|     |  |     |     |
                |_____|  |_____|_____|
 
-                 ___
-                | A |
-                |   |
-                |___|    _____
-               |  B  |  |  B  |
-               |     |  |     |
-               |_____|  |_____|_
-                          |  A  |
-                          |     |
-                          |_____|
+           ___
+          | A |
+          |   |
+          |___|    _____
+         |  B  |  |  B  |
+         |     |  |     |
+         |_____|  |_____|_
+                    |  A  |
+                    |     |
+                    |_____|
+         
+         @endverbatim
 
         """
 
@@ -805,7 +841,7 @@
         return True
 
     def meet(self, extent):
-        """ Check if self and extent meet each other in three dimensions"""
+        """!Check if self and extent meet each other in three dimensions"""
         eN = extent.get_north()
         eS = extent.get_south()
         eE = extent.get_east()
@@ -880,7 +916,7 @@
         return True
 
     def disjoint_2d(self, extent):
-        """Return True if the two dimensional extents are disjoint
+        """!Return True if the two dimensional extents are disjoint
         """
 
         if self.overlapping_2d(extent) or self.meet_2d(extent):
@@ -888,7 +924,7 @@
         return True
 
     def disjoint(self, extent):
-        """Return True if the three dimensional extents are disjoint
+        """!Return True if the three dimensional extents are disjoint
         """
 
         if self.overlapping(extent) or self.meet(extent):
@@ -896,7 +932,7 @@
         return True
 
     def spatial_relation_2d(self, extent):
-        """Returns the two dimensional spatial relation between self and extent
+        """!Returns the two dimensional spatial relation between self and extent
 
         Spatial relations are:
         * disjoint
@@ -931,7 +967,7 @@
         return "unknown"
 
     def spatial_relation(self, extent):
-        """Returns the three dimensional spatial relation between self and extent
+        """!Returns the three dimensional spatial relation between self and extent
 
         Spatial relations are:
         * disjoint
@@ -946,19 +982,20 @@
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation(B)
         'equivalent'
         >>> B.spatial_relation(A)
         'equivalent'
-        >>> B = tgis.SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'cover'
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'cover'
         >>> A.spatial_relation(B)
@@ -967,135 +1004,137 @@
         'covered'
         >>> B.spatial_relation(A)
         'covered'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'cover'
         >>> B.spatial_relation_2d(A)
         'covered'
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
         >>> B.spatial_relation(A)
         'covered'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'contain'
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
         >>> A.spatial_relation(B)
         'contain'
         >>> B.spatial_relation(A)
         'in'
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
         >>> A.spatial_relation_2d(B)
         'overlap'
         >>> A.spatial_relation(B)
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
         >>> A.spatial_relation_2d(B)
         'in'
         >>> A.spatial_relation(B)
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
         >>> A.spatial_relation(B)
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
         >>> A.spatial_relation(B)
         'in'
-        >>> A = tgis.SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'disjoint'
         >>> A.spatial_relation(B)
         'disjoint'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
+        
+        @endcode
         """
 
         if self.equivalent(extent):
@@ -1118,7 +1157,7 @@
         return "unknown"
 
     def set_spatial_extent(self, north, south, east, west, top, bottom):
-        """Set the spatial extent"""
+        """!Set the spatial extent"""
 
         self.set_north(north)
         self.set_south(south)
@@ -1128,7 +1167,7 @@
         self.set_bottom(bottom)
 
     def set_projection(self, proj):
-        """Set the projection of the spatial extent it should be XY or LL.
+        """!Set the projection of the spatial extent; it should be XY or LL.
            As default the projection is XY
         """
         if proj is None or (proj != "XY" and proj != "LL"):
@@ -1144,54 +1183,54 @@
         self.set_west(west)
 
     def set_id(self, ident):
-        """Convenient method to set the unique identifier (primary key)"""
+        """!Convenient method to set the unique identifier (primary key)"""
         self.ident = ident
         self.D["id"] = ident
 
     def set_north(self, north):
-        """Set the northern edge of the map"""
+        """!Set the northern edge of the map"""
         if north is not None:
             self.D["north"] = float(north)
         else:
             self.D["north"] = None
 
     def set_south(self, south):
-        """Set the southern edge of the map"""
+        """!Set the southern edge of the map"""
         if south is not None:
             self.D["south"] = float(south)
         else:
             self.D["south"] = None
 
     def set_west(self, west):
-        """Set the western edge of the map"""
+        """!Set the western edge of the map"""
         if west is not None:
             self.D["west"] = float(west)
         else:
             self.D["west"] = None
 
     def set_east(self, east):
-        """Set the eastern edge of the map"""
+        """!Set the eastern edge of the map"""
         if east is not None:
             self.D["east"] = float(east)
         else:
             self.D["east"] = None
 
     def set_top(self, top):
-        """Set the top edge of the map"""
+        """!Set the top edge of the map"""
         if top is not None:
             self.D["top"] = float(top)
         else:
             self.D["top"] = None
 
     def set_bottom(self, bottom):
-        """Set the bottom edge of the map"""
+        """!Set the bottom edge of the map"""
         if bottom is not None:
             self.D["bottom"] = float(bottom)
         else:
             self.D["bottom"] = None
 
     def get_id(self):
-        """Convenient method to get the unique identifier (primary key)
+        """!Convenient method to get the unique identifier (primary key)
            @return None if not found
         """
         if "id" in self.D:
@@ -1200,15 +1239,16 @@
             return None
 
     def get_projection(self):
-        """Get the projection of the spatial extent"""
+        """!Get the projection of the spatial extent"""
         return self.D["proj"]
 
     def get_volume(self):
-        """Compute the volume of the extent, in case z is zero 
+        """!Compute the volume of the extent, in case z is zero 
            (top == bottom or top - bottom = 1) the area is returned"""
 
         if self.get_projection() == "LL":
-            core.error(_("Volume computation is not supported for LL projections"))
+            core.error(_("Volume computation is not supported "
+                         "for LL projections"))
 
         area = self.get_area()
 
@@ -1222,10 +1262,11 @@
         return area * z
 
     def get_area(self):
-        """Compute the area of the extent, extent in z direction is ignored"""
+        """!Compute the area of the extent, extent in z direction is ignored"""
 
         if self.get_projection() == "LL":
-            core.error(_("Area computation is not supported for LL projections"))
+            core.error(_("Area computation is not supported "
+                         "for LL projections"))
 
         bbox = self.get_spatial_extent()
 
@@ -1235,18 +1276,20 @@
         return x * y
 
     def get_spatial_extent(self):
-        """Return a tuple (north, south, east, west, top, bottom) of the spatial extent"""
+        """!Return a tuple (north, south, east, west, top, bottom) 
+           of the spatial extent"""
 
         return (
-            self.get_north(), self.get_south, self.get_east(), self.get_west(),
-            self.get_top(), self.get_bottom())
+            self.north, self.south, self.east, self.west,
+            self.top, self.bottom)
 
     def get_spatial_extent_2d(self):
-        """Return a tuple (north, south, east, west,) of the 2d spatial extent"""
-        return (self.get_north(), self.get_south, self.get_east(), self.get_west())
+        """!Return a tuple (north, south, east, west) of the 2d spatial extent
+        """
+        return (self.north, self.south, self.east, self.west)
 
     def get_north(self):
-        """Get the northern edge of the map
+        """!Get the northern edge of the map
            @return None if not found"""
         if "north" in self.D:
             return self.D["north"]
@@ -1254,7 +1297,7 @@
             return None
 
     def get_south(self):
-        """Get the southern edge of the map
+        """!Get the southern edge of the map
            @return None if not found"""
         if "south" in self.D:
             return self.D["south"]
@@ -1262,7 +1305,7 @@
             return None
 
     def get_east(self):
-        """Get the eastern edge of the map
+        """!Get the eastern edge of the map
            @return None if not found"""
         if "east" in self.D:
             return self.D["east"]
@@ -1270,7 +1313,7 @@
             return None
 
     def get_west(self):
-        """Get the western edge of the map
+        """!Get the western edge of the map
            @return None if not found"""
         if "west" in self.D:
             return self.D["west"]
@@ -1278,7 +1321,7 @@
             return None
 
     def get_top(self):
-        """Get the top edge of the map
+        """!Get the top edge of the map
            @return None if not found"""
         if "top" in self.D:
             return self.D["top"]
@@ -1286,7 +1329,7 @@
             return None
 
     def get_bottom(self):
-        """Get the bottom edge of the map
+        """!Get the bottom edge of the map
            @return None if not found"""
         if "bottom" in self.D:
             return self.D["bottom"]
@@ -1302,7 +1345,7 @@
     bottom= property(fget=get_bottom, fset=set_bottom)
 
     def print_info(self):
-        """Print information about this class in human readable style"""
+        """!Print information about this class in human readable style"""
         #      0123456789012345678901234567890
         print " +-------------------- Spatial extent ----------------------------------------+"
         print " | North:...................... " + str(self.get_north())
@@ -1325,37 +1368,38 @@
 ###############################################################################
 
 class RasterSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class Raster3DSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster3d_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class VectorSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "vector_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class STRDSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "strds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class STR3DSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "str3ds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class STVDSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "stvds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 

Modified: grass/trunk/lib/python/temporal/stds_export.py
===================================================================
--- grass/trunk/lib/python/temporal/stds_export.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/stds_export.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -14,9 +14,9 @@
 compression="gzip"
 workdir="/tmp"
 where=None
-_format="GTiff"
-_type="strds"
-tgis.export_stds(input, output, compression, workdir, where, _format, _type)
+format_="GTiff"
+type_="strds"
+tgis.export_stds(input, output, compression, workdir, where, format_, type_)
 ...
 @endcode
 
@@ -39,7 +39,7 @@
 metadata_file_name = "metadata.txt"
 read_file_name = "readme.txt"
 list_file_name = "list.txt"
-tmp_tar_file_name = "archive" 
+tmp_tar_file_name = "archive"
 
 # This global variable is for unique vector map export,
 # since single vector maps may have several layer
@@ -47,6 +47,8 @@
 exported_maps = {}
 
 ############################################################################
+
+
 def _export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -59,40 +61,46 @@
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r.out.gdal as tif
         out_name = name + ".tif"
         if datatype == "CELL":
             nodata = max_val + 1
             if nodata < 256 and min_val >= 0:
-                gdal_type = "Byte" 
+                gdal_type = "Byte"
             elif nodata < 65536 and min_val >= 0:
-                gdal_type = "UInt16" 
+                gdal_type = "UInt16"
             elif min_val >= 0:
-                gdal_type = "UInt32" 
+                gdal_type = "UInt32"
             else:
-                gdal_type = "Int32" 
-            ret = core.run_command("r.out.gdal", flags="c", input=name, output=out_name, nodata=nodata, type=gdal_type, format="GTiff")
+                gdal_type = "Int32"
+            ret = core.run_command("r.out.gdal", flags="c", input=name, 
+                                   output=out_name, nodata=nodata, 
+                                   type=gdal_type, format="GTiff")
         else:
-            ret = core.run_command("r.out.gdal", flags="c", input=name, output=out_name, format="GTiff")
+            ret = core.run_command("r.out.gdal", flags="c",
+                                   input=name, output=out_name, format="GTiff")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
             core.fatal(_("Unable to export raster map <%s>" % name))
-            
+
         tar.add(out_name)
 
-        # Export the color rules 
+        # Export the color rules
         out_name = name + ".color"
         ret = core.run_command("r.colors.out", map=name, rules=out_name)
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export color rules for raster map <%s> r.out.gdal" % name))
-            
+            core.fatal(_("Unable to export color rules for raster "
+                         "map <%s> r.out.gdal" % name))
+
         tar.add(out_name)
 
 ############################################################################
+
+
 def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -102,17 +110,20 @@
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r.pack
         ret = core.run_command("r.pack", input=name, flags="c")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export raster map <%s> with r.pack" % name))
-            
+            core.fatal(_("Unable to export raster map <%s> with r.pack" %
+                         name))
+
         tar.add(name + ".pack")
-        
+
 ############################################################################
+
+
 def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -125,29 +136,33 @@
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the vector map with v.out.ogr
-        ret = core.run_command("v.out.ogr", input=name, dsn=(name + ".xml"), layer=layer, format="GML")
+        ret = core.run_command("v.out.ogr", input=name, 
+                               dsn=(name + ".xml"), layer=layer, format="GML")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export vector map <%s> as GML with v.out.ogr" % name))
-            
+            core.fatal(_("Unable to export vector map <%s> as "
+                         "GML with v.out.ogr" % name))
+
         tar.add(name + ".xml")
         tar.add(name + ".xsd")
-                
+
 ############################################################################
+
+
 def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
         start = row["start_time"]
         end = row["end_time"]
         layer = row["layer"]
-        
+
         # Export unique maps only
         if name in exported_maps:
             continue
-        
+
         if not layer:
             layer = 1
         if not end:
@@ -160,13 +175,16 @@
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export vector map <%s> with v.pack" % name))
-            
+            core.fatal(_("Unable to export vector map <%s> with v.pack" %
+                         name))
+
         tar.add(name + ".pack")
-        
+
         exported_maps[name] = name
-        
+
 ############################################################################
+
+
 def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -176,172 +194,192 @@
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r3.pack
         ret = core.run_command("r3.pack", input=name, flags="c")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export raster map <%s> with r3.pack" % name))
-            
+            core.fatal(_("Unable to export raster map <%s> with r3.pack" %
+                         name))
+
         tar.add(name + ".pack")
 
 ############################################################################
-def export_stds(input, output, compression, workdir, where, _format="pack", _type="strds"):
-	"""
-		!Export space time datasets as tar archive with optional compression
-		
-		This method should be used to export space time datasets of type raster and vector
-		as tar archive that can be reimported with the method import_stds().
-		
-		@param input The name of the space time dataset to export
-		@param output The name of the archive file
-		@param compression The compression of the archive file: 
-		  * "no"  no compression
-		  * "gzip" GNU zip compression
-		  * "bzip2" Bzip compression
-		@param workdir The working directory used for extraction and packing
-		@param where The temporal WHERE SQL statement to select a subset of maps from the space time dataset
-		@param _format The export format:
-		  * "GTiff" Geotiff format, only for raster maps
-		  * "pack" The GRASS raster, 3D raster or vector Pack format, this is the default setting
-		  * "GML" GML file export format, only for vector maps, v.out.ogr export option
-		@param type The space time dataset type
-		  * "strds" Space time raster dataset
-		  * "str3ds" Space time 3D raster dataset
-		  * "stvds" Space time vector dataset
-	"""
-	mapset =  core.gisenv()["MAPSET"]
 
-	if input.find("@") >= 0:
-		id = input
-	else:
-		id = input + "@" + mapset
-		
-	sp = dataset_factory(_type, id)
 
-	if sp.is_in_db() == False:
-		core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+def export_stds(input, output, compression, workdir, where, format_="pack", 
+                type_="strds"):
+    """
+            !Export space time datasets as tar archive with optional compression
 
-	# Save current working directory path
-	old_cwd = os.getcwd()
+            This method should be used to export space time datasets 
+            of type raster and vector as tar archive that can be reimported 
+            with the method import_stds().
 
-	# Create the temporary directory and jump into it
-	new_cwd = tempfile.mkdtemp(dir=workdir)
-	os.chdir(new_cwd)
+            @param input: The name of the space time dataset to export
+            @param output: The name of the archive file
+            @param compression: The compression of the archive file:
+              * "no"  no compression
+              * "gzip" GNU zip compression
+              * "bzip2" Bzip compression
+            @param workdir: The working directory used for extraction and packing
+            @param where: The temporal WHERE SQL statement to select a subset 
+                          of maps from the space time dataset
+            @param format_: The export format:
+              * "GTiff" Geotiff format, only for raster maps
+              * "pack" The GRASS raster, 3D raster or vector Pack format, 
+                       this is the default setting
+              * "GML" GML file export format, only for vector maps, 
+                      v.out.ogr export option
+            @param type_: The space time dataset type
+              * "strds" Space time raster dataset
+              * "str3ds" Space time 3D raster dataset
+              * "stvds" Space time vector dataset
+    """
+    mapset = core.gisenv()["MAPSET"]
 
-	sp.select()
-	   
-	if _type == "strds":
-		columns = "name,start_time,end_time,min,max,datatype"
-	elif _type == "stvds":
-		columns = "name,start_time,end_time,layer"
-	else:
-		columns = "name,start_time,end_time"
-	rows = sp.get_registered_maps(columns, where, "start_time", None)
+    if input.find("@") >= 0:
+        id = input
+    else:
+        id = input + "@" + mapset
 
-	if compression == "gzip":
-		flag = "w:gz"
-	elif compression == "bzip2":
-		flag = "w:bz2"
-	else:
-		flag = "w:"
+    sp = dataset_factory(type_, id)
 
-	# Open the tar archive to add the files
-	tar = tarfile.open(tmp_tar_file_name, flag)
-	list_file = open(list_file_name, "w")
+    if sp.is_in_db() == False:
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
-	fs = "|"
+    # Save current working directory path
+    old_cwd = os.getcwd()
 
-	if rows:
-		if _type == "strds":
-			if _format == "GTiff":
-				_export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs)
-			else:
-				_export_raster_maps(rows, tar, list_file, new_cwd, fs)
-		elif _type == "stvds":
-			if _format == "GML":
-				_export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs)
-			else:
-				_export_vector_maps(rows, tar, list_file, new_cwd, fs)
-		elif _type == "str3ds":
-			_export_raster3d_maps(rows, tar, list_file, new_cwd, fs)
-		
-	list_file.close()
+    # Create the temporary directory and jump into it
+    new_cwd = tempfile.mkdtemp(dir=workdir)
+    os.chdir(new_cwd)
 
-	# Write projection and metadata
-	proj = core.read_command("g.proj", flags="j")
+    sp.select()
 
-	proj_file = open(proj_file_name, "w")
-	proj_file.write(proj)
-	proj_file.close()
+    if type_ == "strds":
+        columns = "name,start_time,end_time,min,max,datatype"
+    elif type_ == "stvds":
+        columns = "name,start_time,end_time,layer"
+    else:
+        columns = "name,start_time,end_time"
+    rows = sp.get_registered_maps(columns, where, "start_time", None)
 
-	init_file = open(init_file_name, "w")
-	# Create the init string
-	string = ""
-	string += "%s=%s\n" % ("stds_type", sp.get_type()) # This is optional, if not present strds will be assumed for backward compatibility
-	string += "%s=%s\n" % ("format", _format) # This is optional, if not present gtiff will be assumed for backward compatibility
-	string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
-	string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
-	string += "%s=%s\n" % ("number_of_maps", sp.metadata.get_number_of_maps())
-	north, south, east, west, top, bottom = sp.get_spatial_extent()
-	string += "%s=%s\n" % ("north", north)
-	string += "%s=%s\n" % ("south", south)
-	string += "%s=%s\n" % ("east", east)
-	string += "%s=%s\n" % ("west", west)
-	init_file.write(string)
-	init_file.close()
+    if compression == "gzip":
+        flag = "w:gz"
+    elif compression == "bzip2":
+        flag = "w:bz2"
+    else:
+        flag = "w:"
 
-	metadata = core.read_command("t.info", type=_type, input=id)
-	metadata_file = open(metadata_file_name, "w")
-	metadata_file.write(metadata)
-	metadata_file.close()
+    # Open the tar archive to add the files
+    tar = tarfile.open(tmp_tar_file_name, flag)
+    list_file = open(list_file_name, "w")
 
-	read_file = open(read_file_name, "w")
-	if _type == "strds":
-		read_file.write("This space time raster dataset was exported with t.rast.export of GRASS GIS 7\n")
-	elif _type == "stvds":
-		read_file.write("This space time vector dataset was exported with t.vect.export of GRASS GIS 7\n")
-	elif _type == "str3ds":
-		read_file.write("This space time 3D raster dataset was exported with t.rast3d.export of GRASS GIS 7\n")
-	read_file.write("\n")
-	read_file.write("Files:\n")
-	if _type == "strds":
-		if _format == "GTiff":
-					#123456789012345678901234567890
-			read_file.write("       *.tif  -- GeoTIFF raster files\n")
-			read_file.write("     *.color  -- GRASS GIS raster color rules\n")
-		elif _format == "pack":
-			read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
-	elif _type == "stvds":
-					#123456789012345678901234567890
-		if _format == "GML":
-			read_file.write("       *.xml  -- Vector GML files\n")
-		else:
-			read_file.write("      *.pack  -- GRASS vector files packed with v.pack\n")
-	elif _type == "str3ds":
-		read_file.write("      *.pack  -- GRASS 3D raster files packed with r3.pack\n")
-	read_file.write("%13s -- Projection information in PROJ.4 format\n" % (proj_file_name))
-	read_file.write("%13s -- GRASS GIS space time %s dataset information\n" % (init_file_name, sp.get_new_map_instance(None).get_type()))
-	read_file.write("%13s -- Time series file, lists all maps by name with interval\n"  % (list_file_name))
-	read_file.write("                 time stamps in ISO-Format. Field separator is |\n")
-	read_file.write("%13s -- Projection information in PROJ.4 format\n" % (metadata_file_name))
-	read_file.write("%13s -- This file\n" % (read_file_name))
-	read_file.close()
+    fs = "|"
 
-	# Append the file list
-	tar.add(list_file_name)
-	tar.add(proj_file_name)
-	tar.add(init_file_name)
-	tar.add(read_file_name)
-	tar.add(metadata_file_name)
-	tar.close()
+    if rows:
+        if type_ == "strds":
+            if format_ == "GTiff":
+                _export_raster_maps_as_geotiff(
+                    rows, tar, list_file, new_cwd, fs)
+            else:
+                _export_raster_maps(rows, tar, list_file, new_cwd, fs)
+        elif type_ == "stvds":
+            if format_ == "GML":
+                _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs)
+            else:
+                _export_vector_maps(rows, tar, list_file, new_cwd, fs)
+        elif type_ == "str3ds":
+            _export_raster3d_maps(rows, tar, list_file, new_cwd, fs)
 
-	os.chdir(old_cwd)
+    list_file.close()
 
-	# Move the archive to its destination
-	shutil.move(os.path.join(new_cwd, tmp_tar_file_name), output)
+    # Write projection and metadata
+    proj = core.read_command("g.proj", flags="j")
 
-	# Remove the temporary created working directory
-	shutil.rmtree(new_cwd)
+    proj_file = open(proj_file_name, "w")
+    proj_file.write(proj)
+    proj_file.close()
 
+    init_file = open(init_file_name, "w")
+    # Create the init string
+    string = ""
+     # This is optional, if not present strds will be assumed for backward 
+     # compatibility
+    string += "%s=%s\n" % ("stds_type", sp.get_type()) 
+     # This is optional, if not present gtiff will be assumed for 
+     # backward compatibility
+    string += "%s=%s\n" % ("format", format_) 
+    string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
+    string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
+    string += "%s=%s\n" % ("number_of_maps", sp.metadata.get_number_of_maps())
+    north, south, east, west, top, bottom = sp.get_spatial_extent()
+    string += "%s=%s\n" % ("north", north)
+    string += "%s=%s\n" % ("south", south)
+    string += "%s=%s\n" % ("east", east)
+    string += "%s=%s\n" % ("west", west)
+    init_file.write(string)
+    init_file.close()
+
+    metadata = core.read_command("t.info", type=type_, input=id)
+    metadata_file = open(metadata_file_name, "w")
+    metadata_file.write(metadata)
+    metadata_file.close()
+
+    read_file = open(read_file_name, "w")
+    if type_ == "strds":
+        read_file.write("This space time raster dataset was exported with "
+                        "t.rast.export of GRASS GIS 7\n")
+    elif type_ == "stvds":
+        read_file.write("This space time vector dataset was exported with "
+                        "t.vect.export of GRASS GIS 7\n")
+    elif type_ == "str3ds":
+        read_file.write("This space time 3D raster dataset was exported "
+                        "with t.rast3d.export of GRASS GIS 7\n")
+    read_file.write("\n")
+    read_file.write("Files:\n")
+    if type_ == "strds":
+        if format_ == "GTiff":
+                                #123456789012345678901234567890
+            read_file.write("       *.tif  -- GeoTIFF raster files\n")
+            read_file.write("     *.color  -- GRASS GIS raster color rules\n")
+        elif format_ == "pack":
+            read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
+    elif type_ == "stvds":
+                                #123456789012345678901234567890
+        if format_ == "GML":
+            read_file.write("       *.xml  -- Vector GML files\n")
+        else:
+            read_file.write("      *.pack  -- GRASS vector files packed with v.pack\n")
+    elif type_ == "str3ds":
+        read_file.write("      *.pack  -- GRASS 3D raster files packed with r3.pack\n")
+    read_file.write("%13s -- Projection information in PROJ.4 format\n" %
+                    (proj_file_name))
+    read_file.write("%13s -- GRASS GIS space time %s dataset information\n" %
+                    (init_file_name, sp.get_new_map_instance(None).get_type()))
+    read_file.write("%13s -- Time series file, lists all maps by name "
+                    "with interval\n" % (list_file_name))
+    read_file.write("                 time stamps in ISO-Format. Field separator is |\n")
+    read_file.write("%13s -- Projection information in PROJ.4 format\n" %
+                    (metadata_file_name))
+    read_file.write("%13s -- This file\n" % (read_file_name))
+    read_file.close()
+
+    # Append the file list
+    tar.add(list_file_name)
+    tar.add(proj_file_name)
+    tar.add(init_file_name)
+    tar.add(read_file_name)
+    tar.add(metadata_file_name)
+    tar.close()
+
+    os.chdir(old_cwd)
+
+    # Move the archive to its destination
+    shutil.move(os.path.join(new_cwd, tmp_tar_file_name), output)
+
+    # Remove the temporary created working directory
+    shutil.rmtree(new_cwd)

Modified: grass/trunk/lib/python/temporal/stds_import.py
===================================================================
--- grass/trunk/lib/python/temporal/stds_import.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/stds_import.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -19,7 +19,7 @@
 exp=True
 overr=False
 create=False
-tgis.import_stds(input, output, extrdir, title, descr, location, 
+tgis.import_stds(input, output, extrdir, title, descr, location,
                 link, exp, overr, create, "strds")
 ...
 @endcode
@@ -51,317 +51,345 @@
 imported_maps = {}
 
 ############################################################################
+
 def _import_raster_maps_from_geotiff(maplist, overr, exp, location, link):
-	impflags = ""
-	if overr:
-		impflags += "o"
-	if exp or location:
-		impflags += "e"
-	for row in maplist:
-		name = row["name"]
-		filename = str(row["name"]) + ".tif"
+    impflags = ""
+    if overr:
+        impflags += "o"
+    if exp or location:
+        impflags += "e"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".tif"
 
-		if link:
-			ret = core.run_command("r.external", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite())
-		else:
-			ret = core.run_command("r.in.gdal", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite())
+        if link:
+            ret = core.run_command("r.external", input=filename,
+                                   output=name,
+                                   flags=impflags,
+                                   overwrite=core.overwrite())
+        else:
+            ret = core.run_command("r.in.gdal", input=filename,
+                                   output=name,
+                                   flags=impflags,
+                                   overwrite=core.overwrite())
 
-		if ret != 0:
-			core.fatal(_("Unable to import/link raster map <%s>.") % name)
+        if ret != 0:
+            core.fatal(_("Unable to import/link raster map <%s>.") % name)
 
-		# Set the color rules if present
-		filename = str(row["name"]) + ".color"
-		if os.path.isfile(filename):
-			ret = core.run_command("r.colors", map = name,
-						rules = filename,
-						overwrite = core.overwrite())
-			if ret != 0:
-				core.fatal(_("Unable to set the color rules for raster map <%s>.") % name)
-                                                        
+        # Set the color rules if present
+        filename = str(row["name"]) + ".color"
+        if os.path.isfile(filename):
+            ret = core.run_command("r.colors", map=name,
+                                   rules=filename,
+                                   overwrite=core.overwrite())
+            if ret != 0:
+                core.fatal(_("Unable to set the color rules for "
+                             "raster map <%s>.") % name)
+
 ############################################################################
+
 def _import_raster_maps(maplist):
-	# We need to disable the projection check because of its simple implementation
-	impflags = "o"
-	for row in maplist:
-		name = row["name"]
-		filename = str(row["name"]) + ".pack"
-		ret = core.run_command("r.unpack", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite(),
-						verbose = True)
+    # We need to disable the projection check because of its 
+    # simple implementation
+    impflags = "o"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".pack"
+        ret = core.run_command("r.unpack", input=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite(),
+                               verbose=True)
 
-		if ret != 0:
-			core.fatal(_("Unable to unpack raster map <%s>.") % name)
+        if ret != 0:
+            core.fatal(_("Unable to unpack raster map <%s>.") % name)
 
 ############################################################################
+
 def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
-        impflags = "o"
-        if exp or location:
-                impflags += "e"
-        for row in maplist:
-                name = row["name"]
-                filename = str(row["name"]) + ".xml"
+    impflags = "o"
+    if exp or location:
+        impflags += "e"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".xml"
 
-                ret = core.run_command("v.in.ogr", dsn = filename,
-                                        output = name,
-                                        flags = impflags,
-                                        overwrite = core.overwrite())
+        ret = core.run_command("v.in.ogr", dsn=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite())
 
-                if ret != 0:
-                        core.fatal(_("Unable to import vector map <%s>.") % name)
-                        
+        if ret != 0:
+            core.fatal(_("Unable to import vector map <%s>.") % name)
+
 ############################################################################
+
 def _import_vector_maps(maplist):
-        # We need to disable the projection check because of its simple implementation
-        impflags = "o"
-        for row in maplist:
-        	# Separate the name from the layer
-                name = row["name"].split(":")[0]
-                # Import only unique maps
-                if name in imported_maps:
-                    continue
-                filename = name + ".pack"
-                ret = core.run_command("v.unpack", input = filename,
-                                                output = name,
-                                                flags = impflags,
-                                                overwrite = core.overwrite(),
-                                                verbose = True)
+    # We need to disable the projection check because of its 
+    # simple implementation
+    impflags = "o"
+    for row in maplist:
+        # Separate the name from the layer
+        name = row["name"].split(":")[0]
+        # Import only unique maps
+        if name in imported_maps:
+            continue
+        filename = name + ".pack"
+        ret = core.run_command("v.unpack", input=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite(),
+                               verbose=True)
 
-                if ret != 0:
-                        core.fatal(_("Unable to unpack vector map <%s>.") % name)
-                
-                imported_maps[name] = name
+        if ret != 0:
+            core.fatal(_("Unable to unpack vector map <%s>.") % name)
+
+        imported_maps[name] = name
 ############################################################################
 
-def import_stds(input, output, extrdir, title = None, descr = None, location = None,
-                link = False, exp = False, overr = False, create = False, stds_type = "strds"):
-	"""
-		!Import space time datasets of type raster and vector
-		
-		@param input Name of the input archive file
-		@param output The name of the output space time dataset
-		@param extrdir The extraction directory
-		@param title The title of the new created space time dataset
-		@param description The description of the new created space time dataset
-		@param location The name of the location that should be created, 
-		                maps are imported into this location
-		@param link Switch to link raster maps instead importing them
-		@param exp Extend location extents based on new dataset
-		@param overr Override projection (use location's projection)
-		@param create Create the location specified by the "location" parameter and exit. 
-		              Do not import the space time datasets.
-		@param stds_type The type of the space time dataset that should be imported
-	"""
+def import_stds(
+    input, output, extrdir, title=None, descr=None, location=None,
+        link=False, exp=False, overr=False, create=False, stds_type="strds"):
+    """!Import space time datasets of type raster and vector
 
-	core.set_raise_on_error(True)
+        @param input: Name of the input archive file
+        @param output: The name of the output space time dataset
+        @param extrdir: The extraction directory
+        @param title: The title of the newly created space time dataset
+        @param descr: The description of the newly created
+                            space time dataset
+        @param location: The name of the location that should be created,
+                        maps are imported into this location
+        @param link: Switch to link raster maps instead importing them
+        @param exp: Extend location extents based on new dataset
+        @param overr: Override projection (use location's projection)
+        @param create: Create the location specified by the "location" 
+                      parameter and exit.
+                      Do not import the space time datasets.
+        @param stds_type: The type of the space time dataset that 
+                          should be imported
+    """
 
-	# Check if input file and extraction directory exits
-	if not os.path.exists(input):
-		core.fatal(_("Space time raster dataset archive <%s> not found") % input)
-	if not create and not os.path.exists(extrdir):
-		core.fatal(_("Extraction directory <%s> not found") % extrdir)
+    core.set_raise_on_error(True)
 
-	tar = tarfile.open(name = input, mode = 'r')
+    # Check if input file and extraction directory exits
+    if not os.path.exists(input):
+        core.fatal(_("Space time raster dataset archive <%s> not found")
+                   % input)
+    if not create and not os.path.exists(extrdir):
+        core.fatal(_("Extraction directory <%s> not found") % extrdir)
 
-	# Check for important files
-	members = tar.getnames()
+    tar = tarfile.open(name=input, mode='r')
 
-	if init_file_name not in members:
-		core.fatal(_("Unable to find init file <%s>") % init_file_name)
-	if list_file_name not in members:
-		core.fatal(_("Unable to find list file <%s>") % list_file_name)
-	if proj_file_name not in members:
-		core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
+    # Check for important files
+    members = tar.getnames()
 
-	tar.extractall(path = extrdir)
-	tar.close()
+    if init_file_name not in members:
+        core.fatal(_("Unable to find init file <%s>") % init_file_name)
+    if list_file_name not in members:
+        core.fatal(_("Unable to find list file <%s>") % list_file_name)
+    if proj_file_name not in members:
+        core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
 
-	# Save current working directory path
-	old_cwd = os.getcwd()
+    tar.extractall(path=extrdir)
+    tar.close()
 
-	# Switch into the data directory
-	os.chdir(extrdir)
+    # Save current working directory path
+    old_cwd = os.getcwd()
 
-	# Check projection information
-	if not location:
-		temp_name = core.tempfile()
-		temp_file = open(temp_name, "w")
-		proj_name = os.path.abspath(proj_file_name)
+    # Switch into the data directory
+    os.chdir(extrdir)
 
-		p = core.start_command("g.proj", flags = "j", stdout = temp_file)
-		p.communicate()
-		temp_file.close()
+    # Check projection information
+    if not location:
+        temp_name = core.tempfile()
+        temp_file = open(temp_name, "w")
+        proj_name = os.path.abspath(proj_file_name)
 
-		if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
-			if overr:
-				core.warning(_("Projection information does not match. Proceeding..."))
-			else:
-				core.fatal(_("Projection information does not match. Aborting."))
+        p = core.start_command("g.proj", flags="j", stdout=temp_file)
+        p.communicate()
+        temp_file.close()
 
-	# Create a new location based on the projection information and switch into it
-	old_env = core.gisenv()
-	if location:
-		try:
-			proj4_string = open(proj_file_name, 'r').read()
-			core.create_location(dbase = old_env["GISDBASE"],
-								  location = location,
-								  proj4 = proj4_string)
-			# Just create a new location and return
-			if create:
-				os.chdir(old_cwd)
-				return
-		except Exception as e:
-				core.fatal(_("Unable to create location %s. Reason: %s") % (location, str(e)))
-		# Switch to the new created location
-		ret = core.run_command("g.mapset", mapset = "PERMANENT",
-					location = location,
-					gisdbase = old_env["GISDBASE"])
-		if ret != 0:
-			core.fatal(_("Unable to switch to location %s") % location)
-		# create default database connection
-		ret = core.run_command("t.connect", flags = "d")
-		if ret != 0:
-			core.fatal(_("Unable to create default temporal database in new location %s") % location)
+        if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
+            if overr:
+                core.warning(_("Projection information does not match. "
+                               "Proceeding..."))
+            else:
+                core.fatal(_("Projection information does not match. Aborting."))
 
-	try:
-		# Make sure the temporal database exists
-		create_temporal_database()
+    # Create a new location based on the projection information and switch into it
+    old_env = core.gisenv()
+    if location:
+        try:
+            proj4_string = open(proj_file_name, 'r').read()
+            core.create_location(dbase=old_env["GISDBASE"],
+                                 location=location,
+                                 proj4=proj4_string)
+            # Just create a new location and return
+            if create:
+                os.chdir(old_cwd)
+                return
+        except Exception as e:
+            core.fatal(_("Unable to create location %s. Reason: %s")
+                       % (location, str(e)))
+        # Switch to the newly created location
+        ret = core.run_command("g.mapset", mapset="PERMANENT",
+                               location=location,
+                               gisdbase=old_env["GISDBASE"])
+        if ret != 0:
+            core.fatal(_("Unable to switch to location %s") % location)
+        # create default database connection
+        ret = core.run_command("t.connect", flags="d")
+        if ret != 0:
+            core.fatal(_("Unable to create default temporal database "
+                         "in new location %s") % location)
 
-		fs = "|"
-		maplist = []
-		mapset = core.gisenv()["MAPSET"]
-		list_file = open(list_file_name, "r")
+    try:
+        # Make sure the temporal database exists
+        create_temporal_database()
 
-		# Read the map list from file
-		line_count = 0
-		while True:
-			line = list_file.readline()
-			if not line:
-				break
+        fs = "|"
+        maplist = []
+        mapset = core.gisenv()["MAPSET"]
+        list_file = open(list_file_name, "r")
 
-			line_list = line.split(fs)
+        # Read the map list from file
+        line_count = 0
+        while True:
+            line = list_file.readline()
+            if not line:
+                break
 
-			mapname = line_list[0].strip()
-			mapid = mapname + "@" + mapset
+            line_list = line.split(fs)
 
-			row = {}
-			row["name"] = mapname
-			row["id"] = mapid
-			row["start"] = line_list[1].strip()
-			row["end"] = line_list[2].strip()
+            mapname = line_list[0].strip()
+            mapid = mapname + "@" + mapset
 
-			maplist.append(row)
-			line_count += 1
+            row = {}
+            row["name"] = mapname
+            row["id"] = mapid
+            row["start"] = line_list[1].strip()
+            row["end"] = line_list[2].strip()
 
-		list_file.close()
+            maplist.append(row)
+            line_count += 1
 
-		# Read the init file
-		fs = "="
-		init = {}
-		init_file = open(init_file_name, "r")
-		while True:
-			line = init_file.readline()
-			if not line:
-				break
+        list_file.close()
 
-			kv = line.split(fs)
-			init[kv[0]] = kv[1].strip()
+        # Read the init file
+        fs = "="
+        init = {}
+        init_file = open(init_file_name, "r")
+        while True:
+            line = init_file.readline()
+            if not line:
+                break
 
-		init_file.close()
+            kv = line.split(fs)
+            init[kv[0]] = kv[1].strip()
 
-		if not init.has_key("temporal_type") or \
-		   not init.has_key("semantic_type") or \
-		   not init.has_key("number_of_maps"):
-			core.fatal(_("Key words %s, %s or %s not found in init file.") %
-			("temporal_type", "semantic_type", "number_of_maps"))
+        init_file.close()
 
-		if line_count != int(init["number_of_maps"]):
-			core.fatal(_("Number of maps mismatch in init and list file."))
+        if "temporal_type" not in init or \
+           "semantic_type" not in init or \
+           "number_of_maps" not in init:
+            core.fatal(_("Key words %s, %s or %s not found in init file.") %
+                       ("temporal_type", "semantic_type", "number_of_maps"))
 
-		_format = "GTiff"
-		_type = "strds"
+        if line_count != int(init["number_of_maps"]):
+            core.fatal(_("Number of maps mismatch in init and list file."))
 
-		if init.has_key("stds_type"):
-			_type = init["stds_type"]
-		if init.has_key("format"):
-			_format = init["format"]
+        _format = "GTiff"
+        _type = "strds"
 
-		if stds_type != _type:
-			core.fatal(_("The archive file is of wrong space time dataset type"))
+        if "stds_type" in init:
+            _type = init["stds_type"]
+        if "format" in init:
+            _format = init["format"]
 
-		# Check the existence of the files 
-		if _format == "GTiff":
-			for row in maplist:
-				filename = str(row["name"]) + ".tif"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find geotiff raster file <%s> in archive.") % filename)
-		elif _format == "GML":
-			for row in maplist:
-				filename = str(row["name"]) + ".xml"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find GML vector file <%s> in archive.") % filename)
-		elif _format == "pack":
-			for row in maplist:
-				if _type == "stvds":
-					filename = str(row["name"].split(":")[0]) + ".pack"
-				else:
-					filename = str(row["name"]) + ".pack"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find GRASS package file <%s> in archive.") % filename)
-		else:
-			core.fatal(_("Unsupported input format"))
+        if stds_type != _type:
+            core.fatal(_("The archive file is of wrong space time dataset type"))
 
-		# Check the space time dataset
-		id = output + "@" + mapset
-		sp = dataset_factory(_type, id)
-		if sp.is_in_db() and core.overwrite() == False:
-			core.fatal(_("Space time %s dataset <%s> is already in the database. Use the overwrite flag.") % (_type, sp.get_id()))
+        # Check the existence of the files
+        if _format == "GTiff":
+            for row in maplist:
+                filename = str(row["name"]) + ".tif"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find geotiff raster file "
+                                 "<%s> in archive.") % filename)
+        elif _format == "GML":
+            for row in maplist:
+                filename = str(row["name"]) + ".xml"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find GML vector file "
+                                 "<%s> in archive.") % filename)
+        elif _format == "pack":
+            for row in maplist:
+                if _type == "stvds":
+                    filename = str(row["name"].split(":")[0]) + ".pack"
+                else:
+                    filename = str(row["name"]) + ".pack"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find GRASS package file "
+                                 "<%s> in archive.") % filename)
+        else:
+            core.fatal(_("Unsupported input format"))
 
-		# Import the maps
-		if _type == "strds":
-			if _format == "GTiff":
-				_import_raster_maps_from_geotiff(maplist, overr, exp, location, link)
-			if _format == "pack":
-				_import_raster_maps(maplist)
-                elif _type == "stvds":
-                        if _format == "GML":
-                                _import_vector_maps_from_gml(maplist, overr, exp, location, link)
-                        if _format == "pack":
-                                _import_vector_maps(maplist)
+        # Check the space time dataset
+        id = output + "@" + mapset
+        sp = dataset_factory(_type, id)
+        if sp.is_in_db() and core.overwrite() == False:
+            core.fatal(_("Space time %s dataset <%s> is already in the "
+                         "database. Use the overwrite flag.") % \
+                        (_type, sp.get_id()))
 
-		# Create the space time dataset
-		if sp.is_in_db() and core.overwrite() == True:
-			core.info(_("Overwrite space time %s dataset <%s> and unregister all maps.") % (sp.get_new_map_instance(None).get_type(), sp.get_id()))
-			sp.delete()
-			sp = sp.get_new_instance(id)
+        # Import the maps
+        if _type == "strds":
+            if _format == "GTiff":
+                _import_raster_maps_from_geotiff(
+                    maplist, overr, exp, location, link)
+            if _format == "pack":
+                _import_raster_maps(maplist)
+        elif _type == "stvds":
+            if _format == "GML":
+                _import_vector_maps_from_gml(
+                    maplist, overr, exp, location, link)
+            if _format == "pack":
+                _import_vector_maps(maplist)
 
-		temporal_type = init["temporal_type"]
-		semantic_type = init["semantic_type"]
-		core.verbose(_("Create space time %s dataset.") % sp.get_new_map_instance(None).get_type())
+        # Create the space time dataset
+        if sp.is_in_db() and core.overwrite() == True:
+            core.info(_("Overwrite space time %s dataset "
+                        "<%s> and unregister all maps.") % \
+                       (sp.get_new_map_instance(None).get_type(), sp.get_id()))
+            sp.delete()
+            sp = sp.get_new_instance(id)
 
-		sp.set_initial_values(temporal_type = temporal_type, semantic_type = semantic_type, title = title, description = descr)
-		sp.insert()
+        temporal_type = init["temporal_type"]
+        semantic_type = init["semantic_type"]
+        core.verbose(_("Create space time %s dataset.") %
+                     sp.get_new_map_instance(None).get_type())
 
-		# register the maps
-		fs = "|"
-		register_maps_in_space_time_dataset(type = sp.get_new_map_instance(None).get_type(),
-					 name = output, file = list_file_name, start = "file", end = "file", dbif = None, fs = fs)
+        sp.set_initial_values(temporal_type=temporal_type, 
+                              semantic_type=semantic_type, title=title, 
+                              description=descr)
+        sp.insert()
 
-		os.chdir(old_cwd)
-	except:
-		raise
+        # register the maps
+        fs = "|"
+        register_maps_in_space_time_dataset(
+            type=sp.get_new_map_instance(None).get_type(),
+            name=output, file=list_file_name, start="file", 
+            end="file", dbif=None, fs=fs)
 
-	# Make sure the location is switched back correctly
-	finally:
-		if location:
-			# Switch to the old location
-			ret = core.run_command("g.mapset", mapset = old_env["MAPSET"],
-						location = old_env["LOCATION_NAME"],
-						gisdbase = old_env["GISDBASE"])
+        os.chdir(old_cwd)
+    except:
+        raise
+
+    # Make sure the location is switched back correctly
+    finally:
+        if location:
+            # Switch to the old location
+            ret = core.run_command("g.mapset", mapset=old_env["MAPSET"],
+                                   location=old_env["LOCATION_NAME"],
+                                   gisdbase=old_env["GISDBASE"])

Modified: grass/trunk/lib/python/temporal/temporal_extent.py
===================================================================
--- grass/trunk/lib/python/temporal/temporal_extent.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/temporal_extent.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -6,11 +6,15 @@
 
 Usage:
 
+ at code
+
 >>> import grass.temporal as tgis
 >>> from datetime import datetime
 >>> t = tgis.RasterRelativeTime()
 >>> t = tgis.RasterAbsoluteTime()
 
+ at endcode
+
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -34,8 +38,9 @@
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.AbstractTemporalExtent(table="raster_absolute_time",
+        @code
+        
+        >>> A = AbstractTemporalExtent(table="raster_absolute_time",
         ... ident="soil at PERMANENT", start_time=datetime(2001, 01, 01),
         ... end_time=datetime(2005,01,01) )
         >>> A.id
@@ -51,7 +56,7 @@
         start_time=2001-01-01 00:00:00
         end_time=2005-01-01 00:00:00
         >>> # relative time
-        >>> A = tgis.AbstractTemporalExtent(table="raster_absolute_time",
+        >>> A = AbstractTemporalExtent(table="raster_absolute_time",
         ... ident="soil at PERMANENT", start_time=0, end_time=1 )
         >>> A.id
         'soil at PERMANENT'
@@ -65,6 +70,8 @@
         >>> A.print_shell_info()
         start_time=0
         end_time=1
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None):
 
@@ -77,21 +84,25 @@
     def starts(self, extent):
         """!Return True if this temporal extent (A) starts at the start of the 
            provided temporal extent (B) and finishes within it
+           @verbatim
            A  |-----|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent starts
            
            Usage:
            
+           @code
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=6 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.starts(B)
            True
            >>> B.starts(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -105,20 +116,25 @@
     def started(self, extent):
         """!Return True if this temporal extent (A) started at the start of the 
            provided temporal extent (B) and finishes after it
+           @verbatim
            A  |---------|
            B  |-----|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent started
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=6 )
            >>> A.started(B)
            True
            >>> B.started(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -132,20 +148,25 @@
     def finishes(self, extent):
         """!Return True if this temporal extent (A) starts after the start of the 
            provided temporal extent (B) and finishes with it
+           @verbatim
            A      |-----|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent finishes
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.finishes(B)
            True
            >>> B.finishes(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -159,20 +180,25 @@
     def finished(self, extent):
         """!Return True if this temporal extent (A) starts before the start of the 
            provided temporal extent (B) and finishes with it
+           @verbatim
            A  |---------|
            B      |-----|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent finishes
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=7 )
            >>> A.finished(B)
            True
            >>> B.finished(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -186,20 +212,25 @@
     def after(self, extent):
         """!Return True if this temporal extent (A) is located after the  
            provided temporal extent (B)
+           @verbatim
            A             |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is located before this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=8, end_time=9 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=8, end_time=9 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=7 )
            >>> A.after(B)
            True
            >>> B.after(A)
            False
+           
+           @endcode
         """
         if extent.D["end_time"] is None:
             if self.D["start_time"] > extent.D["start_time"]:
@@ -215,20 +246,25 @@
     def before(self, extent):
         """!Return True if this temporal extent (A) is located before the  
            provided temporal extent (B)
+           @verbatim
            A  |---------|
            B             |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is located after this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=8, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=8, end_time=9 )
            >>> A.before(B)
            True
            >>> B.before(A)
            False
+           
+           @endcode
         """
         if self.D["end_time"] is None:
             if self.D["start_time"] < extent.D["start_time"]:
@@ -244,29 +280,34 @@
     def adjacent(self, extent):
         """!Return True if this temporal extent (A) is a meeting neighbor the 
            provided temporal extent (B)
+           @verbatim
            A            |---------|
            B  |---------|
            A  |---------|
            B            |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is a meeting neighbor
                           of this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=7, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=7, end_time=9 )
            >>> A.adjacent(B)
            True
            >>> B.adjacent(A)
            True
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=3, end_time=5 )
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=3, end_time=5 )
            >>> A.adjacent(B)
            True
            >>> B.adjacent(A)
            True
+           
+           @endcode
         """
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
             return False
@@ -280,19 +321,26 @@
     def follows(self, extent):
         """!Return True if this temporal extent (A) follows the  
            provided temporal extent (B)
+           @verbatim
            A            |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is the predecessor
                           of this extent
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=3, end_time=5 )
+           Usage:
+           
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=3, end_time=5 )
            >>> A.follows(B)
            True
            >>> B.follows(A)
            False
+           
+           @endcode
         """
         if  extent.D["end_time"] is None:
             return False
@@ -305,21 +353,26 @@
     def precedes(self, extent):
         """!Return True if this temporal extent (A) precedes the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B            |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is the successor
                           of this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=7, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=7, end_time=9 )
            >>> A.precedes(B)
            True
            >>> B.precedes(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None:
             return False
@@ -332,20 +385,25 @@
     def during(self, extent):
         """!Return True if this temporal extent (A) is located during the provided 
            temporal extent (B)
+           @verbatim
            A   |-------|
            B  |---------|
-           
+           @endverbatim
+                      
            @param extent: The temporal extent object that contains this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=4, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=4, end_time=9 )
            >>> A.during(B)
            True
            >>> B.during(A)
            False
+           
+           @endcode
         """
         # Check single point of time in interval
         if  extent.D["end_time"] is None:
@@ -368,21 +426,26 @@
     def contains(self, extent):
         """!Return True if this temporal extent (A) contains the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B   |-------|
+           @endverbatim
            
            @param extent: The temporal extent object that is located 
                           during this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=4, end_time=9 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=8 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=4, end_time=9 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=8 )
            >>> A.contains(B)
            True
            >>> B.contains(A)
            False
+           
+           @endcode
         """
         # Check single point of time in interval
         if  self.D["end_time"] is None:
@@ -405,21 +468,26 @@
     def equivalent(self, extent):
         """!Return True if this temporal extent (A) is equivalent to the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is equivalent 
                           during this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=6 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=6 )
            >>> A.equivalent(B)
            True
            >>> B.equivalent(A)
            True
+           
+           @endcode
         """
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
             if self.D["start_time"] == extent.D["start_time"]:
@@ -439,21 +507,25 @@
     def overlapped(self, extent):
         """!Return True if this temporal extent (A) overlapped the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B    |---------|
-           
+           @endverbatim
            @param extent: The temporal extent object that is overlaps 
                           this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=8 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=8 )
            >>> A.overlapped(B)
            True
            >>> B.overlapped(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -468,21 +540,26 @@
     def overlaps(self, extent):
         """!Return True if this temporal extent (A) overlapps the provided 
            temporal extent (B)
+           @verbatim
            A    |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is overlapped 
                           this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=8 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=8 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.overlaps(B)
            True
            >>> B.overlaps(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -663,17 +740,20 @@
 ###############################################################################
 
 class RasterAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "raster_absolute_time",
             ident, start_time, end_time, timezone)
 
 class Raster3DAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "raster3d_absolute_time",
             ident, start_time, end_time, timezone)
 
 class VectorAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "vector_absolute_time",
             ident, start_time, end_time, timezone)
 
@@ -687,8 +767,9 @@
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.STDSAbsoluteTime(table="strds_absolute_time",
+        @code
+        
+        >>> A = STDSAbsoluteTime(table="strds_absolute_time",
         ... ident="strds at PERMANENT", start_time=datetime(2001, 01, 01),
         ... end_time=datetime(2005,01,01), granularity="1 days",
         ... map_time="interval")
@@ -713,6 +794,8 @@
         end_time=2005-01-01 00:00:00
         granularity=1 days
         map_time=interval
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  granularity=None, timezone=None, map_time=None):
@@ -808,9 +891,10 @@
         start_time and end_time must be of type integer
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> A = tgis.RelativeTemporalExtent(table="raster_absolute_time",
+        >>> A = RelativeTemporalExtent(table="raster_absolute_time",
         ... ident="soil at PERMANENT", start_time=0, end_time=1, unit="years")
         >>> A.id
         'soil at PERMANENT'
@@ -829,6 +913,8 @@
         start_time=0
         end_time=1
         unit=years
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  unit=None):
@@ -916,9 +1002,10 @@
         start_time and end_time must be of type integer
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> A = tgis.STDSRelativeTime(table="raster_absolute_time",
+        >>> A = STDSRelativeTime(table="raster_absolute_time",
         ... ident="soil at PERMANENT", start_time=0, end_time=1, unit="years",
         ... granularity=5, map_time="interval")
         >>> A.id
@@ -946,6 +1033,8 @@
         unit=years
         granularity=5
         map_time=interval
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  unit=None, granularity=None, map_time=None):

Modified: grass/trunk/lib/python/temporal/temporal_granularity.py
===================================================================
--- grass/trunk/lib/python/temporal/temporal_granularity.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/temporal_granularity.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -25,14 +25,14 @@
 
 ###############################################################################
 
-def compute_relative_time_granularity(maps):            
+
+def compute_relative_time_granularity(maps):
     """!Compute the relative time granularity
-        
-        Attention: The computation of the granularity is only correct in case of not
-        overlapping intervals. Hence a correct temporal topology is required for
-        computation.
-    
-	
+
+        Attention: The computation of the granularity 
+        is only correct in case of not overlapping intervals. 
+        Hence a correct temporal topology is required for computation.
+
         @param maps: a ordered by start_time list of map objects
     """
 
@@ -44,7 +44,7 @@
     for map in maps:
         start, end = map.get_valid_time()
         if start and end:
-            t =  abs(end - start)
+            t = abs(end - start)
             delta.append(int(t))
 
     # Compute the timedelta of the gaps
@@ -54,12 +54,13 @@
             if relation == "after":
                 start1, end1 = maps[i].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
-                # Gaps are between intervals, intervals and points, points and points
+                # Gaps are between intervals, intervals and 
+                # points, points and points
                 if end1 and start2:
-                    t =  abs(end1 - start2)
+                    t = abs(end1 - start2)
                     delta.append(int(t))
-                if  not end1 and start2:
-                    t =  abs(start1 - start2)
+                if not end1 and start2:
+                    t = abs(start1 - start2)
                     delta.append(int(t))
 
     delta.sort()
@@ -76,16 +77,16 @@
 
 ###############################################################################
 
-def compute_absolute_time_granularity(maps):                  
+
+def compute_absolute_time_granularity(maps):
     """!Compute the absolute time granularity
-        
-        Attention: The computation of the granularity is only correct in case of not
-        overlapping intervals. Hence a correct temporal topology is required for
-        computation.
-    
-	
+
+        Attention: The computation of the granularity 
+        is only correct in case of not overlapping intervals. 
+        Hence a correct temporal topology is required for computation.
+
         @param maps: a ordered by start_time list of map objects
-    """     
+    """
 
     has_seconds = False
     has_minutes = False
@@ -117,83 +118,85 @@
             if relation == "after":
                 start1, end1 = maps[i].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
-                # Gaps are between intervals, intervals and points, points and points
+                # Gaps are between intervals, intervals and 
+                # points, points and points
                 if end1 and start2:
                     delta.append(end1 - start2)
                     datetime_delta.append(compute_datetime_delta(end1, start2))
-                if  not end1 and start2:
+                if not end1 and start2:
                     delta.append(start2 - start1)
-                    datetime_delta.append(compute_datetime_delta(start1, start2))
+                    datetime_delta.append(compute_datetime_delta(
+                        start1, start2))
 
     # Check what changed
     dlist = []
     for d in datetime_delta:
-        if d.has_key("second") and d["second"] > 0:
+        if "second" in d and d["second"] > 0:
             has_seconds = True
-        if d.has_key("minute") and d["minute"] > 0:
+        if "minute" in d and d["minute"] > 0:
             has_minutes = True
-        if d.has_key("hour") and d["hour"] > 0:
+        if "hour" in d and d["hour"] > 0:
             has_hours = True
-        if d.has_key("day") and d["day"] > 0:
+        if "day" in d and d["day"] > 0:
             has_days = True
-        if d.has_key("month") and d["month"] > 0:
+        if "month" in d and d["month"] > 0:
             has_months = True
-        if d.has_key("year") and d["year"] > 0:
+        if "year" in d and d["year"] > 0:
             has_years = True
 
     # Create a list with a single time unit only
     if has_seconds:
         for d in datetime_delta:
-            if d.has_key("second"):
-                dlist.append(d["second"])   
-            elif d.has_key("minute"):
-                dlist.append(d["minute"] * 60)   
-            elif d.has_key("hour"):
-                dlist.append(d["hour"] * 3600)   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24 * 3600)   
+            if "second" in d:
+                dlist.append(d["second"])
+            elif "minute" in d:
+                dlist.append(d["minute"] * 60)
+            elif "hour" in d:
+                dlist.append(d["hour"] * 3600)
+            elif "day" in d:
+                dlist.append(d["day"] * 24 * 3600)
             else:
-                dlist.append(d["max_days"] * 24 * 3600)   
-        use_seconds = True        
+                dlist.append(d["max_days"] * 24 * 3600)
+        use_seconds = True
     elif has_minutes:
         for d in datetime_delta:
-            if d.has_key("minute"):
-                dlist.append(d["minute"])   
-            elif d.has_key("hour"):
-                dlist.append(d["hour"] * 60)   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24 * 60)   
+            if "minute" in d:
+                dlist.append(d["minute"])
+            elif "hour" in d:
+                dlist.append(d["hour"] * 60)
+            elif "day" in d:
+                dlist.append(d["day"] * 24 * 60)
             else:
-                dlist.append(d["max_days"] * 24 * 60)   
-        use_minutes = True        
+                dlist.append(d["max_days"] * 24 * 60)
+        use_minutes = True
     elif has_hours:
         for d in datetime_delta:
-            if d.has_key("hour"):
-                dlist.append(d["hour"])   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24)   
+            if "hour" in d:
+                dlist.append(d["hour"])
+            elif "day" in d:
+                dlist.append(d["day"] * 24)
             else:
-                dlist.append(d["max_days"] * 24)   
-        use_hours = True        
+                dlist.append(d["max_days"] * 24)
+        use_hours = True
     elif has_days:
         for d in datetime_delta:
-            if d.has_key("day"):
-                dlist.append(d["day"])   
+            if "day" in d:
+                dlist.append(d["day"])
             else:
-                dlist.append(d["max_days"])   
-        use_days = True        
+                dlist.append(d["max_days"])
+        use_days = True
     elif has_months:
         for d in datetime_delta:
-            if d.has_key("month"):
-                dlist.append(d["month"])   
-            elif d.has_key("year"):
-                dlist.append(d["year"] * 12)   
-        use_months = True        
+            if "month" in d:
+                dlist.append(d["month"])
+            elif "year" in d:
+                dlist.append(d["year"] * 12)
+        use_months = True
     elif has_years:
         for d in datetime_delta:
-            if d.has_key("year"):
-                dlist.append(d["year"])   
-        use_years = True        
+            if "year" in d:
+                dlist.append(d["year"])
+        use_years = True
 
     dlist.sort()
     ulist = list(set(dlist))
@@ -229,20 +232,23 @@
 #  See http://www.opensource.org/licenses/mit-license.php
 # Error Codes:
 #   None
-def gcd(a,b):
-	"""!The Euclidean Algorithm """
-	a = abs(a)
-	b = abs(b)
-        while a:
-                a, b = b%a, a
-        return b
-        
+
+
+def gcd(a, b):
+    """!The Euclidean Algorithm """
+    a = abs(a)
+    b = abs(b)
+    while a:
+        a, b = b % a, a
+    return b
+
 ###############################################################################
 
+
 def gcd_list(list):
-	"""!Finds the GCD of numbers in a list.
-	Input: List of numbers you want to find the GCD of
-		E.g. [8, 24, 12]
-	Returns: GCD of all numbers
-	"""
-	return reduce(gcd, list)
+    """!Finds the GCD of numbers in a list.
+    Input: List of numbers you want to find the GCD of
+            E.g. [8, 24, 12]
+    Returns: GCD of all numbers
+    """
+    return reduce(gcd, list)

Modified: grass/trunk/lib/python/temporal/temporal_relationships.py
===================================================================
--- grass/trunk/lib/python/temporal/temporal_relationships.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/temporal_relationships.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -25,43 +25,45 @@
 
 ###############################################################################
 
+
 class temporal_topology_builder(object):
-    """!This class is designed to build the temporal topology based on a lists of maps
-    
-	Example:
-	@code
-	# We have a space time raster dataset and build a map list
-	# from all registered maps ordered by start time
-	maps = strds.get_registered_maps_as_objects()
-	
-	# Now lets build the temporal topology of the maps in the list
-	tb = temporal_topology_builder()
-	tb.build(maps)
-	
-	for _map in tb:
-	    _map.print_temporal_topology_info()
-	    _follows = _map.get_follows()
-	    if _follows:
-		for f in _follows:
-		    f.print_temporal_topology_info()
-	    
-	# Using the next and previous methods, we can iterate over the 
-	# topological related maps in this way
-	
-	_first = tb.get_first()
-	
-	while _first:
-	    _first.print_temporal_topology_info()
-	    _first = _first.next()
-	
-	# Dictionary like accessed
-	_map = tb["name at mapset"]
-	@endcode
-    
+    """!This class is designed to build the temporal topology 
+       based on a list of maps
+
+        Example:
+        @code
+        # We have a space time raster dataset and build a map list
+        # from all registered maps ordered by start time
+        maps = strds.get_registered_maps_as_objects()
+
+        # Now lets build the temporal topology of the maps in the list
+        tb = temporal_topology_builder()
+        tb.build(maps)
+
+        for _map in tb:
+            _map.print_temporal_topology_info()
+            _follows = _map.get_follows()
+            if _follows:
+                for f in _follows:
+                    f.print_temporal_topology_info()
+
+        # Using the next and previous methods, we can iterate over the
+        # topological related maps in this way
+
+        _first = tb.get_first()
+
+        while _first:
+            _first.print_temporal_topology_info()
+            _first = _first.next()
+
+        # Dictionary-like access
+        _map = tb["name at mapset"]
+        @endcode
+
     """
     def __init__(self):
-	self._reset()
-        
+        self._reset()
+
     def _reset(self):
         self._store = {}
         self._first = None
@@ -69,192 +71,214 @@
 
     def _set_first(self, first):
         self._first = first
-        self._insert(first)        
-        
+        self._insert(first)
+
     def _detect_first(self):
-	if len(self) > 0:
-	    _prev = self._store.values()[0]
-	    while _prev != None:
-		self._first = _prev
-		_prev = _prev.prev()
-		
+        if len(self) > 0:
+            prev_ = self._store.values()[0]
+            while prev_ is not None:
+                self._first = prev_
+                prev_ = prev_.temporal_prev()
+
     def _insert(self, t):
         self._store[t.get_id()] = t
-        
+
     def get_first(self):
-	"""!Return the first map with the earliest start time
-	
-	   @return The map with the earliest start time
-	"""
-	return self._first
+        """!Return the first map with the earliest start time
 
+           @return The map with the earliest start time
+        """
+        return self._first
+
     def _build_internal_iteratable(self, maps):
-	"""!Build an iteratable temporal topology structure for all maps in the list and store the maps internally
-	
-	   Basically the "next" and "prev" relations will be set in the temporal topology structure of each map
-	   The maps will be added to the object, so they can be accessed using the iterator of this class
-	   
-	   @param maps: A sorted (by start_time)list of abstract_dataset objects with initiated temporal extent
-	"""
-	self._build_iteratable(maps)
+        """!Build an iterable temporal topology structure for all maps in 
+           the list and store the maps internally
 
-	for _map in maps:
-	    self._insert(_map)
-	
-	# Detect the first map
-	self._detect_first()
-	
+           Basically the "next" and "prev" relations will be set in the 
+           temporal topology structure of each map
+           The maps will be added to the object, so they can be 
+           accessed using the iterator of this class
+
+           @param maps: A sorted (by start_time) list of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        self._build_iteratable(maps)
+
+        for _map in maps:
+            self._insert(_map)
+
+        # Detect the first map
+        self._detect_first()
+
     def _build_iteratable(self, maps):
-	"""!Build an iteratable temporal topology structure for all maps in the list
-	
-	   Basically the "next" and "prev" relations will be set in the temporal topology structure of each map.
-	   
-	   @param maps: A sorted (by start_time)list of abstract_dataset objects with initiated temporal extent
-	"""
-	for i in xrange(len(maps)):
-	    offset = i + 1
-	    for j in xrange(offset, len(maps)):		
-		# Get the temporal relationship
-		relation = maps[j].temporal_relation(maps[i])
-		
-		# Build the next reference
-		if relation != "equivalent" and relation != "started":
-		    maps[i].set_next(maps[j])
-		    break
-		
-	for _map in maps:
-	    _next = _map.next()
-	    if _next:
-		_next.set_prev(_map)
-	    _map.set_temporal_topology_build_true()
-	
+        """!Build an iterable temporal topology structure for 
+           all maps in the list
+
+           Basically the "next" and "prev" relations will be set in 
+           the temporal topology structure of each map.
+
+           @param maps: A sorted (by start_time) list of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        for i in xrange(len(maps)):
+            offset = i + 1
+            for j in xrange(offset, len(maps)):
+                # Get the temporal relationship
+                relation = maps[j].temporal_relation(maps[i])
+
+                # Build the next reference
+                if relation != "equivalent" and relation != "started":
+                    maps[i].set_temporal_next(maps[j])
+                    break
+
+        for map_ in maps:
+            next_ = map_.temporal_next()
+            if next_:
+                next_.set_temporal_prev(map_)
+            map_.set_temporal_topology_build_true()
+
     def build2(self, mapsA, mapsB):
-	"""!Build the temporal topology structure between two ordered lists of maps
-	
-	   This method builds the temporal topology from mapsA to mapsB and vice verse.
-	   The temporal topology structure of each map, defined in class temporal_map_relations,
-	   will be reseted and rebuild for mapsA and mapsB. 
-	   
-	   After building the temporal topology the modified map objects of mapsA can be accessed 
-	   in the same way as a dictionary using there id. The implemented iterator assures 
-	   the chronological iteration over the mapsA.	    
-	   
-	   @param mapsA: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	   @param mapsB: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	"""
-	
-	if mapsA == mapsB:
-	    self.build(mapsA, True)
-	    return
-	
-	for _map in mapsA:
-	    _map.reset_temporal_topology()
-	    
-	for _map in mapsB:
-	    _map.reset_temporal_topology()
-	
-	for i in xrange(len(mapsA)):
-	    for j in xrange(len(mapsB)):
-		
-		# Get the temporal relationship
-		relation = mapsB[j].temporal_relation(mapsA[i])
-		
-		if relation == "before":
-		    continue
-			    
-		if relation == "equivalent":
-		    mapsB[j].append_equivalent(mapsA[i])
-		    mapsA[i].append_equivalent(mapsB[j])
-		elif relation == "follows":
-		    mapsB[j].append_follows(mapsA[i])
-		    mapsA[i].append_precedes(mapsB[j])
-		elif relation == "precedes":
-		    mapsB[j].append_precedes(mapsA[i])
-		    mapsA[i].append_follows(mapsB[j])
-		elif relation == "during" or relation == "starts" or relation == "finishes":
-		    mapsB[j].append_during(mapsA[i])
-		    mapsA[i].append_contains(mapsB[j])
-		elif relation == "contains" or relation == "started" or relation == "finished":
-		    mapsB[j].append_contains(mapsA[i])
-		    mapsA[i].append_during(mapsB[j])
-		elif relation == "overlaps":
-		    mapsB[j].append_overlaps(mapsA[i])
-		    mapsA[i].append_overlapped(mapsB[j])
-		elif relation == "overlapped":
-		    mapsB[j].append_overlapped(mapsA[i])
-		    mapsA[i].append_overlaps(mapsB[j])
+        """!Build the temporal topology structure between 
+           two ordered lists of maps
 
-		# Break if the next map follows and the over-next maps is after
-		if relation == "follows":
-		    if j < len(mapsB) - 1:
-			relation = mapsB[j + 1].temporal_relation(mapsA[i])
-			if relation == "after":
-			    break
-		# Break if the the next map is after
-		if relation == "after":
-		    break 
-	
-	self._build_internal_iteratable(mapsA)
-	self._build_iteratable(mapsB)
-			    
+           This method builds the temporal topology from mapsA to 
+           mapsB and vice versa. The temporal topology structure of each map, 
+           defined in class temporal_map_relations,
+           will be reset and rebuilt for mapsA and mapsB.
+
+           After building the temporal topology the modified 
+           map objects of mapsA can be accessed
+           in the same way as a dictionary using their id. 
+           The implemented iterator assures
+           the chronological iteration over the mapsA.
+
+           @param mapsA: A sorted list (by start_time) of abstract_dataset 
+                         objects with initiated temporal extent
+           @param mapsB: A sorted list (by start_time) of abstract_dataset 
+                         objects with initiated temporal extent
+        """
+
+        if mapsA == mapsB:
+            self.build(mapsA, True)
+            return
+
+        for map_ in mapsA:
+            map_.reset_temporal_topology()
+
+        for map_ in mapsB:
+            map_.reset_temporal_topology()
+
+        for i in xrange(len(mapsA)):
+            for j in xrange(len(mapsB)):
+
+                # Get the temporal relationship
+                relation = mapsB[j].temporal_relation(mapsA[i])
+
+                if relation == "before":
+                    continue
+
+                if relation == "equivalent":
+                    mapsB[j].append_temporal_equivalent(mapsA[i])
+                    mapsA[i].append_temporal_equivalent(mapsB[j])
+                elif relation == "follows":
+                    mapsB[j].append_temporal_follows(mapsA[i])
+                    mapsA[i].append_temporal_precedes(mapsB[j])
+                elif relation == "precedes":
+                    mapsB[j].append_temporal_precedes(mapsA[i])
+                    mapsA[i].append_temporal_follows(mapsB[j])
+                elif relation == "during" or relation == "starts" or \
+                     relation == "finishes":
+                    mapsB[j].append_temporal_during(mapsA[i])
+                    mapsA[i].append_temporal_contains(mapsB[j])
+                elif relation == "contains" or relation == "started" or \
+                     relation == "finished":
+                    mapsB[j].append_temporal_contains(mapsA[i])
+                    mapsA[i].append_temporal_during(mapsB[j])
+                elif relation == "overlaps":
+                    mapsB[j].append_temporal_overlaps(mapsA[i])
+                    mapsA[i].append_temporal_overlapped(mapsB[j])
+                elif relation == "overlapped":
+                    mapsB[j].append_temporal_overlapped(mapsA[i])
+                    mapsA[i].append_temporal_overlaps(mapsB[j])
+
+                # Break if the next map follows and the over-next maps is after
+                if relation == "follows":
+                    if j < len(mapsB) - 1:
+                        relation = mapsB[j + 1].temporal_relation(mapsA[i])
+                        if relation == "after":
+                            break
+                # Break if the next map is after
+                if relation == "after":
+                    break
+
+        self._build_internal_iteratable(mapsA)
+        self._build_iteratable(mapsB)
+
     def build(self, maps):
-	"""!Build the temporal topology structure
-	
-	   This method builds the temporal topology based on all maps in the provided map list.
-	   The temporal topology structure of each map, defined in class temporal_map_relations,
-	   will be reseted and rebuild. 
-	   
-	   After building the temporal topology the modified map objects can be accessed 
-	   in the same way as a dictionary using there id. The implemented iterator assures 
-	   the chronological iteration over the maps.	   
-	   
-	   @param maps: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	"""
-	for _map in maps:
-	    _map.reset_temporal_topology()
-	
-	for i in xrange(len(maps)):
-	    offset = i + 1
-	    for j in xrange(offset, len(maps)):
-		
-		# Get the temporal relationship
-		relation = maps[j].temporal_relation(maps[i])
-			    
-		# The start time of map j is equal or later than map i
-		if relation == "equivalent":
-		    maps[j].append_equivalent(maps[i])
-		    maps[i].append_equivalent(maps[j])
-		elif relation == "follows":
-		    maps[j].append_follows(maps[i])
-		    maps[i].append_precedes(maps[j])
-		elif relation == "during" or relation == "starts" or relation == "finishes":
-		    maps[j].append_during(maps[i])
-		    maps[i].append_contains(maps[j])
-		elif relation == "started":
-		    # Consider equal start time, in case "started" map j contains map i
-		    maps[j].append_contains(maps[i])
-		    maps[i].append_during(maps[j])
-		elif relation == "overlaps":
-		    maps[j].append_overlaps(maps[i])
-		    maps[i].append_overlapped(maps[j])
+        """!Build the temporal topology structure
 
-		# Break if the last map follows
-		if relation == "follows":
-		    if j < len(maps) - 1:
-			relation = maps[j + 1].temporal_relation(maps[i])
-			if relation == "after":
-			    break
-		# Break if the the next map is after
-		if relation == "after":
-		    break 
-		    
-	self._build_internal_iteratable(maps)
-	
+           This method builds the temporal topology based on 
+           all maps in the provided map list.
+           The temporal topology structure of each map, 
+           defined in class temporal_map_relations,
+           will be reset and rebuilt.
+
+           After building the temporal topology the 
+           modified map objects can be accessed
+           in the same way as a dictionary using their id. 
+           The implemented iterator assures
+           the chronological iteration over the maps.
+
+           @param maps: A sorted list (by start_time) of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        for map_ in maps:
+            map_.reset_temporal_topology()
+
+        for i in xrange(len(maps)):
+            offset = i + 1
+            for j in xrange(offset, len(maps)):
+
+                # Get the temporal relationship
+                relation = maps[j].temporal_relation(maps[i])
+
+                # The start time of map j is equal or later than map i
+                if relation == "equivalent":
+                    maps[j].append_temporal_equivalent(maps[i])
+                    maps[i].append_temporal_equivalent(maps[j])
+                elif relation == "follows":
+                    maps[j].append_temporal_follows(maps[i])
+                    maps[i].append_temporal_precedes(maps[j])
+                elif relation == "during" or relation == "starts" or \
+                     relation == "finishes":
+                    maps[j].append_temporal_during(maps[i])
+                    maps[i].append_temporal_contains(maps[j])
+                elif relation == "started":
+                    # Consider equal start time, in case 
+                    # "started" map j contains map i
+                    maps[j].append_temporal_contains(maps[i])
+                    maps[i].append_temporal_during(maps[j])
+                elif relation == "overlaps":
+                    maps[j].append_temporal_overlaps(maps[i])
+                    maps[i].append_temporal_overlapped(maps[j])
+
+                # Break if the last map follows
+                if relation == "follows":
+                    if j < len(maps) - 1:
+                        relation = maps[j + 1].temporal_relation(maps[i])
+                        if relation == "after":
+                            break
+                # Break if the next map is after
+                if relation == "after":
+                    break
+
+        self._build_internal_iteratable(maps)
+
     def __iter__(self):
-	_start = self._first
-	while _start != None:
-	    yield _start
-	    _start = _start.next()
+        start_ = self._first
+        while start_ is not None:
+            yield start_
+            start_ = start_.temporal_next()
 
     def __getitem__(self, index):
         return self._store[index.get_id()]
@@ -269,86 +293,93 @@
 ###############################################################################
 
 def print_temporal_topology_relationships(maps1, maps2):
-    """!Print the temporal relation matrix of the temporal ordered map lists maps1 and maps2
-       to stdout.
-	
-	@param maps1: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@param maps2: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
+    """!Print the temporal relation matrix of the temporal ordered 
+       map lists maps1 and maps2 to stdout.
+
+        @param maps1: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @param maps2: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
     """
-    
+
     identical = False
     use_id = True
-    
+
     if maps1 == maps2:
-	identical = True
-	use_id = False
+        identical = True
+        use_id = False
 
     for i in range(len(maps1)):
-	if identical == True:
-	    start = i + 1
-	else:
-	    start = 0
-	for j in range(start, len(maps2)):
-	    relation = maps1[j].temporal_relation(maps2[i])
+        if identical == True:
+            start = i + 1
+        else:
+            start = 0
+        for j in range(start, len(maps2)):
+            relation = maps1[j].temporal_relation(maps2[i])
 
-	    if use_id == False:
-		print maps2[j].base.get_name(), relation, maps1[i].base.get_name()
-	    else:
-		print maps2[j].base.get_id(), relation, maps1[i].base.get_id()
+            if use_id == False:
+                print maps2[j].base.get_name(
+                ), relation, maps1[i].base.get_name()
+            else:
+                print maps2[j].base.get_id(), relation, maps1[i].base.get_id()
 
-	    # Break if the last map follows
-	    if relation == "follows":
-		if j < len(maps1) - 1:
-		    relation = maps1[j + 1].temporal_relation(maps2[i])
-		    if relation == "after":
-			break
-	    # Break if the the next map is after
-	    if relation == "after":
-		break
+            # Break if the last map follows
+            if relation == "follows":
+                if j < len(maps1) - 1:
+                    relation = maps1[j + 1].temporal_relation(maps2[i])
+                    if relation == "after":
+                        break
+            # Break if the next map is after
+            if relation == "after":
+                break
 
 ###############################################################################
 
+
 def count_temporal_topology_relationships(maps1, maps2):
     """!Count the temporal relations between the two lists of maps
 
-	The map lists must be ordered by start time. Temporal relations are counted 
-	by analyzing the sparse (upper right side in case maps1 == maps2) temporal relationships matrix.
+        The map lists must be ordered by start time. 
+        Temporal relations are counted by analyzing the sparse 
+        (upper right side in case maps1 == maps2) temporal relationships matrix.
 
-	@param maps1: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@param maps2: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@return A dictionary with counted temporal relationships
+        @param maps1: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @param maps2: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @return A dictionary with counted temporal relationships
     """
-    
+
     tcount = {}
     identical = False
-    
+
     if maps1 == maps2:
-	identical = True
+        identical = True
 
     for i in range(len(maps1)):
-	if identical == True:
-	    start = i + 1
-	else:
-	    start = 0
-	for j in range(start, len(maps2)):
-	    relation = maps1[j].temporal_relation(maps2[i])
+        if identical:
+            start = i + 1
+        else:
+            start = 0
+        for j in range(start, len(maps2)):
+            relation = maps1[j].temporal_relation(maps2[i])
 
-	    if relation == "before":
-		continue
-	    
-	    if tcount.has_key(relation):
-		tcount[relation] = tcount[relation] + 1
-	    else:
-		tcount[relation] = 1
+            if relation == "before":
+                continue
 
-	    # Break if the last map follows
-	    if relation == "follows":
-		if j < len(maps1) - 1:
-		    relation = maps1[j + 1].temporal_relation(maps2[i])
-		    if relation == "after":
-			break
-	    # Break if the the next map is after
-	    if relation == "after":
-		break  
+            if relation in tcount:
+                tcount[relation] = tcount[relation] + 1
+            else:
+                tcount[relation] = 1
 
+            # Break if the last map follows
+            if relation == "follows":
+                if j < len(maps1) - 1:
+                    relation = maps1[j + 1].temporal_relation(maps2[i])
+                    if relation == "after":
+                        break
+            # Break if the next map is after
+            if relation == "after":
+                break
+
     return tcount

Modified: grass/trunk/lib/python/temporal/unit_tests.py
===================================================================
--- grass/trunk/lib/python/temporal/unit_tests.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/unit_tests.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -32,7 +32,7 @@
 from ctypes import *
 
 # Uncomment this to detect the error
-#core.set_raise_on_error(True)
+core.set_raise_on_error(True)
 
 ###############################################################################
 
@@ -43,13 +43,13 @@
     dt = datetime(2001, 9, 1, 0, 0, 0)
     string = "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
 
-    dt1 = datetime(2003,2,18,12,5,0)
+    dt1 = datetime(2003, 2, 18, 12, 5, 0)
     dt2 = increment_datetime_by_string(dt, string)
 
     print dt
     print dt2
 
-    delta = dt1 -dt2
+    delta = dt1 - dt2
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("increment computation is wrong %s" % (delta))
@@ -59,13 +59,13 @@
     dt = datetime(2001, 11, 1, 0, 0, 0)
     string = "1 months"
 
-    dt1 = datetime(2001,12,1)
+    dt1 = datetime(2001, 12, 1)
     dt2 = increment_datetime_by_string(dt, string)
 
     print dt
     print dt2
 
-    delta = dt1 -dt2
+    delta = dt1 - dt2
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("increment computation is wrong %s" % (delta))
@@ -75,13 +75,13 @@
     dt = datetime(2001, 11, 1, 0, 0, 0)
     string = "13 months"
 
-    dt1 = datetime(2002,12,1)
+    dt1 = datetime(2002, 12, 1)
     dt2 = increment_datetime_by_string(dt, string)
 
     print dt
     print dt2
 
-    delta = dt1 -dt2
+    delta = dt1 - dt2
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("increment computation is wrong %s" % (delta))
@@ -91,13 +91,13 @@
     dt = datetime(2001, 1, 1, 0, 0, 0)
     string = "72 months"
 
-    dt1 = datetime(2007,1,1)
+    dt1 = datetime(2007, 1, 1)
     dt2 = increment_datetime_by_string(dt, string)
 
     print dt
     print dt2
 
-    delta = dt1 -dt2
+    delta = dt1 - dt2
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("increment computation is wrong %s" % (delta))
@@ -108,11 +108,11 @@
 
     # First test
     print "Test 1"
-    dt = datetime(2001, 8, 8, 12,30,30)
+    dt = datetime(2001, 8, 8, 12, 30, 30)
     result = adjust_datetime_to_granularity(dt, "5 seconds")
-    correct =  datetime(2001, 8, 8, 12,30,30)
+    correct = datetime(2001, 8, 8, 12, 30, 30)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -120,9 +120,9 @@
     # Second test
     print "Test 2"
     result = adjust_datetime_to_granularity(dt, "20 minutes")
-    correct =  datetime(2001, 8, 8, 12,30,00)
+    correct = datetime(2001, 8, 8, 12, 30, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -130,9 +130,9 @@
     # Third test
     print "Test 2"
     result = adjust_datetime_to_granularity(dt, "20 minutes")
-    correct =  datetime(2001, 8, 8, 12,30,00)
+    correct = datetime(2001, 8, 8, 12, 30, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -140,9 +140,9 @@
     # 4. test
     print "Test 4"
     result = adjust_datetime_to_granularity(dt, "3 hours")
-    correct =  datetime(2001, 8, 8, 12,00,00)
+    correct = datetime(2001, 8, 8, 12, 00, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -150,9 +150,9 @@
     # 5. test
     print "Test 5"
     result = adjust_datetime_to_granularity(dt, "5 days")
-    correct =  datetime(2001, 8, 8, 00,00,00)
+    correct = datetime(2001, 8, 8, 00, 00, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -160,9 +160,9 @@
     # 6. test
     print "Test 6"
     result = adjust_datetime_to_granularity(dt, "2 weeks")
-    correct =  datetime(2001, 8, 6, 00,00,00)
+    correct = datetime(2001, 8, 6, 00, 00, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -170,9 +170,9 @@
     # 7. test
     print "Test 7"
     result = adjust_datetime_to_granularity(dt, "6 months")
-    correct =  datetime(2001, 8, 1, 00,00,00)
+    correct = datetime(2001, 8, 1, 00, 00, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -180,19 +180,20 @@
     # 8. test
     print "Test 8"
     result = adjust_datetime_to_granularity(dt, "2 years")
-    correct =  datetime(2001, 1, 1, 00,00,00)
+    correct = datetime(2001, 1, 1, 00, 00, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
 
     # 9. test
     print "Test 9"
-    result = adjust_datetime_to_granularity(dt, "2 years, 3 months, 5 days, 3 hours, 3 minutes, 2 seconds")
-    correct =  datetime(2001, 8, 8, 12,30,30)
+    result = adjust_datetime_to_granularity(
+        dt, "2 years, 3 months, 5 days, 3 hours, 3 minutes, 2 seconds")
+    correct = datetime(2001, 8, 8, 12, 30, 30)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -200,9 +201,9 @@
     # 10. test
     print "Test 10"
     result = adjust_datetime_to_granularity(dt, "3 months, 5 days, 3 minutes")
-    correct =  datetime(2001, 8, 8, 12,30,00)
+    correct = datetime(2001, 8, 8, 12, 30, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -210,9 +211,9 @@
     # 11. test
     print "Test 11"
     result = adjust_datetime_to_granularity(dt, "3 weeks, 5 days")
-    correct =  datetime(2001, 8, 8, 00,00,00)
+    correct = datetime(2001, 8, 8, 00, 00, 00)
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta.days != 0 or delta.seconds != 0:
         core.fatal("Granularity adjustment computation is wrong %s" % (delta))
@@ -222,275 +223,274 @@
 def test_compute_datetime_delta():
 
     print "Test 1"
-    start = datetime(2001, 1, 1, 00,00,00)
-    end = datetime(2001, 1, 1, 00,00,00)
+    start = datetime(2001, 1, 1, 00, 00, 00)
+    end = datetime(2001, 1, 1, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["second"]
     correct = 0
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 2"
-    start = datetime(2001, 1, 1, 00,00,14)
-    end = datetime(2001, 1, 1, 00,00,44)
+    start = datetime(2001, 1, 1, 00, 00, 14)
+    end = datetime(2001, 1, 1, 00, 00, 44)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["second"]
     correct = 30
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 3"
-    start = datetime(2001, 1, 1, 00,00,44)
-    end = datetime(2001, 1, 1, 00,01,14)
+    start = datetime(2001, 1, 1, 00, 00, 44)
+    end = datetime(2001, 1, 1, 00, 01, 14)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["second"]
     correct = 30
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 4"
-    start = datetime(2001, 1, 1, 00,00,30)
-    end = datetime(2001, 1, 1, 00,05,30)
+    start = datetime(2001, 1, 1, 00, 00, 30)
+    end = datetime(2001, 1, 1, 00, 05, 30)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["second"]
     correct = 300
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 5"
-    start = datetime(2001, 1, 1, 00,00,00)
-    end = datetime(2001, 1, 1, 00,01,00)
+    start = datetime(2001, 1, 1, 00, 00, 00)
+    end = datetime(2001, 1, 1, 00, 01, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["minute"]
     correct = 1
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 6"
-    start = datetime(2011,10,31, 00,45,00)
-    end = datetime(2011,10,31, 01,45,00)
+    start = datetime(2011, 10, 31, 00, 45, 00)
+    end = datetime(2011, 10, 31, 01, 45, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["minute"]
     correct = 60
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 7"
-    start = datetime(2011,10,31, 00,45,00)
-    end = datetime(2011,10,31, 01,15,00)
+    start = datetime(2011, 10, 31, 00, 45, 00)
+    end = datetime(2011, 10, 31, 01, 15, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["minute"]
     correct = 30
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 8"
-    start = datetime(2011,10,31, 00,45,00)
-    end = datetime(2011,10,31, 12,15,00)
+    start = datetime(2011, 10, 31, 00, 45, 00)
+    end = datetime(2011, 10, 31, 12, 15, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["minute"]
     correct = 690
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 9"
-    start = datetime(2011,10,31, 00,00,00)
-    end = datetime(2011,10,31, 01,00,00)
+    start = datetime(2011, 10, 31, 00, 00, 00)
+    end = datetime(2011, 10, 31, 01, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["hour"]
     correct = 1
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 10"
-    start = datetime(2011,10,31, 00,00,00)
-    end = datetime(2011,11,01, 01,00,00)
+    start = datetime(2011, 10, 31, 00, 00, 00)
+    end = datetime(2011, 11, 01, 01, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["hour"]
     correct = 25
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 11"
-    start = datetime(2011,10,31, 12,00,00)
-    end = datetime(2011,11,01, 06,00,00)
+    start = datetime(2011, 10, 31, 12, 00, 00)
+    end = datetime(2011, 11, 01, 06, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["hour"]
     correct = 18
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 12"
-    start = datetime(2011,11,01, 00,00,00)
-    end = datetime(2011,12,01, 01,00,00)
+    start = datetime(2011, 11, 01, 00, 00, 00)
+    end = datetime(2011, 12, 01, 01, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["hour"]
     correct = 30 * 24 + 1
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
-
     print "Test 13"
-    start = datetime(2011,11,01, 00,00,00)
-    end = datetime(2011,11,05, 00,00,00)
+    start = datetime(2011, 11, 01, 00, 00, 00)
+    end = datetime(2011, 11, 05, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["day"]
     correct = 4
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 14"
-    start = datetime(2011,10,06, 00,00,00)
-    end = datetime(2011,11,05, 00,00,00)
+    start = datetime(2011, 10, 06, 00, 00, 00)
+    end = datetime(2011, 11, 05, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["day"]
     correct = 30
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 15"
-    start = datetime(2011,12,02, 00,00,00)
-    end = datetime(2012,01,01, 00,00,00)
+    start = datetime(2011, 12, 02, 00, 00, 00)
+    end = datetime(2012, 01, 01, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["day"]
     correct = 30
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 16"
-    start = datetime(2011,01,01, 00,00,00)
-    end = datetime(2011,02,01, 00,00,00)
+    start = datetime(2011, 01, 01, 00, 00, 00)
+    end = datetime(2011, 02, 01, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["month"]
     correct = 1
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 17"
-    start = datetime(2011,12,01, 00,00,00)
-    end = datetime(2012,01,01, 00,00,00)
+    start = datetime(2011, 12, 01, 00, 00, 00)
+    end = datetime(2012, 01, 01, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["month"]
     correct = 1
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 18"
-    start = datetime(2011,12,01, 00,00,00)
-    end = datetime(2012,06,01, 00,00,00)
+    start = datetime(2011, 12, 01, 00, 00, 00)
+    end = datetime(2012, 06, 01, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["month"]
     correct = 6
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 19"
-    start = datetime(2011,06,01, 00,00,00)
-    end = datetime(2021,06,01, 00,00,00)
+    start = datetime(2011, 06, 01, 00, 00, 00)
+    end = datetime(2021, 06, 01, 00, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
     result = comp["year"]
     correct = 10
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 20"
-    start = datetime(2011,06,01, 00,00,00)
-    end = datetime(2012,06,01, 12,00,00)
+    start = datetime(2011, 06, 01, 00, 00, 00)
+    end = datetime(2012, 06, 01, 12, 00, 00)
 
     comp = compute_datetime_delta(start, end)
 
@@ -498,14 +498,14 @@
     d = end - start
     correct = 12 + d.days * 24
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 21"
-    start = datetime(2011,06,01, 00,00,00)
-    end = datetime(2012,06,01, 12,30,00)
+    start = datetime(2011, 06, 01, 00, 00, 00)
+    end = datetime(2012, 06, 01, 12, 30, 00)
 
     comp = compute_datetime_delta(start, end)
 
@@ -513,14 +513,14 @@
     d = end - start
     correct = d.days * 24 * 60 + 12 * 60 + 30
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 22"
-    start = datetime(2011,06,01, 00,00,00)
-    end = datetime(2012,06,01, 12,00,05)
+    start = datetime(2011, 06, 01, 00, 00, 00)
+    end = datetime(2012, 06, 01, 12, 00, 05)
 
     comp = compute_datetime_delta(start, end)
 
@@ -528,14 +528,14 @@
     d = end - start
     correct = 5 + 60 * 60 * 12 + d.days * 24 * 60 * 60
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 23"
-    start = datetime(2011,06,01, 00,00,00)
-    end = datetime(2012,06,01, 00,30,00)
+    start = datetime(2011, 06, 01, 00, 00, 00)
+    end = datetime(2012, 06, 01, 00, 30, 00)
 
     comp = compute_datetime_delta(start, end)
 
@@ -543,14 +543,14 @@
     d = end - start
     correct = 30 + d.days * 24 * 60
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
     print "Test 24"
-    start = datetime(2011,06,01, 00,00,00)
-    end = datetime(2012,06,01, 00,00,05)
+    start = datetime(2011, 06, 01, 00, 00, 00)
+    end = datetime(2012, 06, 01, 00, 00, 05)
 
     comp = compute_datetime_delta(start, end)
 
@@ -558,18 +558,16 @@
     d = end - start
     correct = 5 + d.days * 24 * 60 * 60
 
-    delta = correct - result 
+    delta = correct - result
 
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
 ###############################################################################
 
-def test_compute_relative_time_granularity():            
-    
+def test_compute_relative_time_granularity():
 
     # First we test intervals
-
     print "Test 1"
     maps = []
     fact = 5
@@ -577,7 +575,7 @@
     end = start * fact
     for i in range(6):
         end = start * fact
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_relative_time(start, end, "years")
         maps.append(map)
         start = end
@@ -586,15 +584,15 @@
     gran = round(compute_relative_time_granularity(maps))
     if fact - gran != 0:
         core.fatal("Wrong granularity reference %i != gran %i" % (fact, gran))
- 
+
     print "Test 2"
     maps = []
     fact = 3
-    start = 1.0/86400
+    start = 1.0 / 86400
     end = start * fact
     for i in range(10):
         end = start * fact
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_relative_time(start, end, "years")
         maps.append(map)
         start = end
@@ -610,11 +608,11 @@
     start = 1
     end = start + fact
     for i in range(10):
-        shift = i*2*fact
+        shift = i * 2 * fact
         start = shift
         end = start + fact
 
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_relative_time(start, end)
         maps.append(map)
 
@@ -632,7 +630,7 @@
     count = 0
     for i in range(6):
         end = start * fact
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         if count % 2 == 0:
             map.set_relative_time(start, end)
         else:
@@ -645,16 +643,16 @@
     gran = round(compute_relative_time_granularity(maps))
     if fact - gran != 0:
         core.fatal("Wrong granularity reference %i != gran %i" % (fact, gran))
- 
+
     # Second we test points only
- 
+
     print "Test 5 points only"
     maps = []
     fact = 3
-    start = 1.0/86400
+    start = 1.0 / 86400
     for i in range(10):
-        point = (i + 1)*fact*start
-        map = raster_dataset(None)
+        point = (i + 1) * fact * start
+        map = RasterDataset(None)
         map.set_relative_time(point, None)
         maps.append(map)
 
@@ -665,10 +663,8 @@
 ###############################################################################
 
 def test_compute_absolute_time_granularity():
- 
 
     # First we test intervals
-
     print "Test 1"
     maps = []
     a = datetime(2001, 1, 1)
@@ -676,13 +672,14 @@
     for i in range(10):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 2"
     maps = []
@@ -691,13 +688,14 @@
     for i in range(10):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 3"
     maps = []
@@ -706,13 +704,14 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 4"
     maps = []
@@ -721,13 +720,14 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 3"
     maps = []
@@ -736,13 +736,14 @@
     for i in range(6):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 4"
     maps = []
@@ -751,13 +752,14 @@
     for i in range(6):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 5"
     maps = []
@@ -766,14 +768,15 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     increment = "1 days"
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 6"
     maps = []
@@ -782,14 +785,15 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     increment = "25 hours"
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 7"
     maps = []
@@ -798,13 +802,14 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 8"
     maps = []
@@ -813,13 +818,14 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 9"
     maps = []
@@ -828,14 +834,15 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     increment = "325 minutes"
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 10"
     maps = []
@@ -844,195 +851,209 @@
     for i in range(20):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     increment = "330 seconds"
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 11"
     maps = []
-    a = datetime(2001,12,31)
+    a = datetime(2001, 12, 31)
     increment = "60 minutes, 30 seconds"
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     increment = "3630 seconds"
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 12"
     maps = []
-    a = datetime(2001,12,31, 12, 30, 30)
+    a = datetime(2001, 12, 31, 12, 30, 30)
     increment = "3600 seconds"
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
-    # Test absolute time points    
+    # Test absolute time points
 
     print "Test 13"
     maps = []
-    a = datetime(2001,12,31, 12, 30, 30)
+    a = datetime(2001, 12, 31, 12, 30, 30)
     increment = "3600 seconds"
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = None
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 14"
     maps = []
-    a = datetime(2001,12,31, 00, 00, 00)
+    a = datetime(2001, 12, 31, 00, 00, 00)
     increment = "20 days"
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = None
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 15"
     maps = []
-    a = datetime(2001,12,01, 00, 00, 00)
+    a = datetime(2001, 12, 01, 00, 00, 00)
     increment = "5 months"
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = None
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
-    # Test absolute time interval and points    
+    # Test absolute time interval and points
 
     print "Test 16"
     maps = []
-    a = datetime(2001,12,31, 12, 30, 30)
+    a = datetime(2001, 12, 31, 12, 30, 30)
     increment = "3600 seconds"
 
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
-    a = datetime(2002,02,01, 12, 30, 30)
+    a = datetime(2002, 02, 01, 12, 30, 30)
     for i in range(24):
         start = increment_datetime_by_string(a, increment, i)
         end = None
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
     print "Test 17"
     maps = []
-    a = datetime(2001,1,1)
+    a = datetime(2001, 1, 1)
     increment = "2 days"
 
     for i in range(8):
         start = increment_datetime_by_string(a, increment, i)
         end = increment_datetime_by_string(a, increment, i + 1)
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
-    a = datetime(2001,02,02)
+    a = datetime(2001, 02, 02)
     for i in range(8):
         start = increment_datetime_by_string(a, increment, i)
         end = None
-        map = raster_dataset(None)
+        map = RasterDataset(None)
         map.set_absolute_time(start, end)
         maps.append(map)
 
     gran = compute_absolute_time_granularity(maps)
     if increment != gran:
-        core.fatal("Wrong granularity reference %s != gran %s" % (increment, gran))
+        core.fatal("Wrong granularity reference %s != gran %s" % (
+            increment, gran))
 
 ###############################################################################
 
 def test_spatial_extent_intersection():
     # Generate the extents
-    
-    A = spatial_extent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+
+    A = SpatialExtent(
+        north=80, south=20, east=60, west=10, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=80, south=20, east=60, west=10, bottom=-50, top=50)
     B.print_info()
     C = A.intersect(B)
     C.print_info()
-    
+
     if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
-       C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
-       C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
+        C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
+        C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
         core.fatal("Wrong intersection computation")
-        
-    B = spatial_extent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
+
+    B = SpatialExtent(
+        north=40, south=30, east=60, west=10, bottom=-50, top=50)
     B.print_info()
     C = A.intersect(B)
     C.print_info()
-    
+
     if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
        C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
        C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
         core.fatal("Wrong intersection computation")
-        
-    B = spatial_extent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
+
+    B = SpatialExtent(
+        north=40, south=30, east=60, west=30, bottom=-50, top=50)
     B.print_info()
     C = A.intersect(B)
     C.print_info()
-    
+
     if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
        C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
        C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
         core.fatal("Wrong intersection computation")
-    
-    B = spatial_extent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
+
+    B = SpatialExtent(
+        north=40, south=30, east=60, west=30, bottom=-30, top=50)
     B.print_info()
     C = A.intersect(B)
     C.print_info()
-    
+
     if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
        C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
        C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
         core.fatal("Wrong intersection computation")
-    
-    B = spatial_extent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
+
+    B = SpatialExtent(
+        north=40, south=30, east=60, west=30, bottom=-30, top=30)
     B.print_info()
     C = A.intersect(B)
     C.print_info()
-    
+
     if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
        C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
        C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
@@ -1042,402 +1063,429 @@
 
 def test_spatial_relations():
     # Generate the extents
-    
-    A = spatial_extent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+
+    A = SpatialExtent(
+        north=80, south=20, east=60, west=10, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=80, south=20, east=60, west=10, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "equivalent":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    B = spatial_extent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
+    if relation != "equivalent":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=20, east=60, west=10, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=30, east=60, west=10, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = B.spatial_relation_2d(A)
     print relation
-    if relation!= "covered":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "covered":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = B.spatial_relation(A)
     print relation
-    if relation!= "covered":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
+    if relation != "covered":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=30, east=50, west=10, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = B.spatial_relation_2d(A)
     print relation
-    if relation!= "covered":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "covered":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
-    
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=30, east=50, west=20, bottom=-50, top=50)
+
     relation = B.spatial_relation(A)
     print relation
-    if relation!= "covered":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+    if relation != "covered":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=30, east=50, west=20, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "contain":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "contain":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=30, east=50, west=20, bottom=-40, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "cover":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
+    if relation != "cover":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=70, south=30, east=50, west=20, bottom=-40, top=40)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "contain":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "contain":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = B.spatial_relation(A)
     print relation
-    if relation!= "in":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
+    if relation != "in":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(
+        north=90, south=30, east=50, west=20, bottom=-40, top=40)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "overlap":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "overlap":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "overlap":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
+    if relation != "overlap":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
     A.print_info()
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "in":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-    
+    if relation != "in":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "overlap":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    B = spatial_extent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
+    if relation != "overlap":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
     A.print_info()
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "overlap":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-	
-    B = spatial_extent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
+    if relation != "overlap":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
     A.print_info()
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "in":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    A = spatial_extent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+    if relation != "in":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=60, east=60, west=10, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=60, south=20, east=60, west=10, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
-    A = spatial_extent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=60, south=40, east=60, west=10, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=80, south=60, east=60, west=10, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=60, west=40, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=90, south=30, east=60, west=40, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=70, south=50, east=60, west=40, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=60, south=20, east=60, west=40, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
+    B = SpatialExtent(
+        north=40, south=20, east=60, west=40, bottom=-50, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation_2d(B)
     print relation
-    if relation!= "disjoint":
-	core.fatal("Wrong spatial relation: %s"%(relation))
-	
+    if relation != "disjoint":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "disjoint":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    if relation != "disjoint":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
+    B = SpatialExtent(
+        north=60, south=20, east=60, west=40, bottom=-60, top=60)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(
+        north=80, south=40, east=40, west=20, bottom=-50, top=50)
     A.print_info()
-    B = spatial_extent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
+    B = SpatialExtent(
+        north=90, south=30, east=60, west=40, bottom=-40, top=40)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
     A.print_info()
-    B = spatial_extent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+    B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
     A.print_info()
-    B = spatial_extent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
+    B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
     A.print_info()
-    B = spatial_extent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
+    B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
     A.print_info()
-    B = spatial_extent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
+    B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
     A.print_info()
-    B = spatial_extent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
+    B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
  ###
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     A.print_info()
-    B = spatial_extent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+    B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     A.print_info()
-    B = spatial_extent(north=80, south=50, east=60, west=30, bottom=0, top=50)
+    B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     A.print_info()
-    B = spatial_extent(north=70, south=50, east=50, west=30, bottom=0, top=50)
+    B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     A.print_info()
-    B = spatial_extent(north=90, south=30, east=70, west=10, bottom=0, top=50)
+    B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
- 
-    A = spatial_extent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
+
+    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     A.print_info()
-    B = spatial_extent(north=70, south=30, east=50, west=10, bottom=0, top=50)
+    B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
     B.print_info()
-    
+
     relation = A.spatial_relation(B)
     print relation
-    if relation!= "meet":
-	core.fatal("Wrong spatial relation: %s"%(relation))
+    if relation != "meet":
+        core.fatal("Wrong spatial relation: %s" % (relation))
 
 ###############################################################################
 
 def test_temporal_topology_builder():
     map_listA = []
-    
-    _map = raster_dataset(ident = "1@a")
+
+    _map = RasterDataset(ident="1@a")
     _map.set_absolute_time(datetime(2001, 01, 01), datetime(2001, 02, 01))
     map_listA.append(copy.copy(_map))
-    _map = raster_dataset(ident = "2@a")
+    _map = RasterDataset(ident="2@a")
     _map.set_absolute_time(datetime(2001, 02, 01), datetime(2001, 03, 01))
     map_listA.append(copy.copy(_map))
-    _map = raster_dataset(ident = "3@a")
+    _map = RasterDataset(ident="3@a")
     _map.set_absolute_time(datetime(2001, 03, 01), datetime(2001, 04, 01))
     map_listA.append(copy.copy(_map))
-    _map = raster_dataset(ident = "4@a")
+    _map = RasterDataset(ident="4@a")
     _map.set_absolute_time(datetime(2001, 04, 01), datetime(2001, 05, 01))
     map_listA.append(copy.copy(_map))
-    _map = raster_dataset(ident = "5@a")
+    _map = RasterDataset(ident="5@a")
     _map.set_absolute_time(datetime(2001, 05, 01), datetime(2001, 06, 01))
     map_listA.append(copy.copy(_map))
     _map.set_absolute_time(datetime(2001, 05, 01), datetime(2001, 06, 01))
     map_listA.append(copy.copy(_map))
 
@@ -1446,170 +1494,254 @@
 
     count = 0
     for _map in tb:
-	print "[%s]"%(_map.get_name())
-	_map.print_temporal_topology_info()
-	if _map.get_id() != map_listA[count].get_id():
-	    core.fatal("Error building temporal topology <%s> != <%s>"%( _map.get_id(), map_listA[count].get_id()))
-	count += 1
-	
+        print "[%s]" % (_map.get_name())
+        _map.print_temporal_topology_info()
+        if _map.get_id() != map_listA[count].get_id():
+            core.fatal("Error building temporal topology <%s> != <%s>" %
+                (_map.get_id(), map_listA[count].get_id()))
+        count += 1
+
     map_listB = []
-        
-    _map = raster_dataset(ident = "1@b")
+
+    _map = RasterDataset(ident="1@b")
     _map.set_absolute_time(datetime(2001, 01, 14), datetime(2001, 03, 14))
     map_listB.append(copy.copy(_map))
-    _map = raster_dataset(ident = "2@b")
+    _map = RasterDataset(ident="2@b")
     _map.set_absolute_time(datetime(2001, 02, 01), datetime(2001, 04, 01))
     map_listB.append(copy.copy(_map))
-    _map = raster_dataset(ident = "3@b")
+    _map = RasterDataset(ident="3@b")
     _map.set_absolute_time(datetime(2001, 02, 14), datetime(2001, 04, 30))
     map_listB.append(copy.copy(_map))
-    _map = raster_dataset(ident = "4@b")
+    _map = RasterDataset(ident="4@b")
     _map.set_absolute_time(datetime(2001, 04, 02), datetime(2001, 04, 30))
     map_listB.append(copy.copy(_map))
-    _map = raster_dataset(ident = "5@b")
+    _map = RasterDataset(ident="5@b")
     _map.set_absolute_time(datetime(2001, 05, 01), datetime(2001, 05, 14))
     map_listB.append(copy.copy(_map))
-    
+
     tb = temporal_topology_builder()
     tb.build(map_listB)
 
     # Probing some relations
-    if map_listB[0].get_overlapped()[0] != map_listB[1]:
-	    core.fatal("Error building temporal topology")
-    if map_listB[0].get_overlapped()[1] != map_listB[2]:
-	    core.fatal("Error building temporal topology")
-    if map_listB[2].get_contains()[0] != map_listB[3]:
-	    core.fatal("Error building temporal topology")
-    if map_listB[3].get_during()[0] != map_listB[2]:
-	    core.fatal("Error building temporal topology")
-    
+    if map_listB[0].get_temporal_overlapped()[0] != map_listB[1]:
+        core.fatal("Error building temporal topology")
+    if map_listB[0].get_temporal_overlapped()[1] != map_listB[2]:
+        core.fatal("Error building temporal topology")
+    if map_listB[2].get_temporal_contains()[0] != map_listB[3]:
+        core.fatal("Error building temporal topology")
+    if map_listB[3].get_temporal_during()[0] != map_listB[2]:
+        core.fatal("Error building temporal topology")
+
     count = 0
     for _map in tb:
-	print "[%s]"%(_map.get_map_id
-	())
-	_map.print_temporal_topology_shell_info()
-	if _map.get_id() != map_listB[count].get_id():
-	    core.fatal("Error building temporal topology <%s> != <%s>"%( _map.get_id(), map_listB[count].get_id()))
-	count += 1
-	
+        print "[%s]" % (_map.get_map_id
+        ())
+        _map.print_temporal_topology_shell_info()
+        if _map.get_id() != map_listB[count].get_id():
+            core.fatal("Error building temporal topology <%s> != <%s>" %
+                (_map.get_id(), map_listB[count].get_id()))
+        count += 1
+
     tb = temporal_topology_builder()
     tb.build2(map_listA, map_listB)
-    
+
     count = 0
     for _map in tb:
-	print "[%s]"%(_map.get_map_id())
-	_map.print_temporal_topology_shell_info()
-	if _map.get_id() != map_listA[count].get_id():
-	    core.fatal("Error building temporal topology <%s> != <%s>"%( _map.get_id(), map_listA[count].get_id()))
-	count += 1
+        print "[%s]" % (_map.get_map_id())
+        _map.print_temporal_topology_shell_info()
+        if _map.get_id() != map_listA[count].get_id():
+            core.fatal("Error building temporal topology <%s> != <%s>" %
+                (_map.get_id(), map_listA[count].get_id()))
+        count += 1
 
     count = 0
     for _map in map_listB:
-	print "[%s]"%(_map.get_map_id())
-	_map.print_temporal_topology_shell_info()
+        print "[%s]" % (_map.get_map_id())
+        _map.print_temporal_topology_shell_info()
 
     # Probing some relations
-    if map_listA[3].get_follows()[0] != map_listB[1]:
-	    core.fatal("Error building temporal topology")   
-    if map_listA[3].get_precedes()[0] != map_listB[4]:
-	    core.fatal("Error building temporal topology")
-    if map_listA[3].get_overlaps()[0] != map_listB[2]:
-	    core.fatal("Error building temporal topology")
-    if map_listA[3].get_contains()[0] != map_listB[3]:
-	    core.fatal("Error building temporal topology")
-	    
-    if map_listA[2].get_during()[0] != map_listB[1]:
-	    core.fatal("Error building temporal topology")
-    if map_listA[2].get_during()[1] != map_listB[2]:
-	    core.fatal("Error building temporal topology")
+    if map_listA[3].get_temporal_follows()[0] != map_listB[1]:
+        core.fatal("Error building temporal topology")
+    if map_listA[3].get_temporal_precedes()[0] != map_listB[4]:
+        core.fatal("Error building temporal topology")
+    if map_listA[3].get_temporal_overlaps()[0] != map_listB[2]:
+        core.fatal("Error building temporal topology")
+    if map_listA[3].get_temporal_contains()[0] != map_listB[3]:
+        core.fatal("Error building temporal topology")
 
+    if map_listA[2].get_temporal_during()[0] != map_listB[1]:
+        core.fatal("Error building temporal topology")
+    if map_listA[2].get_temporal_during()[1] != map_listB[2]:
+        core.fatal("Error building temporal topology")
 
 ###############################################################################
 
 def test_map_list_sorting():
-    
+
     map_list = []
-    
-    _map = raster_dataset(ident = "1@a")
+
+    _map = RasterDataset(ident="1@a")
     _map.set_absolute_time(datetime(2001, 02, 01), datetime(2001, 03, 01))
     map_list.append(copy.copy(_map))
-    _map = raster_dataset(ident = "2@a")
+    _map = RasterDataset(ident="2@a")
     _map.set_absolute_time(datetime(2001, 01, 01), datetime(2001, 02, 01))
     map_list.append(copy.copy(_map))
-    _map = raster_dataset(ident = "3@a")
+    _map = RasterDataset(ident="3@a")
     _map.set_absolute_time(datetime(2001, 03, 01), datetime(2001, 04, 01))
     map_list.append(copy.copy(_map))
-	    
+
     print "Original"
     for _map in map_list:
-	print _map.get_valid_time()[0],  _map.get_valid_time()[1]
+        print _map.get_valid_time()[0], _map.get_valid_time()[1]
     print "Sorted by start time"
-    new_list = sorted(map_list, key=abstract_dataset_comparison_key_start_time)
+    new_list = sorted(map_list, key=AbstractDatasetComparisonKeyStartTime)
     for _map in new_list:
-	print _map.get_valid_time()[0],  _map.get_valid_time()[1]
-	
-    
+        print _map.get_valid_time()[0], _map.get_valid_time()[1]
+
     if new_list[0] != map_list[1]:
-	core.fatal("Sorting by start time failed")
+        core.fatal("Sorting by start time failed")
     if new_list[1] != map_list[0]:
-	core.fatal("Sorting by start time failed")
+        core.fatal("Sorting by start time failed")
     if new_list[2] != map_list[2]:
-	core.fatal("Sorting by start time failed")
-	
+        core.fatal("Sorting by start time failed")
+
     print "Sorted by end time"
-    new_list = sorted(map_list, key=abstract_dataset_comparison_key_end_time)
+    new_list = sorted(map_list, key=AbstractDatasetComparisonKeyEndTime)
     for _map in new_list:
-	print _map.get_valid_time()[0],  _map.get_valid_time()[1]
-	
+        print _map.get_valid_time()[0], _map.get_valid_time()[1]
 
     if new_list[0] != map_list[1]:
-	core.fatal("Sorting by end time failed")
+        core.fatal("Sorting by end time failed")
     if new_list[1] != map_list[0]:
-	core.fatal("Sorting by end time failed")
+        core.fatal("Sorting by end time failed")
     if new_list[2] != map_list[2]:
-	core.fatal("Sorting by end time failed")
+        core.fatal("Sorting by end time failed")
 
-def test_rtree():
+###############################################################################
+
+def test_1d_rtree():
     """Testing the rtree ctypes wrapper"""
 
     tree = vector.RTreeNewIndex(-1, 0, 1)
 
-    for i in xrange(50):
+    for i in xrange(10):
+        
         rect = vector.RTree_Rect()
-        vector.RTreeInitRect(byref(rect))
+        # Allocate the boundary
+        vector.RTreeNewRect(byref(rect), tree)
+        vector.RTreeSetRect1D(byref(rect), tree, float(i - 2), float(i + 2))
+        vector.RTreeInsertRect(byref(rect), i + 1, tree)
 
-        rect.boundary[0] = i - 3.75
-        rect.boundary[1] = 0
-        rect.boundary[2] = 0
-        rect.boundary[3] = i + 3.75
-        rect.boundary[4] = 0
-        rect.boundary[5] = 0
+    rect = vector.RTree_Rect()
+    vector.RTreeNewRect(byref(rect), tree)
+    vector.RTreeSetRect1D(byref(rect), tree, 2.0, 7.0)
 
-        # vector.RTreePrintRect(byref(rect), 0)
+    list_ = gis.ilist()
 
+    num = vector.RTreeSearch2(tree, byref(rect), byref(list_))
+
+    # print rectangle ids
+    print "Number of overlapping rectangles", num
+    for i in xrange(list_.n_values):
+        print "id", list_.value[i]
+
+###############################################################################
+
+def test_2d_rtree():
+    """Testing the rtree ctypes wrapper"""
+
+    tree = vector.RTreeNewIndex(-1, 0, 2)
+
+    for i in xrange(10):
+        
+        rect = vector.RTree_Rect()
+        # Allocate the boundary
+        vector.RTreeNewRect(byref(rect), tree)
+
+        vector.RTreeSetRect2D(byref(rect), tree, 
+                              float(i - 2), float(i + 2), 
+                              float(i - 2), float(i + 2))
         vector.RTreeInsertRect(byref(rect), i + 1, tree)
 
     rect = vector.RTree_Rect()
-    vector.RTreeInitRect(byref(rect))
+    vector.RTreeNewRect(byref(rect), tree)
+    vector.RTreeSetRect2D(byref(rect), tree, 2.0, 7.0, 2.0, 7.0)
 
-    i = 25
-    rect.boundary[0] = i - 3.75
-    rect.boundary[1] = 0
-    rect.boundary[2] = 0
-    rect.boundary[3] = i + 3.75
-    rect.boundary[4] = 0
-    rect.boundary[5] = 0
+    list_ = gis.ilist()
 
-    _list = gis.ilist()
+    num = vector.RTreeSearch2(tree, byref(rect), byref(list_))
 
-    num = vector.RTreeSearch2(tree, byref(rect), byref(_list))
+    # print rectangle ids
+    print "Number of overlapping rectangles", num
+    for i in xrange(list_.n_values):
+        print "id", list_.value[i]
 
-    # print rectanlge ids
+###############################################################################
+
+def test_3d_rtree():
+    """Testing the rtree ctypes wrapper"""
+
+    tree = vector.RTreeNewIndex(-1, 0, 3)
+
+    for i in xrange(10):
+        
+        rect = vector.RTree_Rect()
+        # Allocate the boundary
+        vector.RTreeNewRect(byref(rect), tree)
+        vector.RTreeSetRect3D(byref(rect), tree, 
+                              float(i - 2), float(i + 2), 
+                              float(i - 2), float(i + 2), 
+                              float(i - 2), float(i + 2))
+        vector.RTreeInsertRect(byref(rect), i + 1, tree)
+        print i + 1
+        vector.RTreePrintRect(byref(rect), 1, tree)
+
+    rect = vector.RTree_Rect()
+    vector.RTreeNewRect(byref(rect), tree)
+    vector.RTreeSetRect3D(byref(rect), tree, 2.0, 7.0, 2.0, 7.0, 2.0, 7.0)
+    print "Select"
+    vector.RTreePrintRect(byref(rect), 1, tree)
+        
+    list_ = gis.ilist()
+
+    num = vector.RTreeSearch2(tree, byref(rect), byref(list_))
+
+    # print rectangle ids
     print "Number of overlapping rectangles", num
-    for i in xrange(_list.n_values):
-        print "id", _list.value[i]
+    for i in xrange(list_.n_values):
+        print "id", list_.value[i]
 
+###############################################################################
 
+def test_4d_rtree():
+    """Testing the rtree ctypes wrapper"""
+
+    tree = vector.RTreeNewIndex(-1, 0, 4)
+
+    for i in xrange(10):
+        
+        # Allocate the boundary
+        rect = vector.RTreeNewRect(None, tree)
+        vector.RTreeSetRect4D(rect, tree, 
+                              float(i - 2), float(i + 2), 
+                              float(i - 2), float(i + 2), 
+                              float(i - 2), float(i + 2), 
+                              float(i - 2), float(i + 2))
+        vector.RTreeInsertRect(rect, i + 1, tree)
+
+    rect = vector.RTreeNewRect(None, tree)
+    vector.RTreeSetRect4D(rect, tree, 2.0, 7.0, 2.0, 
+                          7.0, 2.0, 7.0, 2.0, 7.0)
+
+    list_ = gis.ilist()
+
+    num = vector.RTreeSearch2(tree, rect, byref(list_))
+
+    # print rectangle ids
+    print "Number of overlapping rectangles", num
+    for i in xrange(list_.n_values):
+        print "id", list_.value[i]
+
 ###############################################################################
 
 if __name__ == "__main__":
@@ -1623,5 +1755,7 @@
     test_spatial_relations()
     test_temporal_topology_builder()
     test_map_list_sorting()
-    #test_rtree()
-    
+    test_1d_rtree()
+    test_2d_rtree()
+    test_3d_rtree()
+    test_4d_rtree()

Modified: grass/trunk/lib/python/temporal/univar_statistics.py
===================================================================
--- grass/trunk/lib/python/temporal/univar_statistics.py	2012-08-11 21:26:18 UTC (rev 52630)
+++ grass/trunk/lib/python/temporal/univar_statistics.py	2012-08-12 01:54:40 UTC (rev 52631)
@@ -9,7 +9,8 @@
 @code
 import grass.temporal as tgis
 
-tgis.print_gridded_dataset_univar_statistics(type, input, where, extended, header, fs)
+tgis.print_gridded_dataset_univar_statistics(
+    type, input, where, extended, header, fs)
 
 ...
 @endcode
@@ -25,172 +26,208 @@
 from space_time_datasets_tools import *
 
 ###############################################################################
-    
+
+
 def print_gridded_dataset_univar_statistics(type, input, where, extended, header, fs):
     """!Print univariate statistics for a space time raster or raster3d dataset
-    
-      param type Must be "strds" or "str3ds"
+
+       @param type Must be "strds" or "str3ds"
        @param input The name of the space time dataset
        @param where A temporal database where statement
-       @param extended If True compute extended statistics 
-       @param header   If True print column names as header 
-       @param fs Field separator 
+       @param extended If True compute extended statistics
+       @param header   If True print column names as header
+       @param fs Field separator
     """
-    
+
     # We need a database interface
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
-   
-    mapset =  core.gisenv()["MAPSET"]
 
+    mapset = core.gisenv()["MAPSET"]
+
     if input.find("@") >= 0:
         id = input
     else:
         id = input + "@" + mapset
 
     sp = dataset_factory(type, id)
-    
+
     if sp.is_in_db(dbif) == False:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     sp.select(dbif)
 
-    rows = sp.get_registered_maps("id,start_time,end_time", where, "start_time", dbif)
+    rows = sp.get_registered_maps(
+        "id,start_time,end_time", where, "start_time", dbif)
 
     if not rows:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> is empty") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> is empty") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     if header == True:
-        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + "min" + fs + "max" + fs,
+        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + \
+            "min" + fs + "max" + fs,
         print "mean_of_abs" + fs + "stddev" + fs + "variance" + fs,
         if extended == True:
-            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells" + fs,
-            print "first_quartile" + fs + "median" + fs + "third_quartile" + fs + "percentile_90" 
+            print "coeff_var" + fs + "sum" + fs + \
+                "null_cells" + fs + "cells" + fs,
+            print "first_quartile" + fs + "median" + fs + \
+                "third_quartile" + fs + "percentile_90"
         else:
-            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells" 
+            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"
 
     for row in rows:
         id = row["id"]
         start = row["start_time"]
         end = row["end_time"]
 
-        flag="g"
+        flag = "g"
 
         if extended == True:
             flag += "e"
 
-	if type == "strds":
-	    stats = core.parse_command("r.univar", map=id, flags=flag)
-	elif type == "str3ds":
-	    stats = core.parse_command("r3.univar", map=id, flags=flag)
+        if type == "strds":
+            stats = core.parse_command("r.univar", map=id, flags=flag)
+        elif type == "str3ds":
+            stats = core.parse_command("r3.univar", map=id, flags=flag)
 
         print str(id) + fs + str(start) + fs + str(end),
-        print fs + str(stats["mean"]) + fs + str(stats["min"]) + fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
-        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),
+        print fs + str(stats["mean"]) + fs + str(stats["min"]) + \
+            fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
+        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + \
+            fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),
 
         if extended == True:
-            print fs + str(stats["null_cells"]) + fs + str(stats["cells"]) + fs,
-            print str(stats["first_quartile"]) + fs + str(stats["median"]) + fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"]) 
+            print fs + str(stats["null_cells"]) + fs + str(
+                stats["cells"]) + fs,
+            print str(stats["first_quartile"]) + fs + str(stats["median"]) + \
+                  fs + str(stats["third_quartile"]) + \
+                  fs + str(stats["percentile_90"])
         else:
             print fs + str(stats["null_cells"]) + fs + str(stats["cells"])
-        
+
     dbif.close()
 
 ###############################################################################
-    
-def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, where, extended, header, fs):
+
+
+def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, 
+                                           where, extended, header, fs):
     """!Print univariate statistics for a space time vector dataset
-    
-       @param input The name of the space time dataset
-       @param twhere A temporal database where statement
-       @param layer The layer number used in case no layer is present in the temporal dataset
-       @param type options: point,line,boundary,centroid,area
-       @param column The name of the attribute column
-       @param where A temporal database where statement
-       @param extended If True compute extended statistics 
-       @param header   If True print column names as header 
-       @param fs Field separator 
+
+       @param input: The name of the space time dataset
+       @param twhere: A temporal database where statement
+       @param layer: The layer number used in case no layer is present 
+              in the temporal dataset
+       @param type: options: point,line,boundary,centroid,area
+       @param column: The name of the attribute column
+       @param where: A temporal database where statement
+       @param extended: If True compute extended statistics
+       @param header:   If True print column names as header
+       @param fs: Field separator
     """
 
     # We need a database interface
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
-   
-    mapset =  core.gisenv()["MAPSET"]
 
+    mapset = core.gisenv()["MAPSET"]
+
     if input.find("@") >= 0:
         id = input
     else:
         id = input + "@" + mapset
 
     sp = dataset_factory("stvds", id)
-    
+
     if sp.is_in_db(dbif) == False:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     sp.select(dbif)
 
-    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer", twhere, "start_time", dbif)
+    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer",
+                                  twhere, "start_time", dbif)
 
     if not rows:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> is empty") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> is empty") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     string = ""
     if header == True:
-        string += "id" + fs + "start" + fs + "end" + fs + "n" + fs + "nmissing" + fs + "nnull" + fs
+        string += "id" + fs + "start" + fs + "end" + fs + "n" + \
+            fs + "nmissing" + fs + "nnull" + fs
         string += "min" + fs + "max" + fs + "range"
-	if type == "point" or type == "centroid":
-            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" + fs + "population_variance" + fs
-	    string += "population_coeff_variation" + fs + "sample_stddev" + fs + "sample_variance" + fs
-	    string += "kurtosis" + fs + "skewness"
+        if type == "point" or type == "centroid":
+            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" +\
+                      fs + "population_variance" + fs
+            string += "population_coeff_variation" + fs + \
+                "sample_stddev" + fs + "sample_variance" + fs
+            string += "kurtosis" + fs + "skewness"
             if extended == True:
-                string+= fs + "first_quartile" + fs + "median" + fs + "third_quartile" + fs + "percentile_90" 
+                string += fs + "first_quartile" + fs + "median" + fs + \
+                    "third_quartile" + fs + "percentile_90"
 
-	print string
+        print string
 
     for row in rows:
         id = row["name"] + "@" + row["mapset"]
         start = row["start_time"]
         end = row["end_time"]
-	mylayer = row["layer"]
+        mylayer = row["layer"]
 
-        flags="g"
+        flags = "g"
 
         if extended == True:
             flags += "e"
 
-	if not mylayer:
-	    mylayer = layer
+        if not mylayer:
+            mylayer = layer
 
-        stats = core.parse_command("v.univar", map=id, where=where, column=column, layer=mylayer, type=type, flags=flags)
+        stats = core.parse_command("v.univar", map=id, where=where,
+                                   column=column, layer=mylayer, 
+                                   type=type, flags=flags)
 
-	string = ""
-	if stats:
+        string = ""
+        if stats:
             string += str(id) + fs + str(start) + fs + str(end)
-            string += fs + str(stats["n"]) + fs + str(stats["nmissing"]) + fs + str(stats["nnull"])
-	    if stats.has_key("min"):
-            	string += fs + str(stats["min"]) + fs + str(stats["max"]) + fs + str(stats["range"])
-	    else:
-            	string += fs + fs + fs
+            string += fs + str(stats["n"]) + fs + str(stats[
+                "nmissing"]) + fs + str(stats["nnull"])
+            if "min" in stats:
+                string += fs + str(stats["min"]) + fs + str(
+                    stats["max"]) + fs + str(stats["range"])
+            else:
+                string += fs + fs + fs
 
-	    if type == "point" or type == "centroid":
-		if stats.has_key("mean"):
-            	    string += fs + str(stats["mean"]) + fs + str(stats["mean_abs"]) + fs + str(stats["population_stddev"]) + fs + str(stats["population_variance"])
-            	    string += fs + str(stats["population_coeff_variation"]) + fs + str(stats["sample_stddev"]) + fs + str(stats["sample_variance"])
-            	    string += fs + str(stats["kurtosis"]) + fs + str(stats["skewness"])
-	        else:
-            	    string += fs + fs + fs + fs + fs + fs + fs + fs + fs
+            if type == "point" or type == "centroid":
+                if "mean" in stats:
+                    string += fs + str(stats["mean"]) + fs + \
+                    str(stats["mean_abs"]) + fs + \
+                    str(stats["population_stddev"]) + fs + \
+                    str(stats["population_variance"])
+                    
+                    string += fs + str(stats["population_coeff_variation"]) + \
+                    fs + str(stats["sample_stddev"]) + fs + \
+                    str(stats["sample_variance"])
+                    
+                    string += fs + str(stats["kurtosis"]) + fs + \
+                    str(stats["skewness"])
+                else:
+                    string += fs + fs + fs + fs + fs + fs + fs + fs + fs
                 if extended == True:
-		    if stats.has_key("first_quartile"):
-                        string += fs + str(stats["first_quartile"]) + fs + str(stats["median"]) + fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"]) 
-		    else:
-                        string += fs + fs + fs + fs 
-        
-	    print string
+                    if "first_quartile" in stats:
+                        string += fs + str(stats["first_quartile"]) + fs + \
+                        str(stats["median"]) + fs + \
+                        str(stats["third_quartile"]) + fs + \
+                        str(stats["percentile_90"])
+                    else:
+                        string += fs + fs + fs + fs
 
+            print string
+
     dbif.close()
-



More information about the grass-commit mailing list