[GRASS-SVN] r52623 - grass/trunk/lib/python/temporal

svn_grass at osgeo.org svn_grass at osgeo.org
Fri Aug 10 11:33:58 PDT 2012


Author: huhabla
Date: 2012-08-10 11:33:57 -0700 (Fri, 10 Aug 2012)
New Revision: 52623

Modified:
   grass/trunk/lib/python/temporal/abstract_dataset.py
   grass/trunk/lib/python/temporal/abstract_map_dataset.py
   grass/trunk/lib/python/temporal/abstract_space_time_dataset.py
   grass/trunk/lib/python/temporal/datetime_math.py
   grass/trunk/lib/python/temporal/space_time_datasets.py
Log:
PEP8 compliance and new doctests


Modified: grass/trunk/lib/python/temporal/abstract_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_dataset.py	2012-08-10 16:12:42 UTC (rev 52622)
+++ grass/trunk/lib/python/temporal/abstract_dataset.py	2012-08-10 18:33:57 UTC (rev 52623)
@@ -8,14 +8,14 @@
 Usage:
 
 >>> import grass.temporal as tgis
->>> ad = abstract_dataset()
+>>> ad = AbstractDataset()
 >>> ad.reset(ident="soil at PERMANENT")
 Traceback (most recent call last):
   File "/usr/lib/python2.7/doctest.py", line 1289, in __run
     compileflags, 1) in test.globs
   File "<doctest __main__[2]>", line 1, in <module>
     ad.reset(ident="soil at PERMANENT")
-  File "abstract_dataset.py", line 53, in reset
+  File "AbstractDataset.py", line 53, in reset
     raise ImplementationError("This method must be implemented in the subclasses")
 ImplementationError: 'This method must be implemented in the subclasses'
 
@@ -42,7 +42,7 @@
     def __str__(self):
         return repr(self.msg)
     
-class abstract_dataset(object):
+class AbstractDataset(object):
     """!This is the base class for all datasets (raster, vector, raster3d, strds, stvds, str3ds)"""
 
     def reset(self, ident):
@@ -83,13 +83,7 @@
 
     def print_self(self):
         """!Print the content of the internal structure to stdout"""
-        self.base.print_self()
-        if self.is_time_absolute():
-            self.absolute_time.print_self()
-        if self.is_time_relative():
-            self.relative_time.print_self()
-        self.spatial_extent.print_self()
-        self.metadata.print_self()
+        raise ImplementationError("This method must be implemented in the subclasses")
 
     def set_id(self, ident):
         self.base.set_id(ident)
@@ -224,7 +218,7 @@
         statement += self.spatial_extent.get_insert_statement_mogrified(dbif)
         statement += self.metadata.get_insert_statement_mogrified(dbif)
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
             if connect:
                 dbif.close()
@@ -256,7 +250,7 @@
         statement += self.spatial_extent.get_update_statement_mogrified(dbif)
         statement += self.metadata.get_update_statement_mogrified(dbif)
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
             if connect:
                 dbif.close()
@@ -289,7 +283,7 @@
             dbif)
         statement += self.metadata.get_update_all_statement_mogrified(dbif)
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
             if connect:
                 dbif.close()

Modified: grass/trunk/lib/python/temporal/abstract_map_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_map_dataset.py	2012-08-10 16:12:42 UTC (rev 52622)
+++ grass/trunk/lib/python/temporal/abstract_map_dataset.py	2012-08-10 18:33:57 UTC (rev 52623)
@@ -7,12 +7,10 @@
 
 Usage:
 
- at code
-import grass.temporal as tgis
+>>> import grass.temporal as tgis
+>>> tmr = TemporalMapRelations()
+>>> amd = AbstractMapDataset()
 
-...
- at endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -24,7 +22,7 @@
 from datetime_math import *
 
 
-class TemporalMapRelations(abstract_dataset):
+class TemporalMapRelations(AbstractDataset):
     """!This class implements a temporal topology access structure
 
        This object will be set up by temporal topology creation methods.
@@ -42,17 +40,26 @@
        * contains (including started, finished)
 
 
-       Code:
+       @code
        # We have build the temporal topology and we know the first map
        start = first
        while start:
 
            # Print all maps this map temporally contains
            dlist = start.get_contains()
-           for _map in dlist:
-               _map.print_info()
+           for map in dlist:
+               map.print_info()
 
            start = start.next()
+         @endcode  
+        
+        Usage:
+        
+        >>> import grass.temporal as tgis
+        >>> tmr = TemporalMapRelations()
+        >>> tmr.print_temporal_topology_info()
+         +-------------------- Temporal Topology -------------------------------------+
+        >>> tmr.print_temporal_topology_shell_info()
     """
 
     def __init__(self):
@@ -75,27 +82,27 @@
         """!Check if the temporal topology was build"""
         return self._has_temporal_topology
 
-    def set_temporal_next(self, _map):
+    def set_temporal_next(self, map_):
         """!Set the map that is temporally as closest located after this map.
 
            Temporally located means that the start time of the "next" map is
            temporally located AFTER the start time of this map, but temporally
            near than other maps of the same dataset.
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
-        self._temporal_topology["NEXT"] = _map
+        self._temporal_topology["NEXT"] = map_
 
-    def set_temporal_prev(self, _map):
+    def set_temporal_prev(self, map_):
         """!Set the map that is temporally as closest located before this map.
 
            Temporally located means that the start time of the "previous" map is
            temporally located BEFORE the start time of this map, but temporally
            near than other maps of the same dataset.
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
-        self._temporal_topology["PREV"] = _map
+        self._temporal_topology["PREV"] = map_
 
     def temporal_next(self):
         """!Return the map with a start time temporally located after
@@ -117,14 +124,14 @@
             return None
         return self._temporal_topology["PREV"]
 
-    def append_temporal_equivalent(self, _map):
+    def append_temporal_equivalent(self, map_):
         """!Append a map with equivalent temporal extent as this map
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "EQUAL" not in self._temporal_topology:
             self._temporal_topology["EQUAL"] = []
-        self._temporal_topology["EQUAL"].append(_map)
+        self._temporal_topology["EQUAL"].append(map_)
 
     def get_temporal_equivalent(self):
         """!Return a list of map objects with equivalent temporal extent as this map
@@ -135,14 +142,14 @@
             return None
         return self._temporal_topology["EQUAL"]
 
-    def append_temporal_overlaps(self, _map):
+    def append_temporal_overlaps(self, map_):
         """!Append a map that this map temporally overlaps
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "OVERLAPS" not in self._temporal_topology:
             self._temporal_topology["OVERLAPS"] = []
-        self._temporal_topology["OVERLAPS"].append(_map)
+        self._temporal_topology["OVERLAPS"].append(map_)
 
     def get_temporal_overlaps(self):
         """!Return a list of map objects that this map temporally overlaps
@@ -153,14 +160,14 @@
             return None
         return self._temporal_topology["OVERLAPS"]
 
-    def append_temporal_overlapped(self, _map):
+    def append_temporal_overlapped(self, map_):
         """!Append a map that this map temporally overlapped
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "OVERLAPPED" not in self._temporal_topology:
             self._temporal_topology["OVERLAPPED"] = []
-        self._temporal_topology["OVERLAPPED"].append(_map)
+        self._temporal_topology["OVERLAPPED"].append(map_)
 
     def get_temporal_overlapped(self):
         """!Return a list of map objects that this map temporally overlapped
@@ -171,14 +178,14 @@
             return None
         return self._temporal_topology["OVERLAPPED"]
 
-    def append_temporal_follows(self, _map):
+    def append_temporal_follows(self, map_):
         """!Append a map that this map temporally follows
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "FOLLOWS" not in self._temporal_topology:
             self._temporal_topology["FOLLOWS"] = []
-        self._temporal_topology["FOLLOWS"].append(_map)
+        self._temporal_topology["FOLLOWS"].append(map_)
 
     def get_temporal_follows(self):
         """!Return a list of map objects that this map temporally follows
@@ -189,14 +196,14 @@
             return None
         return self._temporal_topology["FOLLOWS"]
 
-    def append_temporal_precedes(self, _map):
+    def append_temporal_precedes(self, map_):
         """!Append a map that this map temporally precedes
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "PRECEDES" not in self._temporal_topology:
             self._temporal_topology["PRECEDES"] = []
-        self._temporal_topology["PRECEDES"].append(_map)
+        self._temporal_topology["PRECEDES"].append(map_)
 
     def get_temporal_precedes(self):
         """!Return a list of map objects that this map temporally precedes
@@ -207,15 +214,15 @@
             return None
         return self._temporal_topology["PRECEDES"]
 
-    def append_temporal_during(self, _map):
+    def append_temporal_during(self, map_):
         """!Append a map that this map is temporally located during
            This includes temporal relationships starts and finishes
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "DURING" not in self._temporal_topology:
             self._temporal_topology["DURING"] = []
-        self._temporal_topology["DURING"].append(_map)
+        self._temporal_topology["DURING"].append(map_)
 
     def get_temporal_during(self):
         """!Return a list of map objects that this map is temporally located during
@@ -227,15 +234,15 @@
             return None
         return self._temporal_topology["DURING"]
 
-    def append_temporal_contains(self, _map):
+    def append_temporal_contains(self, map_):
         """!Append a map that this map temporally contains
            This includes temporal relationships started and finished
 
-           @param _map: This object should be of type abstract_map_dataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset or derived classes
         """
         if "CONTAINS" not in self._temporal_topology:
             self._temporal_topology["CONTAINS"] = []
-        self._temporal_topology["CONTAINS"].append(_map)
+        self._temporal_topology["CONTAINS"].append(map_)
 
     def get_temporal_contains(self):
         """!Return a list of map objects that this map temporally contains
@@ -250,14 +257,14 @@
     def _generate_map_list_string(self, map_list, line_wrap=True):
         count = 0
         string = ""
-        for _map in map_list:
+        for map_ in map_list:
             if line_wrap and count > 0 and count % 3 == 0:
                 string += "\n | ............................ "
                 count = 0
             if count == 0:
-                string += _map.get_id()
+                string += map_.get_id()
             else:
-                string += ",%s" % _map.get_id()
+                string += ",%s" % map_.get_id()
             count += 1
 
         return string
@@ -292,29 +299,29 @@
 
         print " +-------------------- Temporal Topology -------------------------------------+"
         #          0123456789012345678901234567890
-        if _next:
+        if _next is not None:
             print " | Next: ...................... " + str(_next.get_id())
-        if _prev:
+        if _prev is not None:
             print " | Previous: .................. " + str(_prev.get_id())
-        if _equal:
+        if _equal is not None:
             print " | Equivalent: ................ " + \
                 self._generate_map_list_string(_equal)
-        if _follows:
+        if _follows is not None:
             print " | Follows: ................... " + \
                 self._generate_map_list_string(_follows)
-        if _precedes:
+        if _precedes is not None:
             print " | Precedes: .................. " + \
                 self._generate_map_list_string(_precedes)
-        if _overlaps:
+        if _overlaps is not None:
             print " | Overlaps: .................. " + \
                 self._generate_map_list_string(_overlaps)
-        if _overlapped:
+        if _overlapped is not None:
             print " | Overlapped: ................ " + \
                 self._generate_map_list_string(_overlapped)
-        if _during:
+        if _during is not None:
             print " | During: .................... " + \
                 self._generate_map_list_string(_during)
-        if _contains:
+        if _contains is not None:
             print " | Contains: .................. " + \
                 self._generate_map_list_string(_contains)
 
@@ -331,33 +338,33 @@
         _during = self.get_temporal_during()
         _contains = self.get_temporal_contains()
 
-        if _next:
+        if _next is not None:
             print "next=" + _next.get_id()
-        if _prev:
+        if _prev is not None:
             print "prev=" + _prev.get_id()
-        if _equal:
+        if _equal is not None:
             print "equivalent=" + self._generate_map_list_string(_equal, False)
-        if _follows:
+        if _follows is not None:
             print "follows=" + self._generate_map_list_string(_follows, False)
-        if _precedes:
+        if _precedes is not None:
             print "precedes=" + self._generate_map_list_string(
                 _precedes, False)
-        if _overlaps:
+        if _overlaps is not None:
             print "overlaps=" + self._generate_map_list_string(
                 _overlaps, False)
-        if _overlapped:
+        if _overlapped is not None:
             print "overlapped=" + \
                 self._generate_map_list_string(_overlapped, False)
-        if _during:
+        if _during is not None:
             print "during=" + self._generate_map_list_string(_during, False)
-        if _contains:
+        if _contains is not None:
             print "contains=" + self._generate_map_list_string(
                 _contains, False)
 
 ###############################################################################
 
 
-class abstract_map_dataset(TemporalMapRelations):
+class AbstractMapDataset(TemporalMapRelations):
     """!This is the base class for all maps (raster, vector, raster3d)
        providing additional function to set the valid time and the spatial extent.
     """
@@ -450,7 +457,7 @@
         if self.is_time_absolute():
             start_time, end_time, tz = self.get_absolute_time()
             start = datetime_to_grass_datetime_string(start_time)
-            if end_time:
+            if end_time is not None:
                 end = datetime_to_grass_datetime_string(end_time)
                 start += " / %s" % (end)
         else:
@@ -463,11 +470,14 @@
         return start
 
     def get_map_id(self):
-        """!Return the map id. The map id is the unique map identifier in grass and must not be equal to the
-           primary key identifier (id) of the map in the database. Since vector maps may have layer information,
+        """!Return the map id. The map id is the unique map identifier 
+           in grass and must not be equal to the
+           primary key identifier (id) of the map in the database. 
+           Since vector maps may have layer information,
            the unique id is a combination of name, layer and mapset.
 
-           Use get_map_id() every time your need to access the grass map in the file system but not to identify
+           Use get_map_id() every time you need to access the grass map 
+           in the file system but not to identify
            map information in the temporal database.
 
         """
@@ -489,7 +499,7 @@
         if name.find(":") >= 0:
             name, layer = name.split(":")
 
-        if layer:
+        if layer is not None:
             return "%s:%s@%s" % (name, layer, mapset)
         else:
             return "%s@%s" % (name, mapset)
@@ -527,7 +537,7 @@
         datasets = self.get_registered_datasets()
         count = 0
         string = ""
-        if datasets:
+        if datasets is not None:
             for ds in datasets:
                 if count > 0 and count % 3 == 0:
                     string += "\n | ............................ "
@@ -554,7 +564,7 @@
         datasets = self.get_registered_datasets()
         count = 0
         string = ""
-        if datasets:
+        if datasets is not None:
             for ds in datasets:
                 if count == 0:
                     string += ds["id"]
@@ -569,40 +579,46 @@
     def insert(self, dbif=None, execute=True):
         """!Insert temporal dataset entry into database from the internal structure
 
-           This functions assures that the timetsamp is written to the grass file system based database
+           This function assures that the timestamp is written to the 
+           grass file system based database
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are 
+                           returned and must be executed by the caller.
         """
         self.write_timestamp_to_grass()
-        return abstract_dataset.insert(self, dbif, execute)
+        return AbstractDataset.insert(self, dbif, execute)
 
     def update(self, dbif=None, execute=True):
         """!Update temporal dataset entry of database from the internal structure
            excluding None variables
 
-           This functions assures that the timetsamp is written to the grass file system based database
+           This function assures that the timestamp is written to the 
+           grass file system based database
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are 
+                           returned and must be executed by the caller.
         """
         self.write_timestamp_to_grass()
-        return abstract_dataset.update(self, dbif, execute)
+        return AbstractDataset.update(self, dbif, execute)
 
     def update_all(self, dbif=None, execute=True):
         """!Update temporal dataset entry of database from the internal structure
            and include None varuables.
 
-           This functions assures that the timetsamp is written to the grass file system based database
+           This function assures that the timestamp is written to the 
+           grass file system based database
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are 
+                           returned and must be executed by the caller.
         """
         self.write_timestamp_to_grass()
-        return abstract_dataset.update_all(self, dbif, execute)
+        return AbstractDataset.update_all(self, dbif, execute)
 
     def set_absolute_time(self, start_time, end_time=None, timezone=None):
         """!Set the absolute time interval with start time and end time
@@ -610,10 +626,9 @@
            @param start_time: a datetime object specifying the start time of the map
            @param end_time: a datetime object specifying the end time of the map
            @param timezone: Thee timezone of the map
-
         """
         if start_time and not isinstance(start_time, datetime):
-            if self.get_layer():
+            if self.get_layer() is not None:
                 core.fatal(_("Start time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
             else:
                 core.fatal(_("Start time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
@@ -624,7 +639,7 @@
             else:
                 core.fatal(_("End time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
 
-        if start_time and end_time:
+        if start_time is not None and end_time is not None:
             if start_time > end_time:
                 if self.get_layer():
                     core.fatal(_("End time must be greater than start time for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
@@ -636,7 +651,6 @@
                     end_time = None
 
         self.base.set_ttype("absolute")
-
         self.absolute_time.set_start_time(start_time)
         self.absolute_time.set_end_time(end_time)
         self.absolute_time.set_timezone(timezone)
@@ -644,7 +658,8 @@
     def update_absolute_time(self, start_time, end_time=None, timezone=None, dbif=None):
         """!Update the absolute time
 
-           This functions assures that the timetsamp is written to the grass file system based database
+           This function assures that the timestamp is written to the 
+           grass file system based database
 
            @param start_time: a datetime object specifying the start time of the map
            @param end_time: a datetime object specifying the end time of the map
@@ -656,7 +671,7 @@
         self.absolute_time.update_all(dbif)
         self.base.update(dbif)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         self.write_timestamp_to_grass()
@@ -666,14 +681,15 @@
 
            @param start_time: A double value
            @param end_time: A double value
-           @param unit: The unit of the relative time. Supported units: years, months, days, hours, minutes, seconds
+           @param unit: The unit of the relative time. Supported units: 
+                        years, months, days, hours, minutes, seconds
 
            Return True for success and False otherwise
 
         """
 
         if not self.check_relative_time_unit(unit):
-            if self.get_layer():
+            if self.get_layer() is not None:
                 core.error(_("Unsupported relative time unit type for %s map <%s> with layer %s: %s") % (self.get_type(), self.get_id(), self.get_layer(), unit))
             else:
                 core.error(_("Unsupported relative time unit type for %s map <%s>: %s") % (self.get_type(), self.get_id(), unit))
@@ -681,7 +697,7 @@
 
         if start_time is not None and end_time is not None:
             if int(start_time) > int(end_time):
-                if self.get_layer():
+                if self.get_layer() is not None:
                     core.error(_("End time must be greater than start time for %s map <%s> with layer %s") % (self.get_type(), self.get_id(), self.get_layer()))
                 else:
                     core.error(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_id()))
@@ -705,7 +721,8 @@
     def update_relative_time(self, start_time, end_time, unit, dbif=None):
         """!Update the relative time interval
 
-           This functions assures that the timetsamp is written to the grass file system based database
+           This function assures that the timestamp is written to the 
+           grass file system based database
 
            @param start_time: A double value
            @param end_time: A double value
@@ -717,7 +734,7 @@
             self.relative_time.update_all(dbif)
             self.base.update(dbif)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         self.write_timestamp_to_grass()
@@ -745,7 +762,7 @@
         if start is not None:
             if end is not None:
                 if start >= end:
-                    if self.get_layer():
+                    if self.get_layer() is not None:
                         core.error(_("Map <%s> with layer %s has incorrect time interval, start time is greater than end time") % (self.get_map_id(), self.get_layer()))
                     else:
                         core.error(_("Map <%s> has incorrect time interval, start time is greater than end time") % (self.get_map_id()))
@@ -765,12 +782,15 @@
             * Remove the space time dataset register table
 
            @param dbif: The database interface to be used
-           @param update: Call for each unregister statement the update from registered maps
-                          of the space time dataset. This can slow down the un-registration process significantly.
+           @param update: Call for each unregister statement the update from 
+                          registered maps of the space time dataset. 
+                          This can slow down the un-registration process significantly.
            @param execute: If True the SQL DELETE and DROP table statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are 
+                           returned and must be executed by the caller.
 
-           @return The SQL statements if execute == False, else an empty string, None in case of a failure
+           @return The SQL statements if execute == False, else an empty string, 
+                   None in case of a failure
         """
 
         dbif, connect = init_dbif(dbif)
@@ -786,7 +806,7 @@
                 dbif=dbif, update=update, execute=False)
 
             # Remove the strds register table
-            if self.get_stds_register():
+            if self.get_stds_register() is not None:
                 statement += "DROP TABLE " + self.get_stds_register() + ";\n"
 
             core.verbose(_("Delete %s dataset <%s> from temporal database")
@@ -795,7 +815,7 @@
             # Delete yourself from the database, trigger functions will take care of dependencies
             statement += self.base.get_delete_statement()
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
 
         # Remove the timestamp from the file system
@@ -803,7 +823,7 @@
 
         self.reset(None)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         if execute:
@@ -816,14 +836,16 @@
 
            @param dbif: The database interface to be used
            @param update: Call for each unregister statement the update from registered maps
-                          of the space time dataset. This can slow down the un-registration process significantly.
+                          of the space time dataset. This can slow down the 
+                          un-registration process significantly.
            @param execute: If True the SQL DELETE and DROP table statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are 
+                           returned and must be executed by the caller.
 
            @return The SQL statements if execute == False, else an empty string
         """
 
-        if self.get_layer():
+        if self.get_layer() is not None:
             core.verbose(_("Unregister %s map <%s> with layer %s from space time datasets") %
                          (self.get_type(), self.get_map_id(), self.get_layer()))
         else:
@@ -837,7 +859,7 @@
         rows = self.get_registered_datasets(dbif)
 
         # For each stds in which the map is registered
-        if rows:
+        if rows is not None:
             count = 0
             num_sps = len(rows)
             for row in rows:
@@ -855,10 +877,10 @@
 
             core.percent(1, 1, 1)
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         if execute:
@@ -868,8 +890,8 @@
 
     def get_registered_datasets(self, dbif=None):
         """!Return all space time dataset ids in which this map is registered as
-           dictionary like rows with column "id" or None if this map is not registered in any
-           space time dataset.
+           dictionary like rows with column "id" or None if this map is not 
+           registered in any space time dataset.
 
            @param dbif: The database interface to be used
         """
@@ -886,7 +908,13 @@
         except:
             core.error(_("Unable to select space time dataset register table <%s>") % (self.get_stds_register()))
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         return rows
+
+###############################################################################
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
\ No newline at end of file

Modified: grass/trunk/lib/python/temporal/abstract_space_time_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_space_time_dataset.py	2012-08-10 16:12:42 UTC (rev 52622)
+++ grass/trunk/lib/python/temporal/abstract_space_time_dataset.py	2012-08-10 18:33:57 UTC (rev 52623)
@@ -5,15 +5,8 @@
 
 Temporal GIS related functions to be used in temporal GIS Python library package.
 
-Usage:
-
- at code
-import grass.temporal as tgis
-
-...
- at endcode
-
-(C) 2008-2011 by the GRASS Development Team
+    
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -26,7 +19,8 @@
 
 ###############################################################################
 
-class abstract_space_time_dataset(abstract_dataset):
+
+class AbstractSpaceTimeDataset(AbstractDataset):
     """!Abstract space time dataset class
 
        This class represents a space time dataset. Convenient functions
@@ -39,7 +33,7 @@
        collecting of metadata.
     """
     def __init__(self, ident):
-	self.reset(ident)
+        self.reset(ident)
         self.map_counter = 0
 
     def get_new_map_instance(self, ident=None):
@@ -47,11 +41,13 @@
 
            @param ident: The unique identifier of the new object
         """
-        raise ImplementationError("This method must be implemented in the subclasses")
+        raise ImplementationError(
+            "This method must be implemented in the subclasses")
 
     def get_map_register(self):
         """!Return the name of the map register table"""
-        raise ImplementationError("This method must be implemented in the subclasses")
+        raise ImplementationError(
+            "This method must be implemented in the subclasses")
 
     def set_map_register(self, name):
         """!Set the name of the map register table
@@ -60,21 +56,22 @@
 
            @param name: The name of the register table
         """
-        raise ImplementationError("This method must be implemented in the subclasses")
- 
+        raise ImplementationError(
+            "This method must be implemented in the subclasses")
+
     def print_self(self):
-	"""!Print the content of the internal structure to stdout"""
-	self.base.print_self()
-	if self.is_time_absolute():
-	    self.absolute_time.print_self()
+        """!Print the content of the internal structure to stdout"""
+        self.base.print_self()
+        if self.is_time_absolute():
+            self.absolute_time.print_self()
         if self.is_time_relative():
-	    self.relative_time.print_self()
-	self.spatial_extent.print_self()
-	self.metadata.print_self()
+            self.relative_time.print_self()
+        self.spatial_extent.print_self()
+        self.metadata.print_self()
 
     def print_info(self):
         """!Print information about this class in human readable style"""
-        
+
         if self.get_type() == "strds":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
@@ -91,26 +88,26 @@
             print ""
             print " +-------------------- Space Time Vector Dataset -----------------------------+"
         print " |                                                                            |"
-	self.base.print_info()
-	if self.is_time_absolute():
-	    self.absolute_time.print_info()
+        self.base.print_info()
+        if self.is_time_absolute():
+            self.absolute_time.print_info()
         if self.is_time_relative():
-	    self.relative_time.print_info()
-	self.spatial_extent.print_info()
-	self.metadata.print_info()
+            self.relative_time.print_info()
+        self.spatial_extent.print_info()
+        self.metadata.print_info()
         print " +----------------------------------------------------------------------------+"
 
     def print_shell_info(self):
         """!Print information about this class in shell style"""
-	self.base.print_shell_info()
-	if self.is_time_absolute():
-	    self.absolute_time.print_shell_info()
+        self.base.print_shell_info()
+        if self.is_time_absolute():
+            self.absolute_time.print_shell_info()
         if self.is_time_relative():
-	    self.relative_time.print_shell_info()
-	self.spatial_extent.print_shell_info()
-	self.metadata.print_shell_info()
+            self.relative_time.print_shell_info()
+        self.spatial_extent.print_shell_info()
+        self.metadata.print_shell_info()
 
-    def set_initial_values(self, temporal_type, semantic_type, \
+    def set_initial_values(self, temporal_type, semantic_type,
                            title=None, description=None):
         """!Set the initial values of the space time dataset
 
@@ -137,7 +134,7 @@
 
     def get_initial_values(self):
         """!Return the initial values: temporal_type, semantic_type, title, description"""
-        
+
         temporal_type = self.get_temporal_type()
         semantic_type = self.base.get_semantic_type()
         title = self.metadata.get_title()
@@ -147,11 +144,11 @@
 
     def get_granularity(self):
         """!Return the granularity"""
-        
+
         temporal_type = self.get_temporal_type()
 
         if temporal_type == "absolute":
-            granularity   = self.absolute_time.get_granularity()
+            granularity = self.absolute_time.get_granularity()
         elif temporal_type == "relative":
             granularity = self.relative_time.get_granularity()
 
@@ -160,7 +157,7 @@
     def set_granularity(self, granularity):
 
         temporal_type = self.get_temporal_type()
- 
+
         if temporal_type == "absolute":
             self.set_time_to_absolute()
             self.absolute_time.set_granularity(granularity)
@@ -172,12 +169,12 @@
 
     def set_relative_time_unit(self, unit):
         """!Set the relative time unit which may be of type: years, months, days, hours, minutes or seconds
-        
+
            All maps registered in a (relative time) space time dataset must have the same unit
         """
 
         temporal_type = self.get_temporal_type()
- 
+
         if temporal_type == "relative":
             if not self.check_relative_time_unit(unit):
                 core.fatal(_("Unsupported temporal unit: %s") % (unit))
@@ -185,13 +182,13 @@
 
     def get_map_time(self):
         """!Return the type of the map time, interval, point, mixed or invalid"""
-        
+
         temporal_type = self.get_temporal_type()
 
         if temporal_type == "absolute":
-            map_time   = self.absolute_time.get_map_time()
+            map_time = self.absolute_time.get_map_time()
         elif temporal_type == "relative":
-            map_time   = self.relative_time.get_map_time()
+            map_time = self.relative_time.get_map_time()
 
         return map_time
 
@@ -205,13 +202,14 @@
            * interval -> start and end time
            * invalid  -> No valid time point or interval found
 
-           @param maps: A sorted (start_time) list of abstract_dataset objects
+           @param maps: A sorted (start_time) list of AbstractDataset objects
            @param dbif: The database interface to be used
         """
 
-        if maps == None:
-            maps = get_registered_maps_as_objects(where=None, order="start_time", dbif=dbif)
-            
+        if maps is None:
+            maps = get_registered_maps_as_objects(
+                where=None, order="start_time", dbif=dbif)
+
         time_invalid = 0
         time_point = 0
         time_interval = 0
@@ -224,9 +222,9 @@
             if maps[i].is_time_relative():
                 start, end, unit = maps[i].get_relative_time()
 
-            if start != None and end != None:
+            if start is not None and end is not None:
                 time_interval += 1
-            elif start != None and end == None:
+            elif start is not None and end is None:
                 time_point += 1
             else:
                 time_invalid += 1
@@ -239,15 +237,16 @@
 
     def count_gaps(self, maps=None, dbif=None):
         """!Count the number of gaps between temporal neighbors
-        
-           @param maps: A sorted (start_time) list of abstract_dataset objects
+
+           @param maps: A sorted (start_time) list of AbstractDataset objects
            @param dbif: The database interface to be used
            @return The numbers of gaps between temporal neighbors
         """
 
-        if maps == None:
-            maps = self.get_registered_maps_as_objects(where=None, order="start_time", dbif=dbif)
-            
+        if maps is None:
+            maps = self.get_registered_maps_as_objects(
+                where=None, order="start_time", dbif=dbif)
+
         gaps = 0
 
         # Check for gaps
@@ -258,36 +257,38 @@
                     gaps += 1
 
         return gaps
-        
+
     def print_temporal_relationships(self, maps=None, dbif=None):
         """!Print the temporal relation matrix of all registered maps to stdout
 
            The temporal relation matrix includes the temporal relations between
            all registered maps. The relations are strings stored in a list of lists.
-           
+
+           @param maps: a list of map objects, ordered by start_time
            @param dbif: The database interface to be used
         """
-        
-        if maps == None:
-            maps = self.get_registered_maps_as_objects(where=None, order="start_time", dbif=dbif)
-	    
-	print_temporal_topology_relationships(maps, maps)
 
+        if maps is None:
+            maps = self.get_registered_maps_as_objects(
+                where=None, order="start_time", dbif=dbif)
+
+        print_temporal_topology_relationships(maps, maps)
+
     def count_temporal_relations(self, maps=None, dbif=None):
         """!Count the temporal relations between the registered maps.
 
-           The map list must be ordered by start time. Temporal relations are counted 
+           The map list must be ordered by start time. Temporal relations are counted
            by analysing the sparse upper right side temporal relationships matrix.
 
-           @param maps: A sorted (start_time) list of abstract_dataset objects
+           @param maps: A sorted (start_time) list of AbstractDataset objects
            @param dbif: The database interface to be used
            @return A dictionary with counted temporal relationships
         """
-        
-        if maps == None:
-            maps = self.get_registered_maps_as_objects(where=None, order="start_time", dbif=dbif)
 
+        if maps is None:
+            maps = self.get_registered_maps_as_objects(
+                where=None, order="start_time", dbif=dbif)
+
         return count_temporal_topology_relationships(maps, maps)
 
     def check_temporal_topology(self, maps=None, dbif=None):
@@ -315,37 +316,38 @@
            started    -> not allowed
            finished   -> not allowed
 
-           @param maps: A sorted (start_time) list of abstract_dataset objects
+           @param maps: A sorted (start_time) list of AbstractDataset objects
            @return True if topology is correct
         """
-        if maps == None:
-            maps = self.get_registered_maps_as_objects(where=None, order="start_time", dbif=dbif)
+        if maps is None:
+            maps = self.get_registered_maps_as_objects(
+                where=None, order="start_time", dbif=dbif)
 
         relations = count_temporal_topology_relationships(maps, maps)
 
         map_time = self.get_map_time()
 
         if map_time == "interval" or map_time == "mixed":
-            if relations.has_key("equivalent"):
+            if "equivalent" in relations:
                 return False
-            if relations.has_key("during"):
+            if "during" in relations:
                 return False
-            if relations.has_key("contains"):
+            if "contains" in relations:
                 return False
-            if relations.has_key("overlaps"):
+            if "overlaps" in relations:
                 return False
-            if relations.has_key("overlapped"):
+            if "overlapped" in relations:
                 return False
-            if relations.has_key("starts"):
+            if "starts" in relations:
                 return False
-            if relations.has_key("finishes"):
+            if "finishes" in relations:
                 return False
-            if relations.has_key("started"):
+            if "started" in relations:
                 return False
-            if relations.has_key("finished"):
+            if "finished" in relations:
                 return False
         elif map_time == "point":
-            if relations.has_key("equivalent"):
+            if "equivalent" in relations:
                 return False
         else:
             return False
@@ -359,15 +361,16 @@
 
            In case spatial is True, the spatial overlap between temporally related maps is checked. Only
            temporally related and spatially overlapping maps are returned.
-        
-           Return all registered maps as ordered (by start_time) object list with 
+
+           Return all registered maps as ordered (by start_time) object list with
            "gap" map objects (id==None). Each list entry is a list of map objects
            which are potentially located in temporal relation to the actual granule of the second space time dataset.
 
            Each entry in the object list is a dict. The actual sampler map and its temporal extent (the actual granule) and
            the list of samples are stored:
 
-           list = self.sample_by_dataset(stds=sampler, method=["during","overlap","contain","equal"])    
+           list = self.sample_by_dataset(stds=sampler, method=[
+               "during","overlap","contain","equal"])
            for entry in list:
                granule = entry["granule"]
                maplist = entry["samples"]
@@ -376,8 +379,8 @@
                    map.print_info()
 
            A valid temporal topology (no overlapping or inclusion allowed) is needed to get correct results in case of gaps
-           in the sample dataset. 
-    
+           in the sample dataset.
+
            Gaps between maps are identified as unregistered maps with id==None.
 
            The map objects are initialized with the id and the temporal extent of the granule (temporal type, start time, end time).
@@ -418,19 +421,19 @@
                           * follows: Select maps which temporally follow the selection granule
                             map    :              s-----------e
                             granule:  s-----------e
-                     
+
                           * precedes: Select maps which temporally precedes the selection granule
                             map    :  s-----------e
                             granule:              s-----------e
 
                           All these methods can be combined. Method must be of type tuple including the identification strings.
+           @param spatial: If set to True, the spatial overlap is additionally used for selection -> spatio-temporal relation.
+           @param spatial: If set True additional the spatial overlapping is used for selection -> spatio-temporal relation.
                            The returned map objects will have temporal and spatial extents
            @param dbif: The database interface to be used
 
            In case nothing found None is returned
         """
-        
+
         use_start = False
         use_during = False
         use_overlap = False
@@ -440,7 +443,7 @@
         use_precedes = False
 
         # Initialize the methods
-        if method:
+        if method is not None:
             for name in method:
                 if name == "start":
                     use_start = True
@@ -483,28 +486,31 @@
         dbif, connect = init_dbif(dbif)
 
         obj_list = []
-        sample_maps = stds.get_registered_maps_as_objects_with_gaps(where=None, dbif=dbif)
-        
+        sample_maps = stds.get_registered_maps_as_objects_with_gaps(
+            where=None, dbif=dbif)
+
         for granule in sample_maps:
             # Read the spatial extent
-            if spatial == True:
+            if spatial:
                 granule.spatial_extent.select(dbif)
             start, end = granule.get_valid_time()
 
-            where = create_temporal_relation_sql_where_statement(start, end, use_start, \
-                    use_during, use_overlap, use_contain, use_equal, use_follows, use_precedes)  
+            where = create_temporal_relation_sql_where_statement(
+                start, end, use_start,
+                use_during, use_overlap, use_contain, use_equal, use_follows, use_precedes)
 
-            maps = self.get_registered_maps_as_objects(where, "start_time", dbif)
+            maps = self.get_registered_maps_as_objects(
+                where, "start_time", dbif)
 
             result = {}
             result["granule"] = granule
             num_samples = 0
             maplist = []
 
-            if maps:
+            if maps is not None:
                 for map in maps:
                     # Read the spatial extent
-                    if spatial == True:
+                    if spatial:
                         map.spatial_extent.select(dbif)
                         # Ignore spatial disjoint maps
                         if not granule.spatial_overlapping(map):
@@ -514,13 +520,14 @@
                     maplist.append(copy.copy(map))
 
             # Fill with empty map in case no spatio-temporal relations found
-            if not maps or num_samples == 0:
+            if maps is None or num_samples == 0:
                 map = self.get_new_map_instance(None)
 
                 if self.is_time_absolute():
                     map.set_absolute_time(start, end)
                 elif self.is_time_relative():
-                    map.set_relative_time(start, end, self.get_relative_time_unit())
+                    map.set_relative_time(start, end,
+                                          self.get_relative_time_unit())
 
                 maplist.append(copy.copy(map))
 
@@ -528,19 +535,19 @@
 
             obj_list.append(copy.copy(result))
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         return obj_list
 
     def get_registered_maps_as_objects_by_granularity(self, gran=None, dbif=None):
-        """!Return all registered maps as ordered (by start_time) object list with 
+        """!Return all registered maps as ordered (by start_time) object list with
            "gap" map objects (id==None) for temporal topological operations using the
            granularity of the space time dataset as increment. Each list entry is a list of map objects
            which are potentially located in the actual granule.
 
-           A valid temporal topology (no overlapping or inclusion allowed) is needed to get correct results. 
-    
+           A valid temporal topology (no overlapping or inclusion allowed) is needed to get correct results.
+
            The dataset must have "interval" as temporal map type, so all maps have valid interval time.
 
            Gaps between maps are identified as unregistered maps with id==None.
@@ -548,7 +555,7 @@
            The objects are initialized with the id and the temporal extent (temporal type, start time, end time).
            In case more map information are needed, use the select() method for each listed object.
 
-           @param gran: The granularity to be used 
+           @param gran: The granularity to be used
            @param dbif: The database interface to be used
 
            In case nothing found None is returned
@@ -558,7 +565,7 @@
 
         obj_list = []
 
-        if gran == None:
+        if gran is None:
             gran = self.get_granularity()
 
         start, end = self.get_valid_time()
@@ -573,13 +580,13 @@
 
             rows = self.get_registered_maps("id", where, "start_time", dbif)
 
-            if rows:
+            if rows is not None:
                 if len(rows) > 1:
                     core.warning(_("More than one map found in a granule. Temporal granularity seems to be invalid or the chosen granularity is not a greatest common divider of all intervals and gaps in the dataset."))
- 
+
                 maplist = []
                 for row in rows:
-                   # Take the first map    
+                   # Take the first map
                     map = self.get_new_map_instance(rows[0]["id"])
 
                     if self.is_time_absolute():
@@ -589,17 +596,17 @@
 
                     maplist.append(copy.copy(map))
 
-            	obj_list.append(copy.copy(maplist))
+                obj_list.append(copy.copy(maplist))
 
             start = next
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         return obj_list
 
     def get_registered_maps_as_objects_with_gaps(self, where=None, dbif=None):
-        """!Return all registered maps as ordered (by start_time) object list with 
+        """!Return all registered maps as ordered (by start_time) object list with
            "gap" map objects (id==None) for temporal topological operations
 
            Gaps between maps are identified as maps with id==None
@@ -616,10 +623,10 @@
         dbif, connect = init_dbif(dbif)
 
         obj_list = []
-        
+
         maps = self.get_registered_maps_as_objects(where, "start_time", dbif)
 
-        if maps and len(maps) > 0:
+        if maps  is not None and len(maps) > 0:
             for i in range(len(maps)):
                 obj_list.append(maps[i])
                 # Detect and insert gaps
@@ -629,7 +636,7 @@
                         start1, end1 = maps[i].get_valid_time()
                         start2, end2 = maps[i + 1].get_valid_time()
                         end = start2
-                        if end1:
+                        if end1 is not None:
                             start = end1
                         else:
                             start = start1
@@ -642,7 +649,7 @@
                             map.set_relative_time(start, end, self.get_relative_time_unit())
                         obj_list.append(copy.copy(map))
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         return obj_list
@@ -663,31 +670,33 @@
         dbif, connect = init_dbif(dbif)
 
         obj_list = []
-        
-        rows = self.get_registered_maps("id,start_time,end_time", where, order, dbif)
 
+        rows = self.get_registered_maps(
+            "id,start_time,end_time", where, order, dbif)
+
         count = 0
-        if rows:
+        if rows is not None:
             for row in rows:
                 core.percent(count, len(rows), 1)
                 map = self.get_new_map_instance(row["id"])
                 if self.is_time_absolute():
                     map.set_absolute_time(row["start_time"], row["end_time"])
                 elif self.is_time_relative():
-                    map.set_relative_time(row["start_time"], row["end_time"], self.get_relative_time_unit())
+                    map.set_relative_time(row["start_time"], row["end_time"],
+                                          self.get_relative_time_unit())
                 obj_list.append(copy.copy(map))
                 count += 1
 
         core.percent(1, 1, 1)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         return obj_list
 
-    def get_registered_maps(self, columns=None, where = None, order = None, dbif=None):
+    def get_registered_maps(self, columns=None, where=None, order=None, dbif=None):
         """!Return sqlite rows of all registered maps.
-        
+
            In case columns are not specified, each row includes all columns specified in the datatype specific view
 
            @param columns: Columns to be selected as SQL compliant string
@@ -702,33 +711,36 @@
 
         rows = None
 
-        if self.get_map_register():
+        if self.get_map_register() is not None:
             # Use the correct temporal table
             if self.get_temporal_type() == "absolute":
-                map_view = self.get_new_map_instance(None).get_type() + "_view_abs_time"
+                map_view = self.get_new_map_instance(
+                    None).get_type() + "_view_abs_time"
             else:
-                map_view = self.get_new_map_instance(None).get_type() + "_view_rel_time"
+                map_view = self.get_new_map_instance(
+                    None).get_type() + "_view_rel_time"
 
-            if columns:
+            if columns is not None:
                 sql = "SELECT %s FROM %s  WHERE %s.id IN (SELECT id FROM %s)" % (columns, map_view, map_view, self.get_map_register())
             else:
                 sql = "SELECT * FROM %s  WHERE %s.id IN (SELECT id FROM %s)" % (map_view, map_view, self.get_map_register())
 
-            if where:
+            if where is not None:
                 sql += " AND (%s)" % (where.split(";")[0])
-            if order:
+            if order is not None:
                 sql += " ORDER BY %s" % (order.split(";")[0])
-                
+
             try:
                 dbif.cursor.execute(sql)
                 rows = dbif.cursor.fetchall()
             except:
-                if connect == True:
+                if connect:
                     dbif.close()
-                core.error(_("Unable to get map ids from register table <%s>") % (self.get_map_register()))
+                core.error(_("Unable to get map ids from register table <%s>")
+                           % (self.get_map_register()))
                 raise
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         return rows
@@ -755,20 +767,22 @@
         # SELECT all needed information from the database
         self.metadata.select(dbif)
 
-        if self.get_map_register():
-            core.verbose(_("Drop map register table: %s") %  (self.get_map_register()))
+        if self.get_map_register() is not None:
+            core.verbose(_("Drop map register table: %s") % (
+                self.get_map_register()))
             rows = self.get_registered_maps("id", None, None, dbif)
             # Unregister each registered map in the table
-            if rows:
+            if rows is not None:
                 num_maps = len(rows)
                 count = 0
                 for row in rows:
-	            core.percent(count, num_maps, 1)
+                    core.percent(count, num_maps, 1)
                     # Unregister map
                     map = self.get_new_map_instance(row["id"])
-                    statement += self.unregister_map(map=map, dbif=dbif, execute=False)
+                    statement += self.unregister_map(
+                        map=map, dbif=dbif, execute=False)
                     count += 1
-	        core.percent(1, 1, 1)
+                core.percent(1, 1, 1)
 
             # Safe the DROP table statement
             statement += "DROP TABLE " + self.get_map_register() + ";\n"
@@ -776,12 +790,12 @@
         # Remove the primary key, the foreign keys will be removed by trigger
         statement += self.base.get_delete_statement()
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
 
         self.reset(None)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         if execute:
@@ -805,19 +819,20 @@
         if map.is_in_db(dbif) == False:
             dbif.close()
             core.fatal(_("Only maps with absolute or relative valid time can be registered"))
-	if map.get_layer():
-	    core.verbose(_("Register %s map <%s> with layer %s in space time %s dataset <%s>") %  (map.get_type(), map.get_map_id(), map.get_layer(), map.get_type(), self.get_id()))
-	else:
-	    core.verbose(_("Register %s map <%s> in space time %s dataset <%s>") %  (map.get_type(), map.get_map_id(), map.get_type(), self.get_id()))
+        if map.get_layer():
+            core.verbose(_("Register %s map <%s> with layer %s in space time %s dataset <%s>") % (map.get_type(), map.get_map_id(), map.get_layer(), map.get_type(), self.get_id()))
+        else:
+            core.verbose(_("Register %s map <%s> in space time %s dataset <%s>") % (map.get_type(), map.get_map_id(), map.get_type(), self.get_id()))
 
         # First select all data from the database
         map.select(dbif)
 
         if not map.check_valid_time():
-	    if map.get_layer():
-		core.fatal(_("Map <%s> with layer %s has invalid time") % (map.get_map_id(), map.get_layer()))
-	    else:
-		core.fatal(_("Map <%s> has invalid time") % (map.get_map_id()))
+            if map.get_layer():
+                core.fatal(_("Map <%s> with layer %s has invalid time")
+                           % (map.get_map_id(), map.get_layer()))
+            else:
+                core.fatal(_("Map <%s> has invalid time") % (map.get_map_id()))
 
         map_id = map.base.get_id()
         map_name = map.base.get_name()
@@ -833,33 +848,34 @@
         stds_mapset = self.base.get_mapset()
         stds_register_table = self.get_map_register()
         stds_ttype = self.get_temporal_type()
-        
+
         # The gathered SQL statements are stored here
         statement = ""
 
         # Check temporal types
         if stds_ttype != map_ttype:
-	    if map.get_layer():
-		core.fatal(_("Temporal type of space time dataset <%s> and map <%s> with layer %s are different") % (self.get_id(), map.get_map_id(), map.get_layer()))
-	    else:
-		core.fatal(_("Temporal type of space time dataset <%s> and map <%s> are different") % (self.get_id(), map.get_map_id()))
+            if map.get_layer():
+                core.fatal(_("Temporal type of space time dataset <%s> and map <%s> with layer %s are different") % (self.get_id(), map.get_map_id(), map.get_layer()))
+            else:
+                core.fatal(_("Temporal type of space time dataset <%s> and map <%s> are different") % (self.get_id(), map.get_map_id()))
 
         # In case no map has been registered yet, set the relative time unit from the first map
-        if (self.metadata.get_number_of_maps() == None or  self.metadata.get_number_of_maps() == 0) and \
+        if (self.metadata.get_number_of_maps() is None or self.metadata.get_number_of_maps() == 0) and \
             self.map_counter == 0 and self.is_time_relative():
 
             self.set_relative_time_unit(map_rel_time_unit)
-            statement += self.relative_time.get_update_all_statement_mogrified(dbif)
-            core.verbose(_("Set temporal unit for space time %s dataset <%s> to %s") %  (map.get_type(), self.get_id(), map_rel_time_unit))
+            statement += self.relative_time.get_update_all_statement_mogrified(
+                dbif)
+            core.verbose(_("Set temporal unit for space time %s dataset <%s> to %s") % (map.get_type(), self.get_id(), map_rel_time_unit))
 
         stds_rel_time_unit = self.get_relative_time_unit()
 
         # Check the relative time unit
         if self.is_time_relative() and (stds_rel_time_unit != map_rel_time_unit):
-	    if map.get_layer():
-		core.fatal(_("Relative time units of space time dataset <%s> and map <%s> with layer %s are different") % (self.get_id(), map.get_map_id(), map.get_layer()))
-	    else:
-		core.fatal(_("Relative time units of space time dataset <%s> and map <%s> are different") % (self.get_id(), map.get_map_id()))
+            if map.get_layer():
+                core.fatal(_("Relative time units of space time dataset <%s> and map <%s> with layer %s are different") % (self.get_id(), map.get_map_id(), map.get_layer()))
+            else:
+                core.fatal(_("Relative time units of space time dataset <%s> and map <%s> are different") % (self.get_id(), map.get_map_id()))
 
         #print "STDS register table", stds_register_table
 
@@ -868,11 +884,13 @@
             core.fatal(_("Only maps from the same mapset can be registered"))
 
         # Check if map is already registered
-        if stds_register_table:
-	    if dbmi.paramstyle == "qmark":
-		sql = "SELECT id FROM " + stds_register_table + " WHERE id = (?)"
-	    else:
-		sql = "SELECT id FROM " + stds_register_table + " WHERE id = (%s)"
+        if stds_register_table is not None:
+            if dbmi.paramstyle == "qmark":
+                sql = "SELECT id FROM " + \
+                    stds_register_table + " WHERE id = (?)"
+            else:
+                sql = "SELECT id FROM " + \
+                    stds_register_table + " WHERE id = (%s)"
             try:
                 dbif.cursor.execute(sql, (map_id,))
                 row = dbif.cursor.fetchone()
@@ -881,57 +899,60 @@
                 core.warning(_("Error in strds_register_table request"))
                 raise
 
-            if row and row[0] == map_id:
+            if row is not None and row[0] == map_id:
                 if connect == True:
                     dbif.close()
 
-		if map.get_layer():
-		    core.warning(_("Map <%s> with layer %s is already registered.") % (map.get_map_id(), map.get_layer()))
-		else:
-		    core.warning(_("Map <%s> is already registered.") % (map.get_map_id()))
+                if map.get_layer() is not None:
+                    core.warning(_("Map <%s> with layer %s is already registered.") % (map.get_map_id(), map.get_layer()))
+                else:
+                    core.warning(_("Map <%s> is already registered.")
+                        % (map.get_map_id()))
                 return ""
 
         # Create tables
         sql_path = get_sql_template_path()
 
         # We need to create the map raster register table precedes we can register the map
-        if map_register_table == None:
+        if map_register_table is None:
             # Create a unique id
             uuid_rand = "map_" + str(uuid.uuid4()).replace("-", "")
 
-            map_register_table = uuid_rand + "_" + self.get_type() + "_register"
-            
+            map_register_table = uuid_rand + "_" + \
+                self.get_type() + "_register"
+
             # Read the SQL template
             sql = open(os.path.join(sql_path, "map_stds_register_table_template.sql"), 'r').read()
             # Create the raster, raster3d and vector tables
             sql = sql.replace("GRASS_MAP", map.get_type())
-            sql = sql.replace("MAP_NAME", map_name + "_" + map_mapset )
-            sql = sql.replace("TABLE_NAME", uuid_rand )
+            sql = sql.replace("MAP_NAME", map_name + "_" + map_mapset)
+            sql = sql.replace("TABLE_NAME", uuid_rand)
             sql = sql.replace("MAP_ID", map_id)
             sql = sql.replace("STDS", self.get_type())
-            
+
             statement += sql
 
             # Set the stds register table name and put it into the DB
             map.set_stds_register(map_register_table)
             statement += map.metadata.get_update_statement_mogrified(dbif)
-            
+
             if map.get_layer():
-		core.verbose(_("Created register table <%s> for %s map <%s> with layer %s") % \
-				(map_register_table, map.get_type(), map.get_map_id(), map.get_layer()))
-	    else:
-		core.verbose(_("Created register table <%s> for %s map <%s>") % \
-				(map_register_table, map.get_type(), map.get_map_id()))
+                core.verbose(_("Created register table <%s> for %s map <%s> with layer %s") %
+                                (map_register_table, map.get_type(), map.get_map_id(), map.get_layer()))
+            else:
+                core.verbose(_("Created register table <%s> for %s map <%s>") %
+                                (map_register_table, map.get_type(), map.get_map_id()))
 
         # We need to create the table and register it
-        if stds_register_table == None:
+        if stds_register_table is None:
             # Create table name
-            stds_register_table = stds_name + "_" + stds_mapset + "_" + map.get_type() + "_register"
+            stds_register_table = stds_name + "_" + \
+                stds_mapset + "_" + map.get_type() + "_register"
             # Read the SQL template
             sql = open(os.path.join(sql_path, "stds_map_register_table_template.sql"), 'r').read()
             # Create the raster, raster3d and vector tables
             sql = sql.replace("GRASS_MAP", map.get_type())
-            sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+            sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset)
             sql = sql.replace("SPACETIME_ID", self.base.get_id())
             sql = sql.replace("STDS", self.get_type())
             statement += sql
@@ -940,7 +961,7 @@
             self.set_map_register(stds_register_table)
             statement += self.metadata.get_update_statement_mogrified(dbif)
 
-            core.verbose(_("Created register table <%s> for space time %s  dataset <%s>") % \
+            core.verbose(_("Created register table <%s> for space time %s  dataset <%s>") %
                           (stds_register_table, map.get_type(), self.get_id()))
 
         # We need to execute the statement at this time
@@ -951,10 +972,10 @@
 
         # Register the stds in the map stds register table
         # Check if the entry is already there
-	if dbmi.paramstyle == "qmark":
-	    sql = "SELECT id FROM " + map_register_table + " WHERE id = ?"
-	else:
-	    sql = "SELECT id FROM " + map_register_table + " WHERE id = %s"
+        if dbmi.paramstyle == "qmark":
+            sql = "SELECT id FROM " + map_register_table + " WHERE id = ?"
+        else:
+            sql = "SELECT id FROM " + map_register_table + " WHERE id = %s"
         try:
             dbif.cursor.execute(sql, (self.base.get_id(),))
             row = dbif.cursor.fetchone()
@@ -962,32 +983,37 @@
             row = None
 
         # In case of no entry make a new one
-        if row == None:
-	    if dbmi.paramstyle == "qmark":
-		sql = "INSERT INTO " + map_register_table + " (id) " + "VALUES (?);\n"
-	    else:
-		sql = "INSERT INTO " + map_register_table + " (id) " + "VALUES (%s);\n"
+        if row is None:
+            if dbmi.paramstyle == "qmark":
+                sql = "INSERT INTO " + map_register_table + \
+                    " (id) " + "VALUES (?);\n"
+            else:
+                sql = "INSERT INTO " + map_register_table + \
+                    " (id) " + "VALUES (%s);\n"
 
-            statement += dbif.mogrify_sql_statement((sql, (self.base.get_id(),)))
+            statement += dbif.mogrify_sql_statement(
+                (sql, (self.base.get_id(),)))
 
         # Now put the raster name in the stds map register table
-	if dbmi.paramstyle == "qmark":
-	    sql = "INSERT INTO " + stds_register_table + " (id) " + "VALUES (?);\n"
-	else:
-	    sql = "INSERT INTO " + stds_register_table + " (id) " + "VALUES (%s);\n"
+        if dbmi.paramstyle == "qmark":
+            sql = "INSERT INTO " + stds_register_table + \
+                " (id) " + "VALUES (?);\n"
+        else:
+            sql = "INSERT INTO " + stds_register_table + \
+                " (id) " + "VALUES (%s);\n"
 
         statement += dbif.mogrify_sql_statement((sql, (map_id,)))
 
         # Now execute the insert transaction
         dbif.execute_transaction(statement)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
         # increase the counter
         self.map_counter += 1
 
-    def unregister_map(self, map, dbif = None, execute=True):
+    def unregister_map(self, map, dbif=None, execute=True):
         """!Unregister a map from the space time dataset.
 
            This method takes care of the un-registration of a map
@@ -1012,55 +1038,59 @@
         map_register_table = map.get_stds_register()
         stds_register_table = self.get_map_register()
 
-	if map.get_layer():
-	    core.verbose(_("Unregister %s map <%s> with layer %s") % (map.get_type(), map.get_map_id(), map.get_layer()))
-	else:
-	    core.verbose(_("Unregister %s map <%s>") % (map.get_type(), map.get_map_id()))
+        if map.get_layer() is not None:
+            core.verbose(_("Unregister %s map <%s> with layer %s") % \
+                         (map.get_type(), map.get_map_id(), map.get_layer()))
+        else:
+            core.verbose(_("Unregister %s map <%s>") % (
+                map.get_type(), map.get_map_id()))
 
         # Check if the map is registered in the space time raster dataset
-        if map_register_table != None:
-	    if dbmi.paramstyle == "qmark":
-		sql = "SELECT id FROM " + map_register_table + " WHERE id = ?"
-	    else:
-		sql = "SELECT id FROM " + map_register_table + " WHERE id = %s"
-	    try:
-		dbif.cursor.execute(sql, (self.base.get_id(),))
-		row = dbif.cursor.fetchone()
-	    except:
-		row = None
+        if map_register_table is not None:
+            if dbmi.paramstyle == "qmark":
+                sql = "SELECT id FROM " + map_register_table + " WHERE id = ?"
+            else:
+                sql = "SELECT id FROM " + map_register_table + " WHERE id = %s"
+            try:
+                dbif.cursor.execute(sql, (self.base.get_id(),))
+                row = dbif.cursor.fetchone()
+            except:
+                row = None
 
-	    # Break if the map is not registered
-	    if row == None:
-		if map.get_layer():
-		    core.warning(_("Map <%s> with layer %s is not registered in space time dataset <%s>") %(map.get_map_id(), map.get_layer(), self.base.get_id()))
-		else:
-		    core.warning(_("Map <%s> is not registered in space time dataset <%s>") %(map.get_map_id(), self.base.get_id()))
-		if connect == True:
-		    dbif.close()
-		return ""
+            # Break if the map is not registered
+            if row is None:
+                if map.get_layer() is not None:
+                    core.warning(_("Map <%s> with layer %s is not registered in space time dataset <%s>") % (map.get_map_id(), map.get_layer(), self.base.get_id()))
+                else:
+                    core.warning(_("Map <%s> is not registered in space time dataset <%s>") % (map.get_map_id(), self.base.get_id()))
+                if connect == True:
+                    dbif.close()
+                return ""
 
         # Remove the space time raster dataset from the raster dataset register
-        if map_register_table != None:
+        if map_register_table is not None:
             if dbmi.paramstyle == "qmark":
                 sql = "DELETE FROM " + map_register_table + " WHERE id = ?;\n"
             else:
                 sql = "DELETE FROM " + map_register_table + " WHERE id = %s;\n"
 
-            statement += dbif.mogrify_sql_statement((sql, (self.base.get_id(),)))
+            statement += dbif.mogrify_sql_statement(
+                (sql, (self.base.get_id(),)))
 
         # Remove the raster map from the space time raster dataset register
-        if stds_register_table != None:
+        if stds_register_table is not None:
             if dbmi.paramstyle == "qmark":
                 sql = "DELETE FROM " + stds_register_table + " WHERE id = ?;\n"
             else:
-                sql = "DELETE FROM " + stds_register_table + " WHERE id = %s;\n"
+                sql = "DELETE FROM " + \
+                    stds_register_table + " WHERE id = %s;\n"
 
             statement += dbif.mogrify_sql_statement((sql, (map_id,)))
 
-        if execute == True:
+        if execute:
             dbif.execute_transaction(statement)
-            
-        if connect == True:
+
+        if connect:
             dbif.close()
 
         # decrease the counter
@@ -1070,8 +1100,8 @@
             return ""
 
         return statement
-            
-    def update_from_registered_maps(self, dbif = None):
+
+    def update_from_registered_maps(self, dbif=None):
         """!This methods updates the spatial and temporal extent as well as
            type specific metadata. It should always been called after maps are registered
            or unregistered/deleted from the space time dataset.
@@ -1084,7 +1114,7 @@
            An other solution to automate this is to use the deactivated trigger
            in the SQL files. But this will result in a huge performance issue
            in case many maps are registered (>1000).
-           
+
            @param dbif: The database interface to be used
         """
         core.verbose(_("Update metadata, spatial and temporal extent from all registered maps of <%s>") % (self.get_id()))
@@ -1106,12 +1136,13 @@
 
         #We create a transaction
         sql_script = ""
-        
+
         # Update the spatial and temporal extent from registered maps
         # Read the SQL template
         sql = open(os.path.join(sql_path, "update_stds_spatial_temporal_extent_template.sql"), 'r').read()
-        sql = sql.replace("GRASS_MAP", self.get_new_map_instance(None).get_type())
-        sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+        sql = sql.replace(
+            "GRASS_MAP", self.get_new_map_instance(None).get_type())
+        sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset)
         sql = sql.replace("SPACETIME_ID", self.base.get_id())
         sql = sql.replace("STDS", self.get_type())
 
@@ -1119,9 +1150,11 @@
         sql_script += "\n"
 
         # Update type specific metadata
-        sql = open(os.path.join(sql_path, "update_" + self.get_type() + "_metadata_template.sql"), 'r').read()
-        sql = sql.replace("GRASS_MAP", self.get_new_map_instance(None).get_type())
-        sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+        sql = open(os.path.join(sql_path, "update_" +
+            self.get_type() + "_metadata_template.sql"), 'r').read()
+        sql = sql.replace(
+            "GRASS_MAP", self.get_new_map_instance(None).get_type())
+        sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset)
         sql = sql.replace("SPACETIME_ID", self.base.get_id())
         sql = sql.replace("STDS", self.get_type())
 
@@ -1129,7 +1162,7 @@
         sql_script += "\n"
 
         dbif.execute_transaction(sql_script)
-	    
+
         # Read and validate the selected end time
         self.select()
 
@@ -1139,44 +1172,49 @@
             start_time, end_time, unit = self.get_relative_time()
 
         # In case no end time is set, use the maximum start time of all registered maps as end time
-        if end_time == None:
+        if end_time is None:
             use_start_time = True
         else:
             # Check if the end time is smaller than the maximum start time
             if self.is_time_absolute():
                 sql = """SELECT max(start_time) FROM GRASS_MAP_absolute_time WHERE GRASS_MAP_absolute_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register);"""
-                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(None).get_type())
-                sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
+                    None).get_type())
+                sql = sql.replace("SPACETIME_NAME",
+                    stds_name + "_" + stds_mapset)
             else:
                 sql = """SELECT max(start_time) FROM GRASS_MAP_relative_time WHERE GRASS_MAP_relative_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register);"""
-                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(None).get_type())
-                sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
+                    None).get_type())
+                sql = sql.replace("SPACETIME_NAME",
+                    stds_name + "_" + stds_mapset)
 
             dbif.cursor.execute(sql)
             row = dbif.cursor.fetchone()
 
-            if row != None:
+            if row is not None:
                 # This seems to be a bug in sqlite3 Python driver
-		if dbmi.__name__ == "sqlite3":
-		    tstring = row[0]
-		    # Convert the unicode string into the datetime format
+                if dbmi.__name__ == "sqlite3":
+                    tstring = row[0]
+                    # Convert the unicode string into the datetime format
                     if self.is_time_absolute():
                         if tstring.find(":") > 0:
                             time_format = "%Y-%m-%d %H:%M:%S"
                         else:
                             time_format = "%Y-%m-%d"
 
-		        max_start_time = datetime.strptime(tstring, time_format)
+                        max_start_time = datetime.strptime(
+                            tstring, time_format)
                     else:
-		        max_start_time = row[0]
-		else:
-		    max_start_time = row[0]
+                        max_start_time = row[0]
+                else:
+                    max_start_time = row[0]
 
-		if end_time < max_start_time:
-		    use_start_time = True
-		    
+                if end_time < max_start_time:
+                    use_start_time = True
+
         # Set the maximum start time as end time
         if use_start_time:
             if self.is_time_absolute():
@@ -1184,8 +1222,10 @@
                (SELECT max(start_time) FROM GRASS_MAP_absolute_time WHERE GRASS_MAP_absolute_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register)
                ) WHERE id = 'SPACETIME_ID';"""
-                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(None).get_type())
-                sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
+                    None).get_type())
+                sql = sql.replace("SPACETIME_NAME",
+                    stds_name + "_" + stds_mapset)
                 sql = sql.replace("SPACETIME_ID", self.base.get_id())
                 sql = sql.replace("STDS", self.get_type())
             elif self.is_time_relative():
@@ -1193,8 +1233,10 @@
                (SELECT max(start_time) FROM GRASS_MAP_relative_time WHERE GRASS_MAP_relative_time.id IN
                         (SELECT id FROM SPACETIME_NAME_GRASS_MAP_register)
                ) WHERE id = 'SPACETIME_ID';"""
-                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(None).get_type())
-                sql = sql.replace("SPACETIME_NAME", stds_name + "_" + stds_mapset )
+                sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
+                    None).get_type())
+                sql = sql.replace("SPACETIME_NAME",
+                    stds_name + "_" + stds_mapset)
                 sql = sql.replace("SPACETIME_ID", self.base.get_id())
                 sql = sql.replace("STDS", self.get_type())
 
@@ -1246,12 +1288,13 @@
                 self.relative_time.set_granularity(None)
             self.relative_time.update_all(dbif)
 
-        if connect == True:
+        if connect:
             dbif.close()
 
 ###############################################################################
 
-def create_temporal_relation_sql_where_statement(start, end, use_start=True, use_during=False, 
+def create_temporal_relation_sql_where_statement(
+    start, end, use_start=True, use_during=False,
                                         use_overlap=False, use_contain=False, use_equal=False,
                                         use_follows=False, use_precedes=False):
     """!Create a SQL WHERE statement for temporal relation selection of maps in space time datasets
@@ -1290,64 +1333,163 @@
         @param use_follows: Select maps which temporally follow the selection granule
                          map    :              s-----------e
                          granule:  s-----------e
- 
+
         @param use_precedes: Select maps which temporally precedes the selection granule
                          map    :  s-----------e
                          granule:              s-----------e
 
-    """
+        Usage:
+        
+        >>> # Relative time
+        >>> start = 1
+        >>> end = 2
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False)
+        >>> create_temporal_relation_sql_where_statement(start, end)
+        '((start_time >= 1 and start_time < 2) )'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=True)
+        '((start_time >= 1 and start_time < 2) )'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_during=True)
+        '(((start_time > 1 and end_time < 2) OR (start_time >= 1 and end_time < 2) OR (start_time > 1 and end_time <= 2)))'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_overlap=True)
+        '(((start_time < 1 and end_time > 1 and end_time < 2) OR (start_time < 2 and start_time > 1 and end_time > 2)))'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_contain=True)
+        '(((start_time < 1 and end_time > 2) OR (start_time <= 1 and end_time > 2) OR (start_time < 1 and end_time >= 2)))'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_equal=True)
+        '((start_time = 1 and end_time = 2))'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_follows=True)
+        '((start_time = 2))'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_precedes=True)
+        '((end_time = 1))'
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=True, use_during=True, use_overlap=True, use_contain=True,
+        ... use_equal=True, use_follows=True, use_precedes=True)
+        '((start_time >= 1 and start_time < 2)  OR ((start_time > 1 and end_time < 2) OR (start_time >= 1 and end_time < 2) OR (start_time > 1 and end_time <= 2)) OR ((start_time < 1 and end_time > 1 and end_time < 2) OR (start_time < 2 and start_time > 1 and end_time > 2)) OR ((start_time < 1 and end_time > 2) OR (start_time <= 1 and end_time > 2) OR (start_time < 1 and end_time >= 2)) OR (start_time = 1 and end_time = 2) OR (start_time = 2) OR (end_time = 1))'
 
+        >>> # Absolute time
+        >>> start = datetime(2001, 1, 1, 12, 30)
+        >>> end = datetime(2001, 3, 31, 14, 30)
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False)
+        >>> create_temporal_relation_sql_where_statement(start, end)
+        "((start_time >= '2001-01-01 12:30:00' and start_time < '2001-03-31 14:30:00') )"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=True)
+        "((start_time >= '2001-01-01 12:30:00' and start_time < '2001-03-31 14:30:00') )"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_during=True)
+        "(((start_time > '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time >= '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time > '2001-01-01 12:30:00' and end_time <= '2001-03-31 14:30:00')))"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_overlap=True)
+        "(((start_time < '2001-01-01 12:30:00' and end_time > '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time < '2001-03-31 14:30:00' and start_time > '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00')))"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_contain=True)
+        "(((start_time < '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00') OR (start_time <= '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00') OR (start_time < '2001-01-01 12:30:00' and end_time >= '2001-03-31 14:30:00')))"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_equal=True)
+        "((start_time = '2001-01-01 12:30:00' and end_time = '2001-03-31 14:30:00'))"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_follows=True)
+        "((start_time = '2001-03-31 14:30:00'))"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=False, use_precedes=True)
+        "((end_time = '2001-01-01 12:30:00'))"
+        >>> create_temporal_relation_sql_where_statement(start, end, 
+        ... use_start=True, use_during=True, use_overlap=True, use_contain=True,
+        ... use_equal=True, use_follows=True, use_precedes=True)
+        "((start_time >= '2001-01-01 12:30:00' and start_time < '2001-03-31 14:30:00')  OR ((start_time > '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time >= '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time > '2001-01-01 12:30:00' and end_time <= '2001-03-31 14:30:00')) OR ((start_time < '2001-01-01 12:30:00' and end_time > '2001-01-01 12:30:00' and end_time < '2001-03-31 14:30:00') OR (start_time < '2001-03-31 14:30:00' and start_time > '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00')) OR ((start_time < '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00') OR (start_time <= '2001-01-01 12:30:00' and end_time > '2001-03-31 14:30:00') OR (start_time < '2001-01-01 12:30:00' and end_time >= '2001-03-31 14:30:00')) OR (start_time = '2001-01-01 12:30:00' and end_time = '2001-03-31 14:30:00') OR (start_time = '2001-03-31 14:30:00') OR (end_time = '2001-01-01 12:30:00'))"
+        """
+
     where = "("
 
     if use_start:
-        where += "(start_time >= '%s' and start_time < '%s') " % (start, end)
+        if isinstance(start, datetime):
+            where += "(start_time >= '%s' and start_time < '%s') " % (start, end)
+        else:
+            where += "(start_time >= %i and start_time < %i) " % (start, end)
 
     if use_during:
         if use_start:
             where += " OR "
-        where += "((start_time > '%s' and end_time < '%s') OR " % (start, end)
-        where += "(start_time >= '%s' and end_time < '%s') OR " % (start, end)
-        where += "(start_time > '%s' and end_time <= '%s'))" % (start, end)
+            
+        if isinstance(start, datetime):
+            where += "((start_time > '%s' and end_time < '%s') OR " % (start, end)
+            where += "(start_time >= '%s' and end_time < '%s') OR " % (start, end)
+            where += "(start_time > '%s' and end_time <= '%s'))" % (start, end)
+        else:
+            where += "((start_time > %i and end_time < %i) OR " % (start, end)
+            where += "(start_time >= %i and end_time < %i) OR " % (start, end)
+            where += "(start_time > %i and end_time <= %i))" % (start, end)
 
     if use_overlap:
         if use_start or use_during:
             where += " OR "
 
-        where += "((start_time < '%s' and end_time > '%s' and end_time < '%s') OR " % (start, start, end)
-        where += "(start_time < '%s' and start_time > '%s' and end_time > '%s'))" % (end, start, end)
+        if isinstance(start, datetime):
+            where += "((start_time < '%s' and end_time > '%s' and end_time < '%s') OR " % (start, start, end)
+            where += "(start_time < '%s' and start_time > '%s' and end_time > '%s'))" % (end, start, end)
+        else:
+            where += "((start_time < %i and end_time > %i and end_time < %i) OR " % (start, start, end)
+            where += "(start_time < %i and start_time > %i and end_time > %i))" % (end, start, end)
 
     if use_contain:
         if use_start or use_during or use_overlap:
             where += " OR "
 
-        where += "((start_time < '%s' and end_time > '%s') OR " % (start, end)
-        where += "(start_time <= '%s' and end_time > '%s') OR " % (start, end)
-        where += "(start_time < '%s' and end_time >= '%s'))" % (start, end)
+        if isinstance(start, datetime):
+            where += "((start_time < '%s' and end_time > '%s') OR " % (start, end)
+            where += "(start_time <= '%s' and end_time > '%s') OR " % (start, end)
+            where += "(start_time < '%s' and end_time >= '%s'))" % (start, end)
+        else:
+            where += "((start_time < %i and end_time > %i) OR " % (start, end)
+            where += "(start_time <= %i and end_time > %i) OR " % (start, end)
+            where += "(start_time < %i and end_time >= %i))" % (start, end)
 
     if use_equal:
         if use_start or use_during or use_overlap or use_contain:
             where += " OR "
 
-        where += "(start_time = '%s' and end_time = '%s')" % (start, end)
+        if isinstance(start, datetime):
+            where += "(start_time = '%s' and end_time = '%s')" % (start, end)
+        else:
+            where += "(start_time = %i and end_time = %i)" % (start, end)
 
     if use_follows:
         if use_start or use_during or use_overlap or use_contain or use_equal:
             where += " OR "
 
-        where += "(start_time = '%s')" % (end)
+        if isinstance(start, datetime):
+            where += "(start_time = '%s')" % (end)
+        else:
+            where += "(start_time = %i)" % (end)
 
     if use_precedes:
-        if use_start or use_during or use_overlap or use_contain or use_equal or use_follows: 
+        if use_start or use_during or use_overlap or use_contain or use_equal \
+           or use_follows:
             where += " OR "
 
-        where += "(end_time = '%s')" % (start)
+        if isinstance(start, datetime):
+            where += "(end_time = '%s')" % (start)
+        else:
+            where += "(end_time = %i)" % (start)
 
     where += ")"
-    
+
     # Catch empty where statement
     if where == "()":
-	where = None
+        where = None
 
     return where
 
+###############################################################################
 
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()

Modified: grass/trunk/lib/python/temporal/datetime_math.py
===================================================================
--- grass/trunk/lib/python/temporal/datetime_math.py	2012-08-10 16:12:42 UTC (rev 52622)
+++ grass/trunk/lib/python/temporal/datetime_math.py	2012-08-10 18:33:57 UTC (rev 52623)
@@ -53,13 +53,17 @@
 ###############################################################################
 
 def increment_datetime_by_string(mydate, increment, mult = 1):
-    """!Return a new datetime object incremented with the provided relative dates specified as string.
-       Additional a multiplier can be specified to multiply the increment before adding to the provided datetime object.
+    """!Return a new datetime object incremented with the provided 
+       relative dates specified as string.
+       Additional a multiplier can be specified to multiply the increment 
+       before adding to the provided datetime object.
 
        @param mydate A datetime object to incremented
        @param increment A string providing increment information:
-                  The string may include comma separated values of type seconds, minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00 with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
+                  The string may include comma separated values of type seconds, 
+                  minutes, hours, days, weeks, months and years
+                  Example: Increment the datetime 2001-01-01 00:00:00 
+                  with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
                   will result in the datetime 2003-02-18 12:05:00
        @param mult A multiplier, default is 1
     """

Modified: grass/trunk/lib/python/temporal/space_time_datasets.py
===================================================================
--- grass/trunk/lib/python/temporal/space_time_datasets.py	2012-08-10 16:12:42 UTC (rev 52622)
+++ grass/trunk/lib/python/temporal/space_time_datasets.py	2012-08-10 18:33:57 UTC (rev 52623)
@@ -36,14 +36,14 @@
 
 ###############################################################################
 
-class raster_dataset(abstract_map_dataset):
+class raster_dataset(AbstractMapDataset):
     """!Raster dataset class
 
        This class provides functions to select, update, insert or delete raster
        map information and valid time stamps into the SQL temporal database.
     """
     def __init__(self, ident):
-	abstract_map_dataset.__init__(self)
+	AbstractMapDataset.__init__(self)
 	self.reset(ident)
 
     def get_type(self):
@@ -259,14 +259,14 @@
 
 ###############################################################################
 
-class raster3d_dataset(abstract_map_dataset):
+class raster3d_dataset(AbstractMapDataset):
     """!Raster3d dataset class
 
        This class provides functions to select, update, insert or delete raster3d
        map information and valid time stamps into the SQL temporal database.
     """
     def __init__(self, ident):
-	abstract_map_dataset.__init__(self)
+	AbstractMapDataset.__init__(self)
 	self.reset(ident)
 
     def get_type(self):
@@ -470,14 +470,14 @@
 
 ###############################################################################
 
-class vector_dataset(abstract_map_dataset):
+class vector_dataset(AbstractMapDataset):
     """!Vector dataset class
 
        This class provides functions to select, update, insert or delete vector
        map information and valid time stamps into the SQL temporal database.
     """
     def __init__(self, ident):
-	abstract_map_dataset.__init__(self)
+	AbstractMapDataset.__init__(self)
 	self.reset(ident)
 
     def get_type(self):
@@ -686,11 +686,11 @@
 
 ###############################################################################
 
-class space_time_raster_dataset(abstract_space_time_dataset):
+class space_time_raster_dataset(AbstractSpaceTimeDataset):
     """!Space time raster dataset class
     """
     def __init__(self, ident):
-        abstract_space_time_dataset.__init__(self, ident)
+        AbstractSpaceTimeDataset.__init__(self, ident)
 
     def get_type(self):
         return "strds"
@@ -733,12 +733,12 @@
 
 ###############################################################################
 
-class space_time_raster3d_dataset(abstract_space_time_dataset):
+class space_time_raster3d_dataset(AbstractSpaceTimeDataset):
     """!Space time raster3d dataset class
     """
 
     def __init__(self, ident):
-        abstract_space_time_dataset.__init__(self, ident)
+        AbstractSpaceTimeDataset.__init__(self, ident)
 
     def get_type(self):
         return "str3ds"
@@ -787,12 +787,12 @@
 
 ###############################################################################
 
-class space_time_vector_dataset(abstract_space_time_dataset):
+class space_time_vector_dataset(AbstractSpaceTimeDataset):
     """!Space time vector dataset class
     """
 
     def __init__(self, ident):
-        abstract_space_time_dataset.__init__(self, ident)
+        AbstractSpaceTimeDataset.__init__(self, ident)
 
     def get_type(self):
         return "stvds"



More information about the grass-commit mailing list