[GRASS-SVN] r61066 - in grass/trunk: lib/python/temporal temporal/t.rast.aggregate temporal/t.rast.aggregate.ds

svn_grass at osgeo.org svn_grass at osgeo.org
Sun Jun 29 11:05:40 PDT 2014


Author: huhabla
Date: 2014-06-29 11:05:40 -0700 (Sun, 29 Jun 2014)
New Revision: 61066

Modified:
   grass/trunk/lib/python/temporal/abstract_space_time_dataset.py
   grass/trunk/lib/python/temporal/aggregation.py
   grass/trunk/lib/python/temporal/datetime_math.py
   grass/trunk/lib/python/temporal/register.py
   grass/trunk/lib/python/temporal/temporal_raster_algebra.py
   grass/trunk/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.py
   grass/trunk/temporal/t.rast.aggregate.ds/test.t.rast.aggregate.ds.sh
   grass/trunk/temporal/t.rast.aggregate/t.rast.aggregate.py
   grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.relative_time.sh
   grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.sh
Log:
Implemented a new experimental aggregation method that is faster and allows parallel processing.
Integrated enhancement request #2294 to generate datetime-based map name suffixes.
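
For context, a minimal Python sketch (not part of the commit) of how the new options could be driven from a script via grass.script; the dataset and map names are illustrative assumptions, while the nprocs option, the -s flag and the extended sampling option are the ones added below:

import grass.script as gcore

# Monthly averages computed with 4 parallel r.series processes; the -s flag
# requests datetime-based suffixes (e.g. prec_avg_2001_01).
# "precip_abs1" and "precip_monthly" are hypothetical dataset names.
gcore.run_command("t.rast.aggregate", flags="s",
                  input="precip_abs1", output="precip_monthly",
                  basename="prec_avg", granularity="1 months",
                  method="average", sampling="contains",
                  nprocs=4, overwrite=True)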


Modified: grass/trunk/lib/python/temporal/abstract_space_time_dataset.py
===================================================================
--- grass/trunk/lib/python/temporal/abstract_space_time_dataset.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/lib/python/temporal/abstract_space_time_dataset.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -224,7 +224,7 @@
                 length += len(token) + 1
 
         command += "\n"
-        return command
+        return str(command)
 
     def get_semantic_type(self):
         """!Return the semantic type of this dataset

Modified: grass/trunk/lib/python/temporal/aggregation.py
===================================================================
--- grass/trunk/lib/python/temporal/aggregation.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/lib/python/temporal/aggregation.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -136,8 +136,8 @@
             new_map.delete(dbif)
             new_map = RasterDataset(map_id)
         else:
-            msgr.error(_("Raster map <%s> is already in temporal database, " \
-                         "use overwrite flag to overwrite"))
+            msgr.error(_("Raster map <%(name)s> is already in temporal database, " \
+                         "use overwrite flag to overwrite"%({"name":new_map.get_name()})))
             return
 
     msgr.verbose(_("Compute aggregation of maps between %(st)s - %(end)s" % {
@@ -177,3 +177,139 @@
             return None
 
     return new_map
+
+##############################################################################
+
+def aggregate_by_topology(granularity_list,  granularity,  map_list,  topo_list,  basename,  time_suffix,
+                          offset=0,  method="average",  nprocs=1,  spatial=None,  dbif=None, 
+                          overwrite=False):
+    """!Aggregate a list of raster input maps with r.series
+
+       @param granularity_list A list of AbstractMapDataset objects. 
+                               The temporal extents of the objects are used
+                               to build the spatio-temporal topology with the map list objects
+       @param granularity The granularity of the granularity list
+       @param map_list A list of RasterDataset objects that contain the raster 
+                       maps that should be aggregated
+       @param topo_list A list of strings of topological relations that are 
+                        used to select the raster maps for aggregation
+       @param basename The basename of the new generated raster maps
+       @param time_suffix Use the granularity-truncated start time of the
+                          actual granule to create the suffix for the basename
+       @param offset Use a numerical offset for suffix generation (overwritten by time_suffix)
+       @param method The aggregation method of r.series (average,min,max, ...)
+       @param nprocs The number of processes used for parallel computation
+       @param spatial This indicates if the spatial topology is created as well:
+                      spatial can be None (no spatial topology), "2D" using west, east,
+                      south, north or "3D" using west, east, south, north, bottom, top
+       @param dbif The database interface to be used
+       @param overwrite Overwrite existing raster maps
+       @return A list of RasterDataset objects that contain the new map names and
+               the temporal extent for map registration
+    """
+    import grass.script as gcore
+    import grass.pygrass.modules as pymod
+    import copy
+
+    msgr = get_tgis_message_interface()
+
+    dbif,  connected = init_dbif(dbif)
+
+    topo_builder = SpatioTemporalTopologyBuilder()
+    topo_builder.build(mapsA=granularity_list, mapsB=map_list, spatial=spatial)
+
+    # The module queue for parallel execution
+    process_queue = pymod.ParallelModuleQueue(int(nprocs))
+
+    # Dummy process object that will be deep copied
+    # and be put into the process queue
+    r_series = pymod.Module("r.series", output="spam", method=method,
+                            overwrite=overwrite, quiet=True,  run_=False, 
+                            finish_=False)
+    g_copy = pymod.Module("g.copy", rast="spam,spamspam",
+                          quiet=True,  run_=False, finish_=False)
+    output_list = []
+    count = 0
+
+    for granule in granularity_list:
+        msgr.percent(count,  len(granularity_list),  1)
+        count += 1
+
+        aggregation_list = []
+
+        if "equal" in topo_list and granule.equal:
+            for map_layer in granule.equal:
+                aggregation_list.append(map_layer.get_name())
+        if "contains" in topo_list and granule.contains:
+            for map_layer in granule.contains:
+                aggregation_list.append(map_layer.get_name())
+        if "during" in topo_list and granule.during:
+            for map_layer in granule.during:
+                aggregation_list.append(map_layer.get_name())
+        if "starts" in topo_list and granule.starts:
+            for map_layer in granule.starts:
+                aggregation_list.append(map_layer.get_name())
+        if "started" in topo_list and granule.started:
+            for map_layer in granule.started:
+                aggregation_list.append(map_layer.get_name())
+        if "finishes" in topo_list and granule.finishes:
+            for map_layer in granule.finishes:
+                aggregation_list.append(map_layer.get_name())
+        if "finished" in topo_list and granule.finished:
+            for map_layer in granule.finished:
+                aggregation_list.append(map_layer.get_name())
+        if "overlaps" in topo_list and granule.overlaps:
+            for map_layer in granule.overlaps:
+                aggregation_list.append(map_layer.get_name())
+        if "overlapped" in topo_list and granule.overlapped:
+            for map_layer in granule.overlapped:
+                aggregation_list.append(map_layer.get_name())
+
+        if aggregation_list:
+            msgr.verbose(_("Aggregate %(len)i raster maps from %(start)s to %(end)s") \
+                           %({"len":len(aggregation_list),
+                           "start":str(granule.temporal_extent.get_start_time()), 
+                           "end":str(granule.temporal_extent.get_end_time())}))
+
+            if granule.is_time_absolute() is True and time_suffix is True:
+                suffix = create_suffix_from_datetime(granule.temporal_extent.get_start_time(), 
+                                                     granularity)
+            else:
+                suffix = str(count + int(offset))
+            output_name = "%s_%s"%(basename,  suffix)
+
+            map_layer = RasterDataset("%s@%s"%(output_name,
+                                               get_current_mapset()))
+            map_layer.set_temporal_extent(granule.get_temporal_extent())
+
+            if map_layer.map_exists() is True and overwrite is False:
+                msgr.fatal(_("Unable to perform aggregation. Output raster map <%(name)s> "\
+                             "exists and overwrite flag is not set"%({"name":output_name})))
+
+            output_list.append(map_layer)
+
+            if len(aggregation_list) > 1:
+                # Create the r.series input file
+                filename = gcore.tempfile(True)
+                file = open(filename, 'w')
+                for name in aggregation_list:
+                    string = "%s\n" % (name)
+                    file.write(string)
+                file.close()
+
+                mod = copy.deepcopy(r_series)
+                mod(file=filename, output=output_name)
+                if len(aggregation_list) > 1000 :
+                    mod(flags="z")
+                process_queue.put(mod)
+            else:
+                mod = copy.deepcopy(g_copy)
+                mod(rast="%s,%s"%(aggregation_list[0],  output_name))
+                process_queue.put(mod)
+
+    if connected:
+        dbif.close()
+
+    msgr.percent(1, 1, 1)
+
+    return output_list
\ No newline at end of file
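
To make the intended use of the new function clearer, here is a minimal sketch (not part of the commit) that builds a granule list for an absolute-time dataset and hands it to aggregate_by_topology(); the dataset name, granularity and basename are assumptions, and interval time (end times present) is assumed throughout:

import grass.temporal as tgis

tgis.init()
dbif = tgis.SQLDatabaseInterfaceConnection()
dbif.connect()

# "precip_abs1" and the monthly granularity are illustrative only.
sp = tgis.open_old_stds("precip_abs1", "strds", dbif)
map_list = sp.get_registered_maps_as_objects(order="start_time", dbif=dbif)

gran = "1 months"
start = tgis.adjust_datetime_to_granularity(
    map_list[0].temporal_extent.get_start_time(), gran)
stop = map_list[-1].temporal_extent.get_end_time()

# One empty RasterDataset per granule; only its temporal extent is used
# to build the topology against map_list.
granularity_list = []
while start < stop:
    end = tgis.increment_datetime_by_string(start, gran)
    granule = tgis.RasterDataset(None)
    granule.set_absolute_time(start, end)
    granularity_list.append(granule)
    start = end

output_list = tgis.aggregate_by_topology(
    granularity_list=granularity_list, granularity=gran, map_list=map_list,
    topo_list=["contains"], basename="prec_avg", time_suffix=True,
    method="average", nprocs=4, dbif=dbif, overwrite=True)

# output_list still has to be registered, e.g. with register_map_object_list()
# from register.py below.
dbif.close()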

Modified: grass/trunk/lib/python/temporal/datetime_math.py
===================================================================
--- grass/trunk/lib/python/temporal/datetime_math.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/lib/python/temporal/datetime_math.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -828,7 +828,33 @@
     return string
 
 ###############################################################################
+suffix_units = {"years" : "%Y", 
+                         "year" : "%Y",  
+                         "months" : "%Y_%m", 
+                         "month" : "%Y_%m", 
+                         "weeks" : "%Y_%m_%d",  
+                         "week" : "%Y_%m_%d",  
+                         "days" : "%Y_%m_%d",  
+                         "day" : "%Y_%m_%d",  
+                         "hours" : "%Y_%m_%d_%H",  
+                         "hour" : "%Y_%m_%d_%H",  
+                         "minutes" : "%Y_%m_%d_%H_%M",
+                         "minute" : "%Y_%m_%d_%H_%M",} 
 
+
+def create_suffix_from_datetime(start_time,  granularity):
+    """!Create a datetime string based on a datetime object and a provided
+       granularity that can be used as suffix for map names.
+       
+       datetime=2001-01-01 00:00:00, granularity="1 month" returns "2001_01"
+       
+       @param start_time The datetime object
+       @param granularity The granularity for example "1 month" or "100 seconds"
+       @return A string
+    """
+    global suffix_units
+    return start_time.strftime(suffix_units[granularity.split(' ')[1]]) 
+
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
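
For illustration, assuming the new helper is re-exported through grass.temporal like the other datetime_math functions, the suffix generation behaves as follows; only the unit word of the granularity string is looked up in suffix_units:

from datetime import datetime
import grass.temporal as tgis

# Assumption: create_suffix_from_datetime is available via grass.temporal.
start = datetime(2001, 1, 1, 0, 0, 0)
print(tgis.create_suffix_from_datetime(start, "1 month"))     # 2001_01
print(tgis.create_suffix_from_datetime(start, "7 days"))      # 2001_01_01
print(tgis.create_suffix_from_datetime(start, "30 minutes"))  # 2001_01_01_00_00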

Modified: grass/trunk/lib/python/temporal/register.py
===================================================================
--- grass/trunk/lib/python/temporal/register.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/lib/python/temporal/register.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -423,14 +423,79 @@
                           str(end_time), unit))
         else:
             msgr.debug(1, _("Set relative valid time for map <%s> to %i - %s "
-                           "with unit %s") % (map.get_map_id(), start_time,
-                                              str(end_time), unit))
+                            "with unit %s") % (map.get_map_id(), start_time,
+                            str(end_time), unit))
 
         map.set_relative_time(start_time, end_time, unit)
 
 
-###############################################################################
+##############################################################################
 
+def register_map_object_list(type,  map_list, output_stds,
+                             delete_empty, unit, dbif=None):
+    """Register a list of AbstractMapDataset objects in the temporal database
+       and optionally in a space time dataset.
+       
+       @param type The type of the map layer (rast, rast3d, vect)
+       @param map_list List of AbstractMapDataset objects
+       @param output_stds The output stds
+       @param delete_empty Set True to delete empty map layers found in the map_list
+       @param unit The temporal unit of the space time dataset
+       @param dbif The database interface to be used
+       
+    """
+    import grass.script as gcore
+    import grass.pygrass.modules as pymod
+    import copy
+
+    dbif,  connected = init_dbif(dbif)
+
+    filename = gcore.tempfile(True)
+    file = open(filename, 'w')
+
+    empty_maps = []
+    for map_layer in map_list:
+        # Read the raster map data
+        map_layer.load()
+        # In case of an empty map continue, do not register empty maps
+
+        if delete_empty:
+            if map_layer.metadata.get_min() is None and \
+                map_layer.metadata.get_max() is None:
+                empty_maps.append(map_layer)
+                continue
+
+        start,  end = map_layer.get_temporal_extent_as_tuple()
+        id = map_layer.get_id()
+        if not end:
+            end = start
+        string = "%s|%s|%s\n" % (id,  str(start),  str(end))
+        file.write(string)
+    file.close()
+
+    if output_stds:
+        output_stds_id = output_stds.get_id()
+    else:
+        output_stds_id = None
+
+    register_maps_in_space_time_dataset(type, output_stds_id, unit=unit,
+                                        file=filename, dbif=dbif)
+
+    g_remove = pymod.Module("g.remove", quiet=True,
+                            run_=False, finish_=True)
+
+    # Remove empty maps
+    if len(empty_maps) > 0:
+        for map in empty_maps:
+            if  map.is_in_db(dbif):
+                map.delete(dbif)
+            mod = copy.deepcopy(g_remove)
+            mod(rast=map.get_name())
+            mod.run()
+
+    if connected:
+        dbif.close()
+
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
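
Continuing the aggregation sketch given after aggregation.py above, the result list could be written into a new space time raster dataset roughly like this; dbif and output_list come from that sketch, and the dataset name, title, description and semantic type are assumptions:

import grass.script as gcore
import grass.temporal as tgis

# dbif and output_list as in the aggregate_by_topology() sketch above.
output_strds = tgis.open_new_stds("precip_monthly", "strds", "absolute",
                                  "Monthly precipitation", "Aggregation demo",
                                  "mean", dbif, gcore.overwrite())
# unit is only needed for relative time; None is fine for absolute time.
tgis.register_map_object_list("rast", output_list, output_strds,
                              delete_empty=False, unit=None, dbif=dbif)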

Modified: grass/trunk/lib/python/temporal/temporal_raster_algebra.py
===================================================================
--- grass/trunk/lib/python/temporal/temporal_raster_algebra.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/lib/python/temporal/temporal_raster_algebra.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -63,7 +63,7 @@
     def __init__(self, pid=None, run=False, debug=True, spatial = False, nprocs = 1, register_null = False):
         TemporalRasterBaseAlgebraParser.__init__(self, pid, run, debug, spatial, nprocs, register_null)
 
-        self.m_mapcalc = pymod.Module('r.mapcalc')
+        self.m_mapcalc = pymod.Module('r.mapcalc', run_=False, finish_=False)
         self.m_mremove = pymod.Module('g.mremove')
 
     def parse(self, expression, basename = None, overwrite=False):
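
The run_=False/finish_=False pattern matters because such a Module object can be deep-copied and queued, which is exactly how the new aggregation code parallelizes r.series. A generic sketch of the pattern (map names are placeholders, not from this commit):

import copy
import grass.pygrass.modules as pymod

# Template module: run_=False prevents execution at construction time,
# finish_=False lets the queue run copies asynchronously.
r_series = pymod.Module("r.series", output="dummy", method="average",
                        overwrite=True, quiet=True, run_=False, finish_=False)

queue = pymod.ParallelModuleQueue(4)  # at most 4 concurrent processes
for i in range(3):
    mod = copy.deepcopy(r_series)
    # Input/output map names are placeholders for illustration only.
    mod(input="map_a_%i,map_b_%i" % (i, i), output="result_%i" % i)
    queue.put(mod)
queue.wait()  # wait for the remaining queued modules to finish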

Modified: grass/trunk/temporal/t.rast.aggregate/t.rast.aggregate.py
===================================================================
--- grass/trunk/temporal/t.rast.aggregate/t.rast.aggregate.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/temporal/t.rast.aggregate/t.rast.aggregate.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -30,7 +30,7 @@
 #% key: basename
 #% type: string
 #% label: Basename of the new generated output maps
-#% description: A numerical suffix separated by an underscore will be attached to create a unique identifier
+#% description: Either a numerical suffix or the start time (s-flag) separated by an underscore will be attached to create a unique identifier
 #% required: yes
 #% multiple: no
 #% gisprompt:
@@ -63,7 +63,18 @@
 #% answer: 0
 #%end
 
+#%option
+#% key: nprocs
+#% type: integer
+#% description: Number of r.series processes to run in parallel
+#% required: no
+#% multiple: no
+#% answer: 1
+#%end
+
 #%option G_OPT_T_SAMPLE
+#% options: equal,overlaps,overlapped,starts,started,finishes,finished,during,contains
+#% answer: contains
 #%end
 
 #%option G_OPT_T_WHERE
@@ -74,12 +85,16 @@
 #% description: Register Null maps
 #%end
 
-import grass.script as grass
+#%flag
+#% key: s
+#% description: Use start time - truncated according to granularity - as suffix. This flag overrides the offset option.
+#%end
+
+import grass.script as gcore
 import grass.temporal as tgis
 
 ############################################################################
 
-
 def main():
 
     # Get the options
@@ -92,90 +107,84 @@
     method = options["method"]
     sampling = options["sampling"]
     offset = options["offset"]
+    nprocs = options["nprocs"]
+    time_suffix = flags["s"]
+    
+    topo_list = sampling.split(",")
 
-    # Make sure the temporal database exists
     tgis.init()
-    # We need a database interface
+    
     dbif = tgis.SQLDatabaseInterfaceConnection()
     dbif.connect()
 
     sp = tgis.open_old_stds(input, "strds", dbif)
-    temporal_type, semantic_type, title, description = sp.get_initial_values()
-    new_sp = tgis.open_new_stds(output, "strds", temporal_type,
-                                              title, description, semantic_type,
-                                              dbif, grass.overwrite())
 
-    rows = sp.get_registered_maps("id,start_time,end_time", where, "start_time", dbif)
+    map_list = sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)
 
-    if not rows:
+    if not map_list:
         dbif.close()
-        grass.fatal(_("Space time raster dataset <%s> is empty") % input)
+        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)
 
-    # Modify the start time to fit the granularity
+    # We will create the strds later, but need to check here
+    tgis.check_new_stds(output, "strds",   dbif,  gcore.overwrite())
+    
+    start_time = map_list[0].temporal_extent.get_start_time()
 
     if sp.is_time_absolute():
-        first_start_time = tgis.adjust_datetime_to_granularity(
-            rows[0]["start_time"], gran)
-    else:
-        first_start_time = rows[0]["start_time"]
+        start_time = tgis.adjust_datetime_to_granularity(start_time,  gran)
 
     # We use the end time first
-    last_start_time = rows[len(rows) - 1]["end_time"]
-    is_end_time = True
+    end_time = map_list[-1].temporal_extent.get_end_time()
+    has_end_time = True
 
-    # In case no end time is available, then we use the start time
-    if last_start_time is None:
-        last_start_time = rows[len(rows) - 1]["start_time"]
-        is_end_time = False
+    # In case no end time is available, then we use the start time of the last map layer
+    if end_time is None:
+        end_time,  tmp_value = map_list[- 1].temporal_extent.get_start_time()
+        has_end_time = False
 
-    next_start_time = first_start_time
+    granularity_list = []
 
-    count = 0
-
+    # Build the granularity list
     while True:
-        if is_end_time is True:
-            if next_start_time >= last_start_time:
+        if has_end_time is True:
+            if start_time >= end_time:
                 break
         else:
-            if next_start_time > last_start_time:
+            if start_time > end_time:
                 break
 
-        start = next_start_time
+        granule = tgis.RasterDataset(None)
+        start = start_time
         if sp.is_time_absolute():
-            end = tgis.increment_datetime_by_string(next_start_time, gran)
+            end = tgis.increment_datetime_by_string(start_time, gran)
+            granule.set_absolute_time(start, end)
         else:
-            end = next_start_time + int(gran)
-        next_start_time = end
+            end = start_time + int(gran)
+            granule.set_relative_time(start, end,  sp.get_relative_time_unit())
+        start_time = end
+        
+        granularity_list.append(granule)
 
-        input_map_names = tgis.collect_map_names(
-            sp, dbif, start, end, sampling)
+    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,  granularity=gran,  
+                                                                       map_list=map_list,  
+                                                                       topo_list=topo_list,  basename=base, time_suffix=time_suffix,
+                                                                       offset=offset,  method=method,  nprocs=nprocs,  spatial=None, 
+                                                                       overwrite=gcore.overwrite())
 
-        if input_map_names:
-            new_map = tgis.aggregate_raster_maps(
-                input_map_names, base, start, end,
-                count, method, register_null, dbif,  offset)
+    if output_list:
+        temporal_type, semantic_type, title, description = sp.get_initial_values()
+        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
+                                                                 title, description, semantic_type,
+                                                                 dbif, gcore.overwrite())
+        tgis.register_map_object_list("rast", output_list,  output_strds,  register_null,  
+                                                       sp.get_relative_time_unit(),  dbif)
 
-            if new_map:
-                # Set the time stamp and write it to the raster map
-                if sp.is_time_absolute():
-                    new_map.set_absolute_time(start, end)
-                else:
-                    new_map.set_relative_time(start,
-                                              end, sp.get_relative_time_unit())
+        # Update the raster metadata table entries with aggregation type
+        output_strds.set_aggregation_type(method)
+        output_strds.metadata.update(dbif)
 
-                # Insert map in temporal database
-                new_map.insert(dbif)
-                new_sp.register_map(new_map, dbif)
-
-                count += 1
-
-    # Update the spatio-temporal extent and the raster metadata table entries
-    new_sp.set_aggregation_type(method)
-    new_sp.metadata.update(dbif)
-    new_sp.update_from_registered_maps(dbif)
-
     dbif.close()
 
 if __name__ == "__main__":
-    options, flags = grass.parser()
+    options, flags = gcore.parser()
     main()

Modified: grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.relative_time.sh
===================================================================
--- grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.relative_time.sh	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.relative_time.sh	2014-06-29 18:05:40 UTC (rev 61066)
@@ -9,7 +9,7 @@
 r.mapcalc --o expr="prec_1 = rand(0, 550)"
 r.mapcalc --o expr="prec_2 = rand(0, 450)"
 r.mapcalc --o expr="prec_3 = rand(0, 320)"
-r.mapcalc --o expr="prec_4 = rand(0, 510)"
+r.mapcalc --o expr="prec_4 = null()"
 r.mapcalc --o expr="prec_5 = rand(0, 300)"
 r.mapcalc --o expr="prec_6 = rand(0, 650)"
 
@@ -18,27 +18,24 @@
 
 # The first @test
 
-t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum granularity=6 method=average sampling=start,during
+t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum \
+    granularity=6 method=average sampling=overlaps,overlapped,contains -ns nprocs=2
 t.info type=strds input=precip_abs2
-r.info prec_sum_0
-r.info prec_sum_1
-r.info prec_sum_2
-t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum granularity=9 method=maximum sampling=start,during
+t.rast.list input=precip_abs2
+
+t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum \
+    granularity=9 method=maximum sampling=contains offset=130 nprocs=3
 t.info type=strds input=precip_abs2
-r.info prec_sum_0
-r.info prec_sum_1
-t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum granularity=4 method=minimum sampling=start,during
+t.rast.list input=precip_abs2
+
+t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum \
+    granularity=4 method=minimum sampling=contains -s
 t.info type=strds input=precip_abs2
-r.info prec_sum_0
-r.info prec_sum_1
-r.info prec_sum_2
-r.info prec_sum_3
-t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum granularity=5 method=sum sampling=start,during
+t.rast.list input=precip_abs2
+
+t.rast.aggregate --o --v input=precip_abs1 output=precip_abs2 base=prec_sum \
+    granularity=5 method=sum sampling=overlaps,overlapped,contains -n
 t.info type=strds input=precip_abs2
-r.info prec_sum_0
-r.info prec_sum_1
-r.info prec_sum_2
-r.info prec_sum_3
+t.rast.list input=precip_abs2
 
-t.unregister type=rast maps=prec_1,prec_2,prec_3,prec_4,prec_5,prec_6
-t.remove type=strds input=precip_abs1,precip_abs2
+t.remove -rf type=strds input=precip_abs1,precip_abs2

Modified: grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.sh
===================================================================
--- grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.sh	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/temporal/t.rast.aggregate/test.t.rast.aggregate.sh	2014-06-29 18:05:40 UTC (rev 61066)
@@ -14,25 +14,38 @@
 r.mapcalc expr="prec_4 = rand(0, 510)"
 r.mapcalc expr="prec_5 = rand(0, 300)"
 r.mapcalc expr="prec_6 = rand(0, 650)"
+r.mapcalc expr="prec_7 = null()"
 
 t.create type=strds temporaltype=absolute output=precip_abs1 title="A test" descr="A test"
-t.register -i type=rast input=precip_abs1 maps=prec_1,prec_2,prec_3,prec_4,prec_5,prec_6 \
+t.register -i type=rast input=precip_abs1 maps=prec_1,prec_2,prec_3,prec_4,prec_5,prec_6,prec_7 \
     start="2001-01-15 12:05:45" increment="14 days"
 
 # The first @test
 t.rast.aggregate --v input=precip_abs1 output=precip_abs2 \
-    base=prec_sum granularity="2 days" method=average sampling=start,during
+    base=prec_sum granularity="2 days" method=average \
+    sampling=overlaps,overlapped,during nprocs=5 -sn
 t.info type=strds input=precip_abs2
+t.rast.list input=precip_abs1
+t.rast.list input=precip_abs2
+
 t.rast.aggregate --v input=precip_abs1 output=precip_abs2 \
-    base=prec_sum granularity="1 months" method=maximum sampling=start,during
+    base=prec_sum granularity="1 months" method=maximum \
+    sampling=contains nprocs=5 -n 
 t.info type=strds input=precip_abs2
+t.rast.list input=precip_abs1
+t.rast.list input=precip_abs2
+
 t.rast.aggregate --v input=precip_abs1 output=precip_abs3 offset=10 \
-    base=prec_sum granularity="2 months" method=minimum sampling=start,during
-t.info type=strds input=precip_abs2
+    base=prec_sum granularity="2 months" method=minimum \
+    sampling=contains
+t.info type=strds input=precip_abs3
 t.rast.list input=precip_abs3
+
 t.rast.aggregate --v input=precip_abs1 output=precip_abs4 offset=100 \
-    base=prec_sum granularity="3 months" method=sum sampling=start,during
-t.info type=strds input=precip_abs2
+    base=prec_sum granularity="3 months" method=sum \
+    sampling=contains
+t.info type=strds input=precip_abs4
 t.rast.list input=precip_abs4
 
-t.remove -rf type=strds input=precip_abs1,precip_abs2,precip_abs3,precip_abs4
+#t.remove -rf type=strds input=precip_abs1,precip_abs2,precip_abs3,precip_abs4
+t.remove -rf type=strds input=precip_abs1,precip_abs2

Modified: grass/trunk/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.py
===================================================================
--- grass/trunk/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.py	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.py	2014-06-29 18:05:40 UTC (rev 61066)
@@ -6,10 +6,10 @@
 # AUTHOR(S):	Soeren Gebbert
 #
 # PURPOSE:	Aggregates data of an existing space time raster dataset using the time intervals of a second space time dataset
-# COPYRIGHT:	(C) 2011 by the GRASS Development Team
+# COPYRIGHT:	(C) 2011 by the gcore Development Team
 #
 #		This program is free software under the GNU General Public
-#		License (version 2). Read the file COPYING that comes with GRASS
+#		License (version 2). Read the file COPYING that comes with gcore
 #		for details.
 #
 #############################################################################
@@ -64,15 +64,34 @@
 #% answer: 0
 #%end
 
+#%option
+#% key: nprocs
+#% type: integer
+#% description: Number of r.series processes to run in parallel
+#% required: no
+#% multiple: no
+#% answer: 1
+#%end
+
 #%option G_OPT_T_SAMPLE
+#% options: equal,overlaps,overlapped,starts,started,finishes,finished,during,contains
+#% answer: contains
 #%end
 
+#%option G_OPT_T_WHERE
+#%end
+
 #%flag
 #% key: n
 #% description: Register Null maps
 #%end
 
-import grass.script as grass
+#%flag
+#% key: s
+#% description: Use start time - truncated according to granularity - as suffix. This flag overrides the offset option.
+#%end
+
+import grass.script as gcore
 import grass.temporal as tgis
 
 ############################################################################
@@ -84,16 +103,20 @@
     input = options["input"]
     output = options["output"]
     sampler = options["sample"]
+    where = options["where"]
     base = options["basename"]
     register_null = flags["n"]
     method = options["method"]
-    type = options["type"]
     sampling = options["sampling"]
     offset = options["offset"]
+    nprocs = options["nprocs"]
+    time_suffix = flags["s"]
+    type = options["type"]
+    
+    topo_list = sampling.split(",")
 
-    # Make sure the temporal database exists
     tgis.init()
-    # We need a database interface
+
     dbif = tgis.SQLDatabaseInterfaceConnection()
     dbif.connect()
 
@@ -102,60 +125,52 @@
 
     if sampler_sp.get_temporal_type() != sp.get_temporal_type():
         dbif.close()
-        grass.fatal(_("Input and aggregation dataset must have "
+        gcore.fatal(_("Input and aggregation dataset must have "
                       "the same temporal type"))
 
     # Check if intervals are present
     if sampler_sp.temporal_extent.get_map_time() != "interval":
         dbif.close()
-        grass.fatal(_("All registered maps of the aggregation dataset "
+        gcore.fatal(_("All registered maps of the aggregation dataset "
                       "must have time intervals"))
 
-    temporal_type, semantic_type, title, description = sp.get_initial_values()
-    new_sp = tgis.open_new_stds(output, "strds", temporal_type,
-                                              title, description, semantic_type,
-                                              dbif, grass.overwrite())
+    # We will create the strds later, but need to check here
+    tgis.check_new_stds(output, "strds",   dbif,  gcore.overwrite())
 
-    rows = sampler_sp.get_registered_maps(
-        "id,start_time,end_time", None, "start_time", dbif)
+    map_list = sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)
 
-    if not rows:
-            dbif.close()
-            grass.fatal(_("Aggregation dataset <%s> is empty") % id)
+    if not map_list:
+        dbif.close()
+        gcore.fatal(_("Space time raster dataset <%s> is empty") % input)
 
-    count = 0
-    for row in rows:
-        count += 1
-        start = row["start_time"]
-        end = row["end_time"]
+    granularity_list = sampler_sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)
 
-        input_map_names = tgis.collect_map_names(
-            sp, dbif, start, end, sampling)
+    if not granularity_list:
+        dbif.close()
+        gcore.fatal(_("Space time raster dataset <%s> is empty") % sampler)
 
-        if input_map_names:
-            new_map = tgis.aggregate_raster_maps(input_map_names, base,
-                                                 start, end, count, method,
-                                                 register_null, dbif,  offset)
+    gran = sampler_sp.get_granularity()
 
-            if new_map:
-                # Set the time stamp and write it to the raster map
-                if sp.is_time_absolute():
-                    new_map.set_absolute_time(start, end)
-                else:
-                    new_map.set_relative_time(start,
-                                              end, sp.get_relative_time_unit())
+    output_list = tgis.aggregate_by_topology(granularity_list=granularity_list,  granularity=gran,  
+                                                                       map_list=map_list,  
+                                                                       topo_list=topo_list,  basename=base, time_suffix=time_suffix,
+                                                                       offset=offset,  method=method,  nprocs=nprocs,  spatial=None, 
+                                                                       overwrite=gcore.overwrite())
 
-                # Insert map in temporal database
-                new_map.insert(dbif)
-                new_sp.register_map(new_map, dbif)
+    if output_list:
+        temporal_type, semantic_type, title, description = sp.get_initial_values()
+        output_strds = tgis.open_new_stds(output, "strds", temporal_type,
+                                                                 title, description, semantic_type,
+                                                                 dbif, gcore.overwrite())
+        tgis.register_map_object_list("rast", output_list,  output_strds,  register_null,  
+                                                       sp.get_relative_time_unit(),  dbif)
 
-    # Update the spatio-temporal extent and the raster metadata table entries
-    new_sp.set_aggregation_type(method)
-    new_sp.metadata.update(dbif)
-    new_sp.update_from_registered_maps(dbif)
+        # Update the raster metadata table entries with aggregation type
+        output_strds.set_aggregation_type(method)
+        output_strds.metadata.update(dbif)
 
     dbif.close()
 
 if __name__ == "__main__":
-    options, flags = grass.parser()
+    options, flags = gcore.parser()
     main()
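
The key difference to t.rast.aggregate is that the granules come straight from the sampler dataset instead of being generated from a granularity. A condensed sketch (not part of the commit; dataset names follow the test script below, everything else is an assumption):

import grass.temporal as tgis

tgis.init()
dbif = tgis.SQLDatabaseInterfaceConnection()
dbif.connect()

# Names as in test.t.rast.aggregate.ds.sh; parameters are illustrative.
sp = tgis.open_old_stds("precip_abs1", "strds", dbif)
sampler_sp = tgis.open_old_stds("soil_abs1", "stvds", dbif)

map_list = sp.get_registered_maps_as_objects(order="start_time", dbif=dbif)
# The temporal extents of the sampler maps define the granules directly.
granularity_list = sampler_sp.get_registered_maps_as_objects(order="start_time",
                                                             dbif=dbif)

output_list = tgis.aggregate_by_topology(
    granularity_list=granularity_list, granularity=sampler_sp.get_granularity(),
    map_list=map_list, topo_list=["contains"], basename="prec_sum",
    time_suffix=False, method="sum", nprocs=2, dbif=dbif, overwrite=True)
dbif.close()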

Modified: grass/trunk/temporal/t.rast.aggregate.ds/test.t.rast.aggregate.ds.sh
===================================================================
--- grass/trunk/temporal/t.rast.aggregate.ds/test.t.rast.aggregate.ds.sh	2014-06-29 16:51:16 UTC (rev 61065)
+++ grass/trunk/temporal/t.rast.aggregate.ds/test.t.rast.aggregate.ds.sh	2014-06-29 18:05:40 UTC (rev 61066)
@@ -38,13 +38,15 @@
 # The @test
 
 t.rast.aggregate.ds --v input=precip_abs1 output=precip_abs2 base=prec_sum \
-    type=stvds sample=soil_abs1 method=sum sampling=start,during offset=100
+    type=stvds sample=soil_abs1 method=sum sampling=contains offset=33
 t.info type=strds input=precip_abs2
 t.rast.list input=precip_abs2 method=deltagap
 
+t.rast.aggregate.ds --v input=precip_abs1 output=precip_abs2 base=prec_sum \
+    type=stvds sample=soil_abs1 method=sum sampling=contains -ns
+t.info type=strds input=precip_abs2
+t.rast.list input=precip_abs2 method=deltagap
+
 # @postprocess
-t.unregister type=vect maps=soil_1,soil_2,soil_3
-t.remove type=stvds input=soil_abs1
-
+t.remove -rf type=stvds input=soil_abs1
 t.remove -rf type=strds input=precip_abs1,precip_abs2
-g.remove vect=soil_1,soil_2,soil_3


