Преглед изворни кода

PEP8 compliance and better doxygen formatting. New rtree tests.

git-svn-id: https://svn.osgeo.org/grass/grass/trunk@52631 15284696-431f-4ddb-bdfa-cd5b030d7da7
Soeren Gebbert пре 12 година
родитељ
комит
799ffaf996

+ 28 - 12
lib/python/temporal/abstract_dataset.py

@@ -7,6 +7,8 @@ Temporal GIS related functions to be used in temporal GIS Python library package
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> ad = AbstractDataset()
 >>> ad.reset(ident="soil@PERMANENT")
@@ -19,7 +21,9 @@ Traceback (most recent call last):
     raise ImplementationError("This method must be implemented in the subclasses")
 ImplementationError: 'This method must be implemented in the subclasses'
 
-(C) 2008-2011 by the GRASS Development Team
+@endcode
+
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -43,7 +47,8 @@ class ImplementationError(Exception):
         return repr(self.msg)
     
 class AbstractDataset(object):
-    """!This is the base class for all datasets (raster, vector, raster3d, strds, stvds, str3ds)"""
+    """!This is the base class for all datasets 
+       (raster, vector, raster3d, strds, stvds, str3ds)"""
 
     def reset(self, ident):
         """!Reset the internal structure and set the identifier
@@ -107,7 +112,8 @@ class AbstractDataset(object):
         return self.base.get_mapset()
 
     def get_valid_time(self):
-        """!Returns a tuple of the start, the end valid time, this can be either datetime or double values
+        """!Returns a tuple of the start, the end valid time, 
+           this can be either datetime or double values
            @return A tuple of (start_time, end_time)
         """
 
@@ -124,7 +130,9 @@ class AbstractDataset(object):
         return (start, end)
 
     def get_absolute_time(self):
-        """!Returns a tuple of the start, the end valid time and the timezone of the map
+        """!Returns a tuple of the start, the end 
+           valid time and the timezone of the map
+           
            @return A tuple of (start_time, end_time, timezone)
         """
 
@@ -135,7 +143,8 @@ class AbstractDataset(object):
         return (start, end, tz)
 
     def get_relative_time(self):
-        """!Returns the relative time interval (start_time, end_time, unit) or None if not present"""
+        """!Returns the relative time interval (start_time, end_time, unit) 
+           or None if not present"""
 
         start = self.relative_time.get_start_time()
         end = self.relative_time.get_end_time()
@@ -151,7 +160,8 @@ class AbstractDataset(object):
         return unit
 
     def check_relative_time_unit(self, unit):
-        """!Check if unit is of type  years, months, days, hours, minutes or seconds
+        """!Check if unit is of type  years, months, days, hours, 
+           minutes or seconds
 
            Return True if success or False otherwise
         """
@@ -166,11 +176,13 @@ class AbstractDataset(object):
         return self.base.get_ttype()
 
     def get_spatial_extent(self):
-        """!Return a tuple of spatial extent (north, south, east, west, top, bottom) """
+        """!Return a tuple of spatial extent 
+           (north, south, east, west, top, bottom) """
         return self.spatial_extent.get_spatial_extent()
 
     def select(self, dbif=None):
-        """!Select temporal dataset entry from database and fill up the internal structure"""
+        """!Select temporal dataset entry from database and fill 
+           up the internal structure"""
 
         dbif, connect = init_dbif(dbif)
 
@@ -197,12 +209,14 @@ class AbstractDataset(object):
         raise ImplementationError("This method must be implemented in the subclasses")
 
     def insert(self, dbif=None, execute=True):
-        """!Insert temporal dataset entry into database from the internal structure
+        """!Insert temporal dataset entry into 
+           database from the internal structure
 
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
         """
 
         dbif, connect = init_dbif(dbif)
@@ -234,7 +248,8 @@ class AbstractDataset(object):
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
         """
 
         dbif, connect = init_dbif(dbif)
@@ -266,7 +281,8 @@ class AbstractDataset(object):
 
            @param dbif: The database interface to be used
            @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
         """
 
         dbif, connect = init_dbif(dbif)

+ 116 - 56
lib/python/temporal/abstract_map_dataset.py

@@ -8,8 +8,8 @@ Temporal GIS related functions to be used in temporal GIS Python library package
 Usage:
 
 >>> import grass.temporal as tgis
->>> tmr = TemporalMapRelations()
->>> amd = AbstractMapDataset()
+>>> tmr = tgis.TemporalMapRelations()
+>>> amd = tgis.AbstractMapDataset()
 
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -27,8 +27,8 @@ class TemporalMapRelations(AbstractDataset):
 
        This object will be set up by temporal topology creation methods.
 
-       If correctly initialize the calls next() and prev() let the user walk temporally forward
-       and backward in time.
+       If correctly initialized, the calls next() and prev() 
+       let the user walk temporally forward and backward in time.
 
        The following temporal relations with access methods are supported:
        * equal
@@ -55,11 +55,14 @@ class TemporalMapRelations(AbstractDataset):
         
         Usage:
         
-        >>> import grass.temporal as tgis
+        @code
+        
         >>> tmr = TemporalMapRelations()
         >>> tmr.print_temporal_topology_info()
          +-------------------- Temporal Topology -------------------------------------+
         >>> tmr.print_temporal_topology_shell_info()
+        
+        @endcode
     """
 
     def __init__(self):
@@ -89,7 +92,8 @@ class TemporalMapRelations(AbstractDataset):
            temporally located AFTER the start time of this map, but temporally
            near than other maps of the same dataset.
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         self._temporal_topology["NEXT"] = map_
 
@@ -100,7 +104,8 @@ class TemporalMapRelations(AbstractDataset):
            temporally located BEFORE the start time of this map, but temporally
            near than other maps of the same dataset.
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         self._temporal_topology["PREV"] = map_
 
@@ -127,7 +132,8 @@ class TemporalMapRelations(AbstractDataset):
     def append_temporal_equivalent(self, map_):
         """!Append a map with equivalent temporal extent as this map
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "EQUAL" not in self._temporal_topology:
             self._temporal_topology["EQUAL"] = []
@@ -145,7 +151,8 @@ class TemporalMapRelations(AbstractDataset):
     def append_temporal_overlaps(self, map_):
         """!Append a map that this map temporally overlaps
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "OVERLAPS" not in self._temporal_topology:
             self._temporal_topology["OVERLAPS"] = []
@@ -163,7 +170,8 @@ class TemporalMapRelations(AbstractDataset):
     def append_temporal_overlapped(self, map_):
         """!Append a map that this map temporally overlapped
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "OVERLAPPED" not in self._temporal_topology:
             self._temporal_topology["OVERLAPPED"] = []
@@ -181,7 +189,8 @@ class TemporalMapRelations(AbstractDataset):
     def append_temporal_follows(self, map_):
         """!Append a map that this map temporally follows
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "FOLLOWS" not in self._temporal_topology:
             self._temporal_topology["FOLLOWS"] = []
@@ -199,7 +208,8 @@ class TemporalMapRelations(AbstractDataset):
     def append_temporal_precedes(self, map_):
         """!Append a map that this map temporally precedes
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "PRECEDES" not in self._temporal_topology:
             self._temporal_topology["PRECEDES"] = []
@@ -218,7 +228,8 @@ class TemporalMapRelations(AbstractDataset):
         """!Append a map that this map is temporally located during
            This includes temporal relationships starts and finishes
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type 
+                        AbstractMapDataset or derived classes
         """
         if "DURING" not in self._temporal_topology:
             self._temporal_topology["DURING"] = []
@@ -238,7 +249,8 @@ class TemporalMapRelations(AbstractDataset):
         """!Append a map that this map temporally contains
            This includes temporal relationships started and finished
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
         """
         if "CONTAINS" not in self._temporal_topology:
             self._temporal_topology["CONTAINS"] = []
@@ -389,7 +401,8 @@ class AbstractMapDataset(TemporalMapRelations):
     def set_stds_register(self, name):
         """!Set the space time dataset register table name.
 
-           This table stores all space time datasets in which this map is registered.
+           This table stores all space time datasets in 
+           which this map is registered.
 
            @param ident: The name of the register table
         """
@@ -397,9 +410,13 @@ class AbstractMapDataset(TemporalMapRelations):
             "This method must be implemented in the subclasses")
 
     def check_resolution_with_current_region(self):
-        """!Check if the raster or voxel resolution is finer than the current resolution
-           Return "finer" in case the raster/voxel resolution is finer than the current region
-           Return "coarser" in case the raster/voxel resolution is coarser than the current region
+        """!Check if the raster or voxel resolution is 
+           finer than the current resolution
+           
+           * Return "finer" in case the raster/voxel resolution is finer 
+             than the current region
+           * Return "coarser" in case the raster/voxel resolution is coarser 
+             than the current region
 
            Vector maps are alwyas finer than the current region
         """
@@ -413,14 +430,15 @@ class AbstractMapDataset(TemporalMapRelations):
             "This method must be implemented in the subclasses")
 
     def write_timestamp_to_grass(self):
-        """!Write the timestamp of this map into the map metadata in the grass file system based spatial
-           database.
+        """!Write the timestamp of this map into the map metadata 
+           in the grass file system based spatial database.
         """
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
     def remove_timestamp_from_grass(self):
-        """!Remove the timestamp from the grass file system based spatial database
+        """!Remove the timestamp from the grass file 
+           system based spatial database
         """
         raise ImplementationError(
             "This method must be implemented in the subclasses")
@@ -434,19 +452,21 @@ class AbstractMapDataset(TemporalMapRelations):
             "This method must be implemented in the subclasses")
 
     def read_info(self):
-        """!Read the map info from the grass file system based database and store the content
-           into a dictionary
+        """!Read the map info from the grass file system based database and 
+           store the content into a dictionary
         """
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
     def load(self):
-        """!Load the content of this object from the grass file system based database"""
+        """!Load the content of this object from the grass 
+           file system based database"""
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
     def _convert_timestamp(self):
-        """!Convert the valid time into a grass datetime library compatible timestamp string
+        """!Convert the valid time into a grass datetime library 
+           compatible timestamp string
 
             This methods works for reltaive and absolute time
 
@@ -486,9 +506,11 @@ class AbstractMapDataset(TemporalMapRelations):
     def build_id(self, name, mapset, layer=None):
         """!Convenient method to build the unique identifier
 
-            Existing layer and mapset definitions in the name string will be reused
+            Existing layer and mapset definitions in the name 
+            string will be reused
 
-           @param return the id of the vector map as name(:layer)@mapset while layer is optional
+           @param return the id of the vector map as name(:layer)@mapset 
+                  while layer is optional
         """
 
         # Check if the name includes any mapset
@@ -514,17 +536,14 @@ class AbstractMapDataset(TemporalMapRelations):
         if self.get_type() == "raster":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Raster Dataset ----------------------------------------+"
         if self.get_type() == "raster3d":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Raster3d Dataset --------------------------------------+"
         if self.get_type() == "vector":
             #                1         2         3         4         5         6         7
             #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
             print " +-------------------- Vector Dataset ----------------------------------------+"
         print " |                                                                            |"
         self.base.print_info()
@@ -629,22 +648,37 @@ class AbstractMapDataset(TemporalMapRelations):
         """
         if start_time and not isinstance(start_time, datetime):
             if self.get_layer() is not None:
-                core.fatal(_("Start time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                core.fatal(_("Start time must be of type datetime "
+                             "for %s map <%s> with layer: %s") % \
+                           (self.get_type(), self.get_map_id(), 
+                            self.get_layer()))
             else:
-                core.fatal(_("Start time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                core.fatal(_("Start time must be of type "
+                             "datetime for %s map <%s>") % \
+                           (self.get_type(), self.get_map_id()))
 
         if end_time and not isinstance(end_time, datetime):
             if self.get_layer():
-                core.fatal(_("End time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                core.fatal(_("End time must be of type datetime "
+                             "for %s map <%s> with layer: %s") % \
+                           (self.get_type(), self.get_map_id(), 
+                            self.get_layer()))
             else:
-                core.fatal(_("End time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                core.fatal(_("End time must be of type datetime "
+                             "for %s map <%s>") % (self.get_type(), 
+                                                   self.get_map_id()))
 
         if start_time is not None and end_time is not None:
             if start_time > end_time:
                 if self.get_layer():
-                    core.fatal(_("End time must be greater than start time for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                    core.fatal(_("End time must be greater than "
+                                 "start time for %s map <%s> with layer: %s") %\
+                                (self.get_type(), self.get_map_id(), 
+                                 self.get_layer()))
                 else:
-                    core.fatal(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                    core.fatal(_("End time must be greater than "
+                                 "start time for %s map <%s>") % \
+                               (self.get_type(), self.get_map_id()))
             else:
                 # Do not create an interval in case start and end time are equal
                 if start_time == end_time:
@@ -655,7 +689,8 @@ class AbstractMapDataset(TemporalMapRelations):
         self.absolute_time.set_end_time(end_time)
         self.absolute_time.set_timezone(timezone)
 
-    def update_absolute_time(self, start_time, end_time=None, timezone=None, dbif=None):
+    def update_absolute_time(self, start_time, end_time=None, 
+                             timezone=None, dbif=None):
         """!Update the absolute time
 
            This functions assures that the timetsamp is written to the 
@@ -690,17 +725,28 @@ class AbstractMapDataset(TemporalMapRelations):
 
         if not self.check_relative_time_unit(unit):
             if self.get_layer() is not None:
-                core.error(_("Unsupported relative time unit type for %s map <%s> with layer %s: %s") % (self.get_type(), self.get_id(), self.get_layer(), unit))
+                core.error(_("Unsupported relative time unit type for %s map "
+                             "<%s> with layer %s: %s") % (self.get_type(), 
+                                                          self.get_id(), 
+                                                          self.get_layer(), 
+                                                          unit))
             else:
-                core.error(_("Unsupported relative time unit type for %s map <%s>: %s") % (self.get_type(), self.get_id(), unit))
+                core.error(_("Unsupported relative time unit type for %s map "
+                             "<%s>: %s") % (self.get_type(), self.get_id(), 
+                                            unit))
             return False
 
         if start_time is not None and end_time is not None:
             if int(start_time) > int(end_time):
                 if self.get_layer() is not None:
-                    core.error(_("End time must be greater than start time for %s map <%s> with layer %s") % (self.get_type(), self.get_id(), self.get_layer()))
+                    core.error(_("End time must be greater than start time for"
+                                 " %s map <%s> with layer %s") % \
+                               (self.get_type(), self.get_id(), 
+                                self.get_layer()))
                 else:
-                    core.error(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_id()))
+                    core.error(_("End time must be greater than start time for"
+                                 " %s map <%s>") % (self.get_type(), 
+                                                    self.get_id()))
                 return False
             else:
                 # Do not create an interval in case start and end time are equal
@@ -763,9 +809,14 @@ class AbstractMapDataset(TemporalMapRelations):
             if end is not None:
                 if start >= end:
                     if self.get_layer() is not None:
-                        core.error(_("Map <%s> with layer %s has incorrect time interval, start time is greater than end time") % (self.get_map_id(), self.get_layer()))
+                        core.error(_("Map <%s> with layer %s has incorrect "
+                                     "time interval, start time is greater "
+                                     "than end time") % (self.get_map_id(), 
+                                                         self.get_layer()))
                     else:
-                        core.error(_("Map <%s> has incorrect time interval, start time is greater than end time") % (self.get_map_id()))
+                        core.error(_("Map <%s> has incorrect time interval, "
+                                     "start time is greater than end time") % \
+                                   (self.get_map_id()))
                     return False
         else:
             core.error(_("Map <%s> has incorrect start time") %
@@ -778,14 +829,16 @@ class AbstractMapDataset(TemporalMapRelations):
         """!Delete a map entry from database if it exists
 
             Remove dependent entries:
-            * Remove the map entry in each space time dataset in which this map is registered
+            * Remove the map entry in each space time dataset in which this map 
+              is registered
             * Remove the space time dataset register table
 
            @param dbif: The database interface to be used
            @param update: Call for each unregister statement the update from 
                           registered maps of the space time dataset. 
                           This can slow down the un-registration process significantly.
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
+           @param execute: If True the SQL DELETE and DROP table statements will 
+                           be executed.
                            If False the prepared SQL statements are 
                            returned and must be executed by the caller.
 
@@ -812,7 +865,8 @@ class AbstractMapDataset(TemporalMapRelations):
             core.verbose(_("Delete %s dataset <%s> from temporal database")
                          % (self.get_type(), self.get_id()))
 
-            # Delete yourself from the database, trigger functions will take care of dependencies
+            # Delete yourself from the database, trigger functions will 
+            # take care of dependencies
             statement += self.base.get_delete_statement()
 
         if execute:
@@ -832,13 +886,15 @@ class AbstractMapDataset(TemporalMapRelations):
         return statement
 
     def unregister(self, dbif=None, update=True, execute=True):
-        """! Remove the map entry in each space time dataset in which this map is registered
+        """! Remove the map entry in each space time dataset in which this map 
+           is registered
 
            @param dbif: The database interface to be used
-           @param update: Call for each unregister statement the update from registered maps
-                          of the space time dataset. This can slow down the 
-                          un-registration process significantly.
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
+           @param update: Call for each unregister statement the update from 
+                          registered maps of the space time dataset. This can 
+                          slow down the un-registration process significantly.
+           @param execute: If True the SQL DELETE and DROP table statements 
+                           will be executed.
                            If False the prepared SQL statements are 
                            returned and must be executed by the caller.
 
@@ -846,11 +902,14 @@ class AbstractMapDataset(TemporalMapRelations):
         """
 
         if self.get_layer() is not None:
-            core.verbose(_("Unregister %s map <%s> with layer %s from space time datasets") %
-                         (self.get_type(), self.get_map_id(), self.get_layer()))
+            core.verbose(_("Unregister %(type)s map <%(map)s> with "
+                           "layer %(layer)s from space time datasets") % \
+                         {'type': self.get_type(), 'map': self.get_map_id(), 
+                          'layer': self.get_layer()})
         else:
-            core.verbose(_("Unregister %s map <%s> from space time datasets")
-                         % (self.get_type(), self.get_map_id()))
+            core.verbose(_("Unregister %(type)s map <%(map)s> "
+                           "from space time datasets")
+                         % {'type': self.get_type(), 'map': self.get_map_id()})
 
         statement = ""
         dbif, connect = init_dbif(dbif)
@@ -906,7 +965,8 @@ class AbstractMapDataset(TemporalMapRelations):
                 dbif.cursor.execute(sql)
                 rows = dbif.cursor.fetchall()
         except:
-            core.error(_("Unable to select space time dataset register table <%s>") % (self.get_stds_register()))
+            core.error(_("Unable to select space time dataset register table "
+                         "<%s>") % (self.get_stds_register()))
 
         if connect:
             dbif.close()

Разлика између датотеке није приказан због своје велике величине
+ 298 - 147
lib/python/temporal/abstract_space_time_dataset.py


+ 28 - 17
lib/python/temporal/aggregation.py

@@ -9,7 +9,8 @@ Usage:
 @code
 import grass.temporal as tgis
 
-tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end, count, method, register_null, dbif)
+tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end,
+    count, method, register_null, dbif)
 
 ...
 @endcode
@@ -27,6 +28,7 @@ import grass.lib.gis as libgis
 
 ###############################################################################
 
+
 def collect_map_names(sp, dbif, start, end, sampling):
     """!Gather all maps from dataset using a specific sample method
 
@@ -36,7 +38,7 @@ def collect_map_names(sp, dbif, start, end, sampling):
        @param end: The end time of the sample interval, may be relative or absolute
        @param sampling: The sampling methods to use
     """
-    
+
     use_start = False
     use_during = False
     use_overlap = False
@@ -75,8 +77,15 @@ def collect_map_names(sp, dbif, start, end, sampling):
         use_follows = False
         use_precedes = False
 
-    where = create_temporal_relation_sql_where_statement(start, end, use_start, use_during, use_overlap, use_contain, use_equal, use_follows, use_precedes)
-   
+    where = create_temporal_relation_sql_where_statement(start, end, 
+                                                         use_start, 
+                                                         use_during, 
+                                                         use_overlap, 
+                                                         use_contain, 
+                                                         use_equal, 
+                                                         use_follows, 
+                                                         use_precedes)
+
     rows = sp.get_registered_maps("id", where, "start_time", dbif)
 
     if not rows:
@@ -86,13 +95,15 @@ def collect_map_names(sp, dbif, start, end, sampling):
     for row in rows:
         names.append(row["id"])
 
-    return names    
+    return names
 
 ###############################################################################
 
-def aggregate_raster_maps(inputs, base, start, end, count, method, register_null, dbif):
+
+def aggregate_raster_maps(inputs, base, start, end, count, method, 
+                          register_null, dbif):
     """!Aggregate a list of raster input maps with r.series
-       
+
        @param inputs: The names of the raster maps to be aggregated
        @param base: The basename of the new created raster maps
        @param start: The start time of the sample interval, may be relative or absolute
@@ -103,13 +114,11 @@ def aggregate_raster_maps(inputs, base, start, end, count, method, register_null
        @param dbif: The temporal database interface to use
     """
 
-    core.verbose(_("Aggregate %s raster maps") %(len(inputs)))
+    core.verbose(_("Aggregate %s raster maps") % (len(inputs)))
     output = "%s_%i" % (base, count)
-    
-    mapset = libgis.G_mapset()
 
+    mapset = libgis.G_mapset()
     map_id = output + "@" + mapset
-
     new_map = raster_dataset(map_id)
 
     # Check if new map is in the temporal database
@@ -122,7 +131,8 @@ def aggregate_raster_maps(inputs, base, start, end, count, method, register_null
             core.error(_("Raster map <%s> is already in temporal database, use overwrite flag to overwrite"))
             return
 
-    core.verbose(_("Compute aggregation of maps between %s - %s" % (str(start), str(end))))
+    core.verbose(_("Compute aggregation of maps between %s - %s" % (
+        str(start), str(end))))
 
     # Create the r.series input file
     filename = core.tempfile(True)
@@ -134,20 +144,21 @@ def aggregate_raster_maps(inputs, base, start, end, count, method, register_null
 
     file.close()
     # Run r.series
-    ret = core.run_command("r.series", flags="z", file=filename, output=output, overwrite=core.overwrite(), method=method)
+    ret = core.run_command("r.series", flags="z", file=filename,
+                           output=output, overwrite=core.overwrite(), 
+                           method=method)
 
     if ret != 0:
         dbif.close()
         core.fatal(_("Error while r.series computation"))
-        
 
     # Read the raster map data
     new_map.load()
-    
+
     # In case of a null map continue, do not register null maps
-    if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
+    if new_map.metadata.get_min() is None and new_map.metadata.get_max() is None:
         if not register_null:
             core.run_command("g.remove", rast=output)
             return None
-    
+
     return new_map

+ 97 - 54
lib/python/temporal/base.py

@@ -5,12 +5,14 @@
 Temporal GIS base classes to be used in other
 Python temporal gis packages.
 
-This packages includes all base classes to store basic information like id, name,
-mapset creation and modification time as well as sql serialization and de-serialization
-and the sql database interface.
+This packages includes all base classes to store basic information 
+like id, name, mapset creation and modification time as well as sql 
+serialization and de-serialization and the sql database interface.
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> rbase = tgis.RasterBase(ident="soil@PERMANENT")
 >>> vbase = tgis.VectorBase(ident="soil:1@PERMANENT")
@@ -19,7 +21,9 @@ Usage:
 >>> stvdsbase = tgis.STVDSBase(ident="soil@PERMANENT")
 >>> str3dsbase = tgis.STR3DSBase(ident="soil@PERMANENT")
 
-(C) 2008-2011 by the GRASS Development Team
+@endcode
+
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -38,12 +42,15 @@ class DictSQLSerializer(object):
         self.D = {}
 
     def serialize(self, type, table, where=None):
-        """!Convert the internal dictionary into a string of semicolon separated SQL statements
-            The keys are the column names and the values are the row entries
+        """!Convert the internal dictionary into a string of semicolon 
+            separated SQL statements The keys are the column names and 
+            the values are the row entries
+            
+            Usage:
             
-            >>> import grass.temporal as tgis
-            >>> from datetime import datetime, date, time, timedelta
-            >>> t = tgis.DictSQLSerializer()
+            \code
+            
+            >>> t = DictSQLSerializer()
             >>> t.D["id"] = "soil@PERMANENT"
             >>> t.D["name"] = "soil"
             >>> t.D["mapset"] = "PERMANENT"
@@ -63,6 +70,8 @@ class DictSQLSerializer(object):
             @table The name of the table to select, insert or update
             @where The optional where statement
             @return a tuple containing the SQL string and the arguments
+            
+            \endcode
         """
 
         sql = ""
@@ -166,7 +175,8 @@ class DictSQLSerializer(object):
         return sql, tuple(args)
 
     def deserialize(self, row):
-        """!Convert the content of the dbmi dictionary like row into the internal dictionary
+        """!Convert the content of the dbmi dictionary like row into the 
+           internal dictionary
 
            @param row: The dictionary like row to store in the internal dict
         """
@@ -188,10 +198,10 @@ class DictSQLSerializer(object):
 class SQLDatabaseInterface(DictSQLSerializer):
     """!This class represents the SQL database interface
 
-       Functions to insert, select and update the internal structure of this class
-       in the temporal database are implemented.
-       This is the base class for raster, raster3d, vector and space time datasets
-       data management classes:
+       Functions to insert, select and update the internal 
+       structure of this class in the temporal database are implemented.
+       This is the base class for raster, raster3d, vector and 
+       space time datasets data management classes:
        * Identification information (base)
        * Spatial extent
        * Temporal extent
@@ -199,9 +209,9 @@ class SQLDatabaseInterface(DictSQLSerializer):
        
        Usage:
        
-        >>> import grass.temporal as tgis
-        >>> from datetime import datetime, date, time, timedelta
-        >>> t = tgis.SQLDatabaseInterface("raster", "soil@PERMANENT")
+       \code
+       
+        >>> t = SQLDatabaseInterface("raster", "soil@PERMANENT")
         >>> t.D["name"] = "soil"
         >>> t.D["mapset"] = "PERMANENT"
         >>> t.D["creator"] = "soeren"
@@ -226,12 +236,15 @@ class SQLDatabaseInterface(DictSQLSerializer):
         ("UPDATE raster SET  creation_time = ?  ,mapset = ?  ,name = ?  ,creator = ? WHERE id = 'soil@PERMANENT';\\n", (datetime.datetime(2001, 1, 1, 0, 0), 'PERMANENT', 'soil', 'soeren'))
         >>> t.get_update_all_statement_mogrified()
         "UPDATE raster SET  creation_time = '2001-01-01 00:00:00'  ,mapset = 'PERMANENT'  ,name = 'soil'  ,creator = 'soeren' WHERE id = 'soil@PERMANENT';\\n"
+        
+        \endcode
     """
     def __init__(self, table=None, ident=None):
         """!Constructor of this class
 
            @param table: The name of the table
-           @param ident: The identifier (primary key) of this object in the database table
+           @param ident: The identifier (primary key) of this 
+                         object in the database table
         """
         DictSQLSerializer.__init__(self)
 
@@ -239,17 +252,20 @@ class SQLDatabaseInterface(DictSQLSerializer):
         self.ident = ident
 
     def get_table_name(self):
-        """!Return the name of the table in which the internal data are inserted, updated or selected"""
+        """!Return the name of the table in which the internal 
+           data are inserted, updated or selected"""
         return self.table
 
     def get_delete_statement(self):
         """!Return the delete string"""
-        return "DELETE FROM " + self.get_table_name() + " WHERE id = \'" + str(self.ident) + "\';\n"
+        return "DELETE FROM " + self.get_table_name() + \
+               " WHERE id = \'" + str(self.ident) + "\';\n"
 
     def delete(self, dbif=None):
         """!Delete the entry of this object from the temporal database
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         sql = self.get_delete_statement()
         #print sql
@@ -264,12 +280,14 @@ class SQLDatabaseInterface(DictSQLSerializer):
 
     def get_is_in_db_statement(self):
         """Return the selection string"""
-        return "SELECT id FROM " + self.get_table_name() + " WHERE id = \'" + str(self.ident) + "\';\n"
+        return "SELECT id FROM " + self.get_table_name() + \
+               " WHERE id = \'" + str(self.ident) + "\';\n"
 
     def is_in_db(self, dbif=None):
         """!Check if this object is present in the temporal database
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
 
         sql = self.get_is_in_db_statement()
@@ -292,13 +310,16 @@ class SQLDatabaseInterface(DictSQLSerializer):
         return True
 
     def get_select_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("SELECT", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument list in 
+           database specific style"""
+        return self.serialize("SELECT", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_select_statement_mogrified(self, dbif=None):
         """!Return the select statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -309,7 +330,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
         """!Select the content from the temporal database and store it
            in the internal dictionary structure
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         sql, args = self.get_select_statement()
         #print sql
@@ -344,13 +366,15 @@ class SQLDatabaseInterface(DictSQLSerializer):
         return True
 
     def get_insert_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
+        """!Return the sql statement and the argument 
+           list in database specific style"""
         return self.serialize("INSERT", self.get_table_name())
 
     def get_insert_statement_mogrified(self, dbif=None):
         """!Return the insert statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -361,7 +385,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
         """!Serialize the content of this object and store it in the temporal
            database using the internal identifier
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         sql, args = self.get_insert_statement()
         #print sql
@@ -376,13 +401,16 @@ class SQLDatabaseInterface(DictSQLSerializer):
             dbif.close()
 
     def get_update_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("UPDATE", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument list 
+           in database specific style"""
+        return self.serialize("UPDATE", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_update_statement_mogrified(self, dbif=None):
         """!Return the update statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -395,7 +423,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
 
            Only object entries which are exists (not None) are updated
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if self.ident is None:
             raise IOError("Missing identifer")
@@ -413,13 +442,16 @@ class SQLDatabaseInterface(DictSQLSerializer):
             dbif.close()
 
     def get_update_all_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("UPDATE ALL", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument 
+           list in database specific style"""
+        return self.serialize("UPDATE ALL", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_update_all_statement_mogrified(self, dbif=None):
         """!Return the update all statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
@@ -427,10 +459,11 @@ class SQLDatabaseInterface(DictSQLSerializer):
         return dbif.mogrify_sql_statement(self.get_update_all_statement())
 
     def update_all(self, dbif=None):
-        """!Serialize the content of this object, including None objects, and update it in the temporal
-           database using the internal identifier
+        """!Serialize the content of this object, including None objects, 
+        and update it in the temporal database using the internal identifier
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         if self.ident is None:
             raise IOError("Missing identifer")
@@ -455,10 +488,10 @@ class DatasetBase(SQLDatabaseInterface):
         basic identification information
         
         Usage:
+        
+        \code
 
-        >>> import grass.temporal as tgis
-        >>> from datetime import datetime, date, time, timedelta
-        >>> t = tgis.DatasetBase("raster", "soil@PERMANENT", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
+        >>> t = DatasetBase("raster", "soil@PERMANENT", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
         >>> t.id
         'soil@PERMANENT'
         >>> t.name
@@ -486,15 +519,19 @@ class DatasetBase(SQLDatabaseInterface):
         creator=soeren
         creation_time=2001-01-01 00:00:00
         temporal_type=absolute
+        
+        \endcode
     """
     
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
                  creator=None, ctime=None,ttype=None):
         """!Constructor
         
-            @param table: The name of the temporal database table that should be used to store the values
-            @param ident: The unique identifier must be a combination of the dataset name, 
-                          layer name and the mapset name@mapset or name:1@mapset
+            @param table: The name of the temporal database table 
+                          that should be used to store the values
+            @param ident: The unique identifier must be a combination of 
+                          the dataset name, layer name and the mapset 
+                          name@mapset or name:1@mapset
                           used as as primary key in the temporal database
             @param name: The name of the map or dataset
             @param mapset: The name of the mapset 
@@ -525,8 +562,9 @@ class DatasetBase(SQLDatabaseInterface):
     def set_id(self, ident):
         """!Convenient method to set the unique identifier (primary key)
 
-           @param ident: The unique identifier must be a combination of the dataset name, 
-                         layer name and the mapset name@mapset or name:1@mapset
+           @param ident: The unique identifier must be a combination 
+                         of the dataset name, layer name and the mapset 
+                         name@mapset or name:1@mapset
         """
         self.ident = ident
         self.D["id"] = ident
@@ -575,7 +613,8 @@ class DatasetBase(SQLDatabaseInterface):
         self.D["creator"] = creator
 
     def set_ctime(self, ctime=None):
-        """!Set the creation time of the dataset, if nothing set the current time is used
+        """!Set the creation time of the dataset, 
+           if nothing set the current time is used
 
            @param ctime: The current time of type datetime
         """
@@ -617,13 +656,15 @@ class DatasetBase(SQLDatabaseInterface):
             return None
 
     def get_map_id(self):
-        """!Convenient method to get the unique map identifier without layer information
+        """!Convenient method to get the unique map identifier 
+           without layer information
 
            @param return the name of the vector map as name@mapset
         """
         if self.id.find(":") >= 0:
             # Remove the layer identifier from the id
-            return iself.d.split("@")[0].split(":")[0] + "@" + self.id.split("@")[1]
+            return self.id.split("@")[0].split(":")[0] + "@" + \
+                   self.id.split("@")[1]
         else:
             return self.id
 
@@ -782,9 +823,9 @@ class STDSBase(DatasetBase):
        
     Usage:
 
-    >>> import grass.temporal as tgis
-    >>> from datetime import datetime, date, time, timedelta
-    >>> t = tgis.STDSBase("stds", "soil@PERMANENT", semantic_type="average", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
+    \code
+    
+    >>> t = STDSBase("stds", "soil@PERMANENT", semantic_type="average", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
     >>> t.semantic_type
     'average'
     >>> t.print_info()
@@ -804,6 +845,8 @@ class STDSBase(DatasetBase):
     creation_time=2001-01-01 00:00:00
     temporal_type=absolute
     semantic_type=average
+    
+    \endcode
     """
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
                  semantic_type=None, creator=None, ctime=None,

+ 41 - 18
lib/python/temporal/core.py

@@ -9,6 +9,8 @@ SQL database and to establish a connection to the database.
 
 Usage:
 
+\code
+
 >>> import grass.temporal as tgis
 >>> # Create the temporal database
 >>> tgis.create_temporal_database()
@@ -18,10 +20,13 @@ Usage:
 >>> # Execute a SQL statement
 >>> dbif.execute_transaction("SELECT datetime(0, 'unixepoch', 'localtime');")
 >>> # Mogrify an SQL statement
->>> dbif.mogrify_sql_statement(["SELECT name from raster_base where name = ?", ("precipitation",)])
+>>> dbif.mogrify_sql_statement(["SELECT name from raster_base where name = ?", 
+... ("precipitation",)])
 "SELECT name from raster_base where name = 'precipitation'"
 >>> dbif.close()
 
+\endcode
+
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -47,7 +52,8 @@ if "driver" in kv:
         # Needed for dictionary like cursors
         import psycopg2.extras
     else:
-        core.fatal(_("Unable to initialize the temporal DBMI interface. Use t.connect to specify the driver and the database string"))
+        core.fatal(_("Unable to initialize the temporal DBMI interface. Use "
+                     "t.connect to specify the driver and the database string"))
 else:
     # Use the default sqlite variable
     core.run_command("t.connect", flags="d")
@@ -67,13 +73,17 @@ def get_temporal_dbmi_init_string():
                 "$LOCATION_NAME", grassenv["LOCATION_NAME"])
             return string
         else:
-            core.fatal(_("Unable to initialize the temporal GIS DBMI interface. Use t.connect to specify the driver and the database string"))
+            core.fatal(_("Unable to initialize the temporal GIS DBMI "
+                         "interface. Use t.connect to specify the driver "
+                         "and the database string"))
     elif dbmi.__name__ == "psycopg2":
         if "database" in kv:
             string = kv["database"]
             return string
         else:
-            core.fatal(_("Unable to initialize the temporal GIS DBMI interface. Use t.connect to specify the driver and the database string"))
+            core.fatal(_("Unable to initialize the temporal GIS DBMI "
+                         "interface. Use t.connect to specify the driver "
+                         "and the database string"))
             return "dbname=grass_test user=soeren password=abcdefgh"
 
 ###############################################################################
@@ -88,10 +98,12 @@ def get_sql_template_path():
 
 
 def create_temporal_database():
-    """!This function creates the grass location database structure for raster, vector and raster3d maps
-       as well as for the space-time datasets strds, str3ds and stvds
+    """!This function creates the grass location database structure for raster, 
+       vector and raster3d maps as well as for the space-time datasets strds, 
+       str3ds and stvds
 
-       This functions must be called before any spatio-temporal processing can be started
+       This function must be called before any spatio-temporal processing 
+       can be started
     """
 
     database = get_temporal_dbmi_init_string()
@@ -108,7 +120,8 @@ def create_temporal_database():
         connection = dbmi.connect(database)
         cursor = connection.cursor()
         # Check for raster_base table
-        cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_name=%s)", ('raster_base',))
+        cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables "
+                       "WHERE table_name=%s)", ('raster_base',))
         db_exists = cursor.fetchone()[0]
         connection.commit()
         cursor.close()
@@ -214,7 +227,8 @@ class SQLDatabaseInterfaceConnection():
         init = get_temporal_dbmi_init_string()
         #print "Connect to",  self.database
         if dbmi.__name__ == "sqlite3":
-            self.connection = dbmi.connect(init, detect_types=dbmi.PARSE_DECLTYPES | dbmi.PARSE_COLNAMES)
+            self.connection = dbmi.connect(init, 
+                    detect_types=dbmi.PARSE_DECLTYPES | dbmi.PARSE_COLNAMES)
             self.connection.row_factory = dbmi.Row
             self.connection.isolation_level = None
             self.cursor = self.connection.cursor()
@@ -277,8 +291,8 @@ class SQLDatabaseInterfaceConnection():
                         break
 
                     if args[count] is None:
-                        statement = "%sNULL%s" % (statement[0:
-                                                            pos], statement[pos + 1:])
+                        statement = "%sNULL%s" % (statement[0:pos], 
+                                                  statement[pos + 1:])
                     elif isinstance(args[count], (int, long)):
                         statement = "%s%d%s" % (statement[0:pos], args[count],
                                                 statement[pos + 1:])
@@ -286,8 +300,11 @@ class SQLDatabaseInterfaceConnection():
                         statement = "%s%f%s" % (statement[0:pos], args[count],
                                                 statement[pos + 1:])
                     else:
-                        # Default is a string, this works for datetime objects too
-                        statement = "%s\'%s\'%s" % (statement[0:pos], str(args[count]), statement[pos + 1:])
+                        # Default is a string, this works for datetime 
+                        # objects too
+                        statement = "%s\'%s\'%s" % (statement[0:pos], 
+                                                    str(args[count]), 
+                                                    statement[pos + 1:])
                     count += 1
 
                 return statement
@@ -301,7 +318,7 @@ class SQLDatabaseInterfaceConnection():
             @param statement The executable SQL statement or SQL script
         """
         connect = False
-        if self.connected == False:
+        if not self.connected:
             self.connect()
             connect = True
 
@@ -309,6 +326,7 @@ class SQLDatabaseInterfaceConnection():
         sql_script += "BEGIN TRANSACTION;\n"
         sql_script += statement
         sql_script += "END TRANSACTION;"
+        
         try:
             if dbmi.__name__ == "sqlite3":
                 self.cursor.executescript(statement)
@@ -316,9 +334,10 @@ class SQLDatabaseInterfaceConnection():
                 self.cursor.execute(statement)
             self.connection.commit()
         except:
-            if connect == True:
+            if connect:
                 self.close()
-            core.error(_("Unable to execute transaction:\n %s") % (statement))
+            core.error(_("Unable to execute transaction:\n %(sql)s") % \
+                         {"sql":statement})
             raise
 
         if connect:
@@ -327,13 +346,17 @@ class SQLDatabaseInterfaceConnection():
 ###############################################################################
 
 def init_dbif(dbif):
-    """!This method checks if the database interface connection exists, if not a new one
-        will be created, connected and True will be returned
+    """!This method checks if the database interface connection exists, 
+        if not a new one will be created, connected and True will be returned
 
         Usage code sample:
+        \code
+        
         dbif, connect = tgis.init_dbif(dbif)
         if connect:
             dbif.close()
+        
+        \endcode
     """
     if dbif is None:
         dbif = SQLDatabaseInterfaceConnection()

+ 290 - 57
lib/python/temporal/datetime_math.py

@@ -2,16 +2,7 @@
 
 @brief GRASS Python scripting module (temporal GIS functions)
 
-Temporal GIS datetime math functions to be used in Python scripts.
-
-Usage:
-
-@code
-import grass.temporal as tgis
-
-tgis.increment_datetime_by_string(mydate, "3 month, 2 hours")
-...
-@endcode
+Temporal GIS datetime math functions to be used in library functions and modules.
 
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -26,10 +17,11 @@ import copy
 from dateutil import parser
 
 DAY_IN_SECONDS = 86400
-SECOND_AS_DAY  = 1.1574074074074073e-05
+SECOND_AS_DAY = 1.1574074074074073e-05
 
 ###############################################################################
 
+
 def relative_time_to_time_delta(value):
     """!Convert the double value representing days
        into a timedelta object.
@@ -43,6 +35,7 @@ def relative_time_to_time_delta(value):
 
 ###############################################################################
 
+
 def time_delta_to_relative_time(delta):
     """!Convert the time delta into a
        double value, representing days.
@@ -52,17 +45,69 @@ def time_delta_to_relative_time(delta):
 
 ###############################################################################
 
-def increment_datetime_by_string(mydate, increment, mult = 1):
-    """!Return a new datetime object incremented with the provided 
+
+def increment_datetime_by_string(mydate, increment, mult=1):
+    """!Return a new datetime object incremented with the provided
        relative dates specified as string.
-       Additional a multiplier can be specified to multiply the increment 
+       Additional a multiplier can be specified to multiply the increment
        before adding to the provided datetime object.
-
+       
+       Usage:
+       
+       @code
+       
+        >>> dt = datetime(2001, 9, 1, 0, 0, 0)
+        >>> string = "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2003, 2, 18, 12, 5)
+
+        >>> dt = datetime(2001, 11, 1, 0, 0, 0)
+        >>> string = "1 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 12, 1, 0, 0)
+
+        >>> dt = datetime(2001, 11, 1, 0, 0, 0)
+        >>> string = "13 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2002, 12, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "72 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2007, 1, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "72 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2007, 1, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "5 minutes"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 1, 0, 5)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "49 hours"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 3, 1, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "3600 seconds"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 1, 1, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "30 days"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 31, 0, 0)
+        
+        @endcode
+       
        @param mydate A datetime object to incremented
        @param increment A string providing increment information:
-                  The string may include comma separated values of type seconds, 
+                  The string may include comma separated values of type seconds,
                   minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00 
+                  Example: Increment the datetime 2001-01-01 00:00:00
                   with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
                   will result in the datetime 2003-02-18 12:05:00
        @param mult A multiplier, default is 1
@@ -83,12 +128,11 @@ def increment_datetime_by_string(mydate, increment, mult = 1):
         incparts = increment.split(",")
         for incpart in incparts:
             inclist.append(incpart.strip().split(" "))
-            
 
         for inc in inclist:
-	    if len(inc) < 2:
-		core.error(_("Wrong increment format: %s") % (increment))
-		return None
+            if len(inc) < 2:
+                core.error(_("Wrong increment format: %s") % (increment))
+                return None
             if inc[1].find("seconds") >= 0:
                 seconds = mult * int(inc[0])
             elif inc[1].find("minutes") >= 0:
@@ -108,13 +152,16 @@ def increment_datetime_by_string(mydate, increment, mult = 1):
                 return None
 
         return increment_datetime(mydate, years, months, weeks, days, hours, minutes, seconds)
-    
+
     return mydate
 
 ###############################################################################
 
-def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0):
-    """!Return a new datetime object incremented with the provided relative dates and times"""
+
+def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, 
+                       minutes=0, seconds=0):
+    """!Return a new datetime object incremented with the provided 
+       relative dates and times"""
 
     tdelta_seconds = timedelta(seconds=seconds)
     tdelta_minutes = timedelta(minutes=minutes)
@@ -124,14 +171,13 @@ def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minu
     tdelta_months = timedelta(0)
     tdelta_years = timedelta(0)
 
-
     if months > 0:
         # Compute the actual number of days in the month to add as timedelta
         year = mydate.year
         month = mydate.month
 
         all_months = int(months) + int(month)
-        years_to_add = int(all_months/12.001)
+        years_to_add = int(all_months / 12.001)
         residual_months = all_months - (years_to_add * 12)
 
         # Make a deep copy of the datetime object
@@ -141,7 +187,7 @@ def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minu
         if residual_months == 0:
             residual_months = 1
 
-        dt1 = dt1.replace(year = year + years_to_add, month = residual_months)
+        dt1 = dt1.replace(year=year + years_to_add, month=residual_months)
         tdelta_months = dt1 - mydate
 
     if years > 0:
@@ -152,12 +198,60 @@ def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minu
         tdelta_years = dt1 - mydate
 
     return mydate + tdelta_seconds + tdelta_minutes + tdelta_hours + \
-                    tdelta_days + tdelta_weeks + tdelta_months + tdelta_years
+        tdelta_days + tdelta_weeks + tdelta_months + tdelta_years
 
 ###############################################################################
 
+
 def adjust_datetime_to_granularity(mydate, granularity):
-    """!Mofiy the datetime object to fit the given granularity    """
+    """!Modify the datetime object to fit the given granularity    
+    
+        * Years will start at the first of January
+        * Months will start at the first day of the month
+        * Days will start at the first Hour of the day
+        * Hours will start at the first minute of an hour
+        * Minutes will start at the first second of a minute
+        
+        Usage:
+        
+        @code
+        
+        >>> dt = datetime(2001, 8, 8, 12,30,30)
+        >>> adjust_datetime_to_granularity(dt, "5 seconds")
+        datetime.datetime(2001, 8, 8, 12, 30, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "20 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "20 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 hours")
+        datetime.datetime(2001, 8, 8, 12, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "5 days")
+        datetime.datetime(2001, 8, 8, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 weeks")
+        datetime.datetime(2001, 8, 6, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "6 months")
+        datetime.datetime(2001, 8, 1, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 years")
+        datetime.datetime(2001, 1, 1, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 years, 3 months, 5 days, 3 hours, 3 minutes, 2 seconds")
+        datetime.datetime(2001, 8, 8, 12, 30, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 months, 5 days, 3 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 weeks, 5 days")
+        datetime.datetime(2001, 8, 8, 0, 0)
+        
+        @endcode
+    """
 
     if granularity:
 
@@ -203,30 +297,30 @@ def adjust_datetime_to_granularity(mydate, granularity):
                 return None
 
         if has_seconds:
-            pass          
-        elif has_minutes: # Start at 0 seconds
+            pass
+        elif has_minutes:  # Start at 0 seconds
             seconds = 0
-        elif has_hours: # Start at 0 minutes and seconds
+        elif has_hours:  # Start at 0 minutes and seconds
             seconds = 0
             minutes = 0
-        elif has_days: # Start at 0 hours, minutes and seconds
+        elif has_days:  # Start at 0 hours, minutes and seconds
             seconds = 0
             minutes = 0
             hours = 0
-        elif has_weeks: # Start at the first day of the week (Monday) at 00:00:00
+        elif has_weeks:  # Start at the first day of the week (Monday) at 00:00:00
             seconds = 0
             minutes = 0
             hours = 0
             if days > weekday:
-                days = days - weekday # this needs to be fixed
+                days = days - weekday  # this needs to be fixed
             else:
-                days = days + weekday # this needs to be fixed
-        elif has_months: # Start at the first day of the month at 00:00:00
+                days = days + weekday  # this needs to be fixed
+        elif has_months:  # Start at the first day of the month at 00:00:00
             seconds = 0
             minutes = 0
             hours = 0
             days = 1
-        elif has_years: # Start at the first day of the first month at 00:00:00
+        elif has_years:  # Start at the first day of the first month at 00:00:00
             seconds = 0
             minutes = 0
             hours = 0
@@ -234,16 +328,142 @@ def adjust_datetime_to_granularity(mydate, granularity):
             months = 1
 
         dt = copy.copy(mydate)
-        result = dt.replace(year=years, month=months, day=days, hour=hours, minute=minutes, second=seconds)
-        core.verbose(_("Adjust datetime from %s to %s with granularity %s") % (dt, result, granularity))
-
-        return result
+        return dt.replace(year=years, month=months, day=days,
+                            hour=hours, minute=minutes, second=seconds)
 
 ###############################################################################
 
+
 def compute_datetime_delta(start, end):
-    """!Return a dictionary with the accumulated delta in year, month, day, hour, minute and second
-    
+    """!Return a dictionary with the accumulated delta in year, month, day, 
+       hour, minute and second
+       
+        Usage:
+        
+        @code
+        
+        >>> start = datetime(2001, 1, 1, 00,00,00)
+        >>> end = datetime(2001, 1, 1, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,14)
+        >>> end = datetime(2001, 1, 1, 00,00,44)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 30, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,44)
+        >>> end = datetime(2001, 1, 1, 00,01,14)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 30, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 1}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,30)
+        >>> end = datetime(2001, 1, 1, 00,05,30)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 300, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 5}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,00)
+        >>> end = datetime(2001, 1, 1, 00,01,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 1}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 01,45,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 60}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 01,15,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 30}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 12,15,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 12, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 690}
+        
+        >>> start = datetime(2011,10,31, 00,00,00)
+        >>> end = datetime(2011,10,31, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,10,31, 00,00,00)
+        >>> end = datetime(2011,11,01, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 25, 'second': 0, 'max_days': 1, 'year': 0, 'day': 1, 'minute': 0}
+        
+        >>> start = datetime(2011,10,31, 12,00,00)
+        >>> end = datetime(2011,11,01, 06,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 18, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,11,01, 00,00,00)
+        >>> end = datetime(2011,12,01, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 721, 'month': 1, 'second': 0, 'max_days': 30, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,11,01, 00,00,00)
+        >>> end = datetime(2011,11,05, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 4, 'year': 0, 'day': 4, 'minute': 0}
+        
+        >>> start = datetime(2011,10,06, 00,00,00)
+        >>> end = datetime(2011,11,05, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 30, 'year': 0, 'day': 30, 'minute': 0}
+        
+        >>> start = datetime(2011,12,02, 00,00,00)
+        >>> end = datetime(2012,01,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 30, 'year': 1, 'day': 30, 'minute': 0}
+        
+        >>> start = datetime(2011,01,01, 00,00,00)
+        >>> end = datetime(2011,02,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 1, 'second': 0, 'max_days': 31, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,12,01, 00,00,00)
+        >>> end = datetime(2012,01,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 1, 'second': 0, 'max_days': 31, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,12,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 6, 'second': 0, 'max_days': 183, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2021,06,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 120, 'second': 0, 'max_days': 3653, 'year': 10, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,30,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 527790}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,00,05)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 31665605, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,30,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 527070}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,00,05)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 12, 'second': 31622405, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        @endcode
+        
        @return A dictionary with year, month, day, hour, minute and second as keys()
     """
     comp = {}
@@ -281,11 +501,11 @@ def compute_datetime_delta(start, end):
     else:
         d = end.hour - start.hour
         if d < 0:
-            d = d + 24  + 24 * day_diff
+            d = d + 24 + 24 * day_diff
         else:
             d = d + 24 * day_diff
         comp["hour"] = d
-    
+
     # Minutes
     if start.minute == 0 and end.minute == 0:
         comp["minute"] = 0
@@ -298,7 +518,7 @@ def compute_datetime_delta(start, end):
                 d = d + 24 * 60 * day_diff
         elif d == 0:
             if comp["hour"]:
-                d = 60* comp["hour"]
+                d = 60 * comp["hour"]
             else:
                 d = 24 * 60 * day_diff
 
@@ -311,14 +531,14 @@ def compute_datetime_delta(start, end):
         d = end.second - start.second
         if d != 0:
             if comp["minute"]:
-                d = d + 60* comp["minute"]
+                d = d + 60 * comp["minute"]
             elif comp["hour"]:
-                d = d + 3600* comp["hour"]
+                d = d + 3600 * comp["hour"]
             else:
                 d = d + 24 * 60 * 60 * day_diff
         elif d == 0:
             if comp["minute"]:
-                d = 60* comp["minute"]
+                d = 60 * comp["minute"]
             elif comp["hour"]:
                 d = 3600 * comp["hour"]
             else:
@@ -329,12 +549,15 @@ def compute_datetime_delta(start, end):
 
 ###############################################################################
 
+
 def string_to_datetime(time_string):
-    """!Convert a string into a datetime object using the dateutil parser. Return None in case of failure"""
+    """!Convert a string into a datetime object using the dateutil parser. 
+       Return None in case of failure"""
 
     # BC is not supported
     if time_string.find("bc") > 0:
-        core.error("Dates Before Christ are not supported in the temporal database")
+        core.error("Dates Before Christ are not supported "
+                   "in the temporal database")
         return None
 
     try:
@@ -345,17 +568,27 @@ def string_to_datetime(time_string):
 
 ###############################################################################
 
+
 def datetime_to_grass_datetime_string(dt):
     """!Convert a python datetime object into a GRASS datetime string"""
 
     # GRASS datetime month names
-    month_names  = ["", "jan","feb","mar","apr","may","jun","jul","aug","sep","oct","nov","dec"]
+    month_names = ["", "jan", "feb", "mar", "apr", "may", "jun",
+                   "jul", "aug", "sep", "oct", "nov", "dec"]
 
     # Check for time zone info in the datetime object
-    if dt.tzinfo != None:
-        string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i"%(dt.day, month_names[dt.month], dt.year, \
-                 dt.hour, dt.minute, dt.second, dt.tzinfo._offset.seconds/60)
+    if dt.tzinfo is not None:
+        string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i" % (dt.day, 
+                 month_names[dt.month], dt.year,
+                 dt.hour, dt.minute, dt.second, dt.tzinfo._offset.seconds / 60)
     else:
-        string = "%.2i %s %.4i %.2i:%.2i:%.2i"%(dt.day, month_names[dt.month], dt.year, dt.hour, dt.minute, dt.second)
+        string = "%.2i %s %.4i %.2i:%.2i:%.2i" % (dt.day, month_names[
+            dt.month], dt.year, dt.hour, dt.minute, dt.second)
 
     return string
+
+###############################################################################
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()

+ 208 - 185
lib/python/temporal/extract.py

@@ -4,12 +4,6 @@
 
 Temporal GIS related functions to be used in Python scripts.
 
-Usage:
-
-@code
-import grass.temporal as tgis
-@endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -23,30 +17,37 @@ from multiprocessing import Process
 
 ############################################################################
 
-def extract_dataset(input, output, type, where, expression, base, nprocs=1, register_null=False, layer=1, vtype="point,line,boundary,centroid,area,face"):
+
+def extract_dataset(input, output, type, where, expression, base, nprocs=1,
+                    register_null=False, layer=1,
+                    vtype="point,line,boundary,centroid,area,face"):
     """!Extract a subset of a space time raster, raster3d or vector dataset
-    
+
        A mapcalc expression can be provided to process the temporal extracted maps.
        Mapcalc expressions are supported for raster and raster3d maps.
-       
-       @param input The name of the input space time raster/raster3d dataset 
+
+       @param input The name of the input space time raster/raster3d dataset
        @param output The name of the extracted new space time raster/raster3d dataset
        @param type The type of the dataset: "raster", "raster3d" or vector
        @param where The temporal SQL WHERE statement for subset extraction
        @param expression The r(3).mapcalc expression or the v.extract where statement
-       @param base The base name of the new created maps in case a mapclac expression is provided 
+       @param base The base name of the newly created maps in case a mapcalc
+              expression is provided
        @param nprocs The number of parallel processes to be used for mapcalc processing
-       @param register_null Set this number True to register empty maps (only raster and raster3d maps)
-       @param layer The vector layer number to be used when no timestamped layer is present, default is 1
-       @param vtype The feature type to be extracted for vector maps, default is point,line,boundary,centroid,area and face
+       @param register_null Set this parameter to True to register empty maps
+             (only raster and raster3d maps)
+       @param layer The vector layer number to be used when no timestamped
+              layer is present, default is 1
+       @param vtype The feature type to be extracted for vector maps, default
+              is point,line,boundary,centroid,area and face
     """
 
     # Check the parameters
 
     if expression and not base:
         core.fatal(_("You need to specify the base name of new created maps"))
-    
-    mapset =  core.gisenv()["MAPSET"]
+
+    mapset = core.gisenv()["MAPSET"]
 
     if input.find("@") >= 0:
         id = input
@@ -54,23 +55,23 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1, regi
         id = input + "@" + mapset
 
     if type == "raster":
-	sp = space_time_raster_dataset(id)
+        sp = space_time_raster_dataset(id)
     elif type == "raster3d":
-	sp = space_time_raster3d_dataset(id)
+        sp = space_time_raster3d_dataset(id)
     elif type == "vector":
-	sp = space_time_vector_dataset(id)
-	
+        sp = space_time_vector_dataset(id)
+
     dummy = sp.get_new_map_instance(None)
-	
+
     dbif = ()
     dbif.connect()
-    
-    if sp.is_in_db(dbif) == False:
-	dbif.close()
+
+    if not sp.is_in_db(dbif):
+        dbif.close()
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
 
     if expression and not base:
-	dbif.close()
+        dbif.close()
         core.fatal(_("Please specify base="))
 
     sp.select(dbif)
@@ -82,192 +83,214 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1, regi
 
     # The new space time dataset
     new_sp = sp.get_new_instance(out_id)
-	
+
     if new_sp.is_in_db():
-        if core.overwrite() == False:
-	    dbif.close()
-            core.fatal(_("Space time %s dataset <%s> is already in database, use overwrite flag to overwrite") % (type, out_id))
+        if not core.overwrite():
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> is already in database,"
+                         " use overwrite flag to overwrite") % (type, out_id))
     if type == "vector":
-	rows = sp.get_registered_maps("id,name,mapset,layer", where, "start_time", dbif)
+        rows = sp.get_registered_maps(
+            "id,name,mapset,layer", where, "start_time", dbif)
     else:
-	rows = sp.get_registered_maps("id", where, "start_time", dbif)
+        rows = sp.get_registered_maps("id", where, "start_time", dbif)
 
     new_maps = {}
     if rows:
-	num_rows = len(rows)
-	
-	core.percent(0, num_rows, 1)
-	
-	# Run the mapcalc expression
+        num_rows = len(rows)
+
+        core.percent(0, num_rows, 1)
+
+        # Run the mapcalc expression
         if expression:
-	    count = 0
-	    proc_count = 0
-	    proc_list = []
-	    
-	    for row in rows:
-		count += 1
-		
-		core.percent(count, num_rows, 1)
-		
-		map_name = "%s_%i" % (base, count)
-		
-		# We need to modify the r(3).mapcalc expression
-		if type != "vector":
-		    expr = "%s = %s" % (map_name, expression)
-		    
-		    expr = expr.replace(sp.base.get_map_id(), row["id"])
-		    expr = expr.replace(sp.base.get_name(), row["id"])
-		    
-		    # We need to build the id
-		    map_id = dummy.build_id(map_name, mapset)
-		else:
-		    map_id = dummy.build_id(map_name, mapset, row["layer"])
-
-		new_map = sp.get_new_map_instance(map_id)
-
-		# Check if new map is in the temporal database
-		if new_map.is_in_db(dbif):
-		    if core.overwrite() == True:
-			# Remove the existing temporal database entry
-			new_map.delete(dbif)
-			new_map = sp.get_new_map_instance(map_id)
-		    else:
-			core.error(_("Map <%s> is already in temporal database, use overwrite flag to overwrite")%(new_map.get_map_id()))
-			continue
-		
-		# Add process to the process list
-		if type == "raster":
-		    core.verbose(_("Apply r.mapcalc expression: \"%s\"") % expr)
-		    proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
-		elif type == "raster3d":
-		    core.verbose(_("Apply r3.mapcalc expression: \"%s\"") % expr)
-		    proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
-		elif type == "vector":
-		    core.verbose(_("Apply v.extract where statement: \"%s\"") % expression)
-		    if row["layer"]:
-			proc_list.append(Process(target=run_vector_extraction, args=(row["name"] + "@" + row["mapset"], map_name, row["layer"], vtype, expression)))
-		    else:
-			proc_list.append(Process(target=run_vector_extraction, args=(row["name"] + "@" + row["mapset"], map_name, layer, vtype, expression)))
-		
-		proc_list[proc_count].start()
-		proc_count += 1
-		
-		# Join processes if the maximum number of processes are reached or the end of the
-		# loop is reached
-		if proc_count == nprocs or proc_count == num_rows:
-		    proc_count = 0
-		    exitcodes = 0
-		    for proc in proc_list:
-			proc.join()
-			exitcodes += proc.exitcode
-			
-		    if exitcodes != 0:
-			dbif.close()
-			core.fatal(_("Error while computation"))
-			
-		    # Empty process list
-		    proc_list = []
-		    
-		# Store the new maps
-		new_maps[row["id"]] = new_map
-	
-	core.percent(0, num_rows, 1)
-	
-	# Insert the new space time dataset
-	if new_sp.is_in_db(dbif):
-	    if core.overwrite() == True:
-		new_sp.delete(dbif)
-		new_sp = sp.get_new_instance(out_id)
-
-	temporal_type, semantic_type, title, description = sp.get_initial_values()
-	new_sp.set_initial_values(temporal_type, semantic_type, title, description)
-	new_sp.insert(dbif)
-	
-	# collect empty maps to remove them
-	empty_maps = []
-	
-	# Register the maps in the database
+            count = 0
+            proc_count = 0
+            proc_list = []
+
+            for row in rows:
+                count += 1
+
+                core.percent(count, num_rows, 1)
+
+                map_name = "%s_%i" % (base, count)
+
+                # We need to modify the r(3).mapcalc expression
+                if type != "vector":
+                    expr = "%s = %s" % (map_name, expression)
+
+                    expr = expr.replace(sp.base.get_map_id(), row["id"])
+                    expr = expr.replace(sp.base.get_name(), row["id"])
+
+                    # We need to build the id
+                    map_id = dummy.build_id(map_name, mapset)
+                else:
+                    map_id = dummy.build_id(map_name, mapset, row["layer"])
+
+                new_map = sp.get_new_map_instance(map_id)
+
+                # Check if new map is in the temporal database
+                if new_map.is_in_db(dbif):
+                    if core.overwrite():
+                        # Remove the existing temporal database entry
+                        new_map.delete(dbif)
+                        new_map = sp.get_new_map_instance(map_id)
+                    else:
+                        core.error(_("Map <%s> is already in temporal database,"
+                                     " use overwrite flag to overwrite") %
+                                    (new_map.get_map_id()))
+                        continue
+
+                # Add process to the process list
+                if type == "raster":
+                    core.verbose(_("Apply r.mapcalc expression: \"%s\"")
+                                 % expr)
+                    proc_list.append(Process(target=run_mapcalc2d,
+                                             args=(expr,)))
+                elif type == "raster3d":
+                    core.verbose(_("Apply r3.mapcalc expression: \"%s\"")
+                                 % expr)
+                    proc_list.append(Process(target=run_mapcalc3d,
+                                             args=(expr,)))
+                elif type == "vector":
+                    core.verbose(_("Apply v.extract where statement: \"%s\"")
+                                 % expression)
+                    if row["layer"]:
+                        proc_list.append(Process(target=run_vector_extraction,
+                                                 args=(row["name"] + "@" + row["mapset"],
+                                                 map_name, row["layer"], 
+                                                 vtype, expression)))
+                    else:
+                        proc_list.append(Process(target=run_vector_extraction,
+                                                 args=(row["name"] + "@" + row["mapset"],
+                                                 map_name, layer, vtype, 
+                                                 expression)))
+
+                proc_list[proc_count].start()
+                proc_count += 1
+
+                # Join processes if the maximum number of processes are reached or the end of the
+                # loop is reached
+                if proc_count == nprocs or proc_count == num_rows:
+                    proc_count = 0
+                    exitcodes = 0
+                    for proc in proc_list:
+                        proc.join()
+                        exitcodes += proc.exitcode
+
+                    if exitcodes != 0:
+                        dbif.close()
+                        core.fatal(_("Error while computation"))
+
+                    # Empty process list
+                    proc_list = []
+
+                # Store the new maps
+                new_maps[row["id"]] = new_map
+
+        core.percent(0, num_rows, 1)
+
+        # Insert the new space time dataset
+        if new_sp.is_in_db(dbif):
+            if core.overwrite():
+                new_sp.delete(dbif)
+                new_sp = sp.get_new_instance(out_id)
+
+        temporal_type, semantic_type, title, description = sp.get_initial_values()
+        new_sp.set_initial_values(
+            temporal_type, semantic_type, title, description)
+        new_sp.insert(dbif)
+
+        # collect empty maps to remove them
+        empty_maps = []
+
+        # Register the maps in the database
         count = 0
         for row in rows:
             count += 1
-	    
-	    core.percent(count, num_rows, 1)
+
+            core.percent(count, num_rows, 1)
 
             old_map = sp.get_new_map_instance(row["id"])
             old_map.select(dbif)
-            
+
             if expression:
-		# Register the new maps
-		if new_maps.has_key(row["id"]):
-		    new_map = new_maps[row["id"]]
-
-		    # Read the raster map data
-		    new_map.load()
-		    
-		    # In case of a empty map continue, do not register empty maps
-		    if type == "raster" or type == "raster3d":
-			if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
-			    if not register_null:
-				empty_maps.append(new_map)
-				continue
-		    elif type == "vector":
-			if new_map.metadata.get_primitives() == 0 or new_map.metadata.get_primitives() == None:
-			    if not register_null:
-				empty_maps.append(new_map)
-				continue
-
-		    # Set the time stamp
-		    if old_map.is_time_absolute():
-			start, end, tz = old_map.get_absolute_time()
-			new_map.set_absolute_time(start, end, tz)
-		    else:
-			start, end = old_map.get_relative_time()
-			new_map.set_relative_time(start, end)
-
-		    # Insert map in temporal database
-		    new_map.insert(dbif)
-
-		    new_sp.register_map(new_map, dbif)
-	    else:
-		new_sp.register_map(old_map, dbif)          
-                
+                # Register the new maps
+                if row["id"] in new_maps:
+                    new_map = new_maps[row["id"]]
+
+                    # Read the raster map data
+                    new_map.load()
+
+                    # In case of an empty map continue, do not register empty maps
+                    if type == "raster" or type == "raster3d":
+                        if new_map.metadata.get_min() is None and \
+                            new_map.metadata.get_max() is None:
+                            if not register_null:
+                                empty_maps.append(new_map)
+                                continue
+                    elif type == "vector":
+                        if new_map.metadata.get_primitives() == 0 or \
+                           new_map.metadata.get_primitives() is None:
+                            if not register_null:
+                                empty_maps.append(new_map)
+                                continue
+
+                    # Set the time stamp
+                    if old_map.is_time_absolute():
+                        start, end, tz = old_map.get_absolute_time()
+                        new_map.set_absolute_time(start, end, tz)
+                    else:
+                        start, end = old_map.get_relative_time()
+                        new_map.set_relative_time(start, end)
+
+                    # Insert map in temporal database
+                    new_map.insert(dbif)
+
+                    new_sp.register_map(new_map, dbif)
+            else:
+                new_sp.register_map(old_map, dbif)
+
         # Update the spatio-temporal extent and the metadata table entries
         new_sp.update_from_registered_maps(dbif)
-	
-	core.percent(num_rows, num_rows, 1)
-	
-	# Remove empty maps
-	if len(empty_maps) > 0:
-	    names = ""
-	    count = 0
-	    for map in empty_maps:
-		if count == 0:
-		    names += "%s"%(map.get_name())
-		else:
-		    names += ",%s"%(map.get_name())
-		count += 1
-	    if type == "raster":
-		core.run_command("g.remove", rast=names, quiet=True)
-	    elif type == "raster3d":
-		core.run_command("g.remove", rast3d=names, quiet=True)
-	    elif type == "vector":
-		core.run_command("g.remove", vect=names, quiet=True)
-        
+
+        core.percent(num_rows, num_rows, 1)
+
+        # Remove empty maps
+        if len(empty_maps) > 0:
+            names = ""
+            count = 0
+            for map in empty_maps:
+                if count == 0:
+                    names += "%s" % (map.get_name())
+                else:
+                    names += ",%s" % (map.get_name())
+                count += 1
+            if type == "raster":
+                core.run_command("g.remove", rast=names, quiet=True)
+            elif type == "raster3d":
+                core.run_command("g.remove", rast3d=names, quiet=True)
+            elif type == "vector":
+                core.run_command("g.remove", vect=names, quiet=True)
+
     dbif.close()
 
 ###############################################################################
 
+
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("r.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r.mapcalc", expression=expr,
+                            overwrite=core.overwrite(), quiet=True)
 
 
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
-    return core.run_command("r3.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
-    
+    return core.run_command("r3.mapcalc", expression=expr,
+                            overwrite=core.overwrite(), quiet=True)
+
 
 def run_vector_extraction(input, output, layer, type, where):
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("v.extract", input=input, output=output, layer=layer, type=type, where=where, overwrite=core.overwrite(), quiet=True)
-
+    return core.run_command("v.extract", input=input, output=output,
+                            layer=layer, type=type, where=where,
+                            overwrite=core.overwrite(), quiet=True)

+ 259 - 244
lib/python/temporal/mapcalc.py

@@ -4,12 +4,6 @@
 
 Temporal GIS related functions to be used in Python scripts.
 
-Usage:
-
-@code
-import grass.temporal as tgis
-@endcode
-
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -23,87 +17,96 @@ from multiprocessing import Process
 
 ############################################################################
 
-def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs=1, register_null=False, spatial=False):
-    """!Perform map-calculations of maps from different space time raster/raster3d datasets, using
-       a specific sampling method to select temporal related maps.
-    
-       A mapcalc expression can be provided to process the temporal extracted maps.
+
+def dataset_mapcalculator(inputs, output, type, expression, base, method, 
+                          nprocs=1, register_null=False, spatial=False):
+    """!Perform map-calculations of maps from different space time 
+       raster/raster3d datasets, using a specific sampling method 
+       to select temporal related maps.
+
+       A mapcalc expression can be provided to process the temporal 
+       extracted maps.
        Mapcalc expressions are supported for raster and raster3d maps.
-       
-       @param input The name of the input space time raster/raster3d dataset 
-       @param output The name of the extracted new space time raster/raster3d dataset
+
+       @param input The name of the input space time raster/raster3d dataset
+       @param output The name of the extracted new space time raster(3d) dataset
        @param type The type of the dataset: "raster" or "raster3d"
        @param method The method to be used for temporal sampling
        @param expression The r(3).mapcalc expression
-       @param base The base name of the new created maps in case a mapclac expression is provided 
-       @param nprocs The number of parallel processes to be used for mapcalc processing
+       @param base The base name of the new created maps in case a 
+              mapclac expression is provided
+       @param nprocs The number of parallel processes to be used for 
+              mapcalc processing
        @param register_null Set this number True to register empty maps
        @param spatial Check spatial overlap
     """
-    
+
     # We need a database interface for fast computation
     dbif = ()
     dbif.connect()
 
-    mapset =  core.gisenv()["MAPSET"]
-    
+    mapset = core.gisenv()["MAPSET"]
+
     input_name_list = inputs.split(",")
-    
+
     # Process the first input
     if input_name_list[0].find("@") >= 0:
-	id = input_name_list[0]
+        id = input_name_list[0]
     else:
-	id = input_name_list[0] + "@" + mapset
-	
+        id = input_name_list[0] + "@" + mapset
+
     if type == "raster":
-	first_input = space_time_raster_dataset(id)
+        first_input = space_time_raster_dataset(id)
     else:
-	first_input = space_time_raster3d_dataset(id)
-    
-    if first_input.is_in_db(dbif) == False:
-	dbif.close()
+        first_input = space_time_raster3d_dataset(id)
+
+    if not first_input.is_in_db(dbif):
+        dbif.close()
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
 
     # Fill the object with data from the temporal database
     first_input.select(dbif)
-    
-    # All additional inputs in reverse sorted order to avoid wrong name substitution
+
+    # All additional inputs in reverse sorted order to avoid 
+    # wrong name substitution
     input_name_list = input_name_list[1:]
     input_name_list.sort()
     input_name_list.reverse()
     input_list = []
-        
+
     for input in input_name_list:
 
-	if input.find("@") >= 0:
-	    id = input
-	else:
-	    id = input + "@" + mapset
-	    
-	sp = first_input.get_new_instance(id)
-	
-	if sp.is_in_db(dbif) == False:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> not found in temporal database") % (type, id))
-
-	sp.select(dbif)
-	
-	input_list.append(copy.copy(sp))
+        if input.find("@") >= 0:
+            id = input
+        else:
+            id = input + "@" + mapset
+
+        sp = first_input.get_new_instance(id)
+
+        if not sp.is_in_db(dbif):
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> not "
+                         "found in temporal database") % (type, id))
+
+        sp.select(dbif)
+
+        input_list.append(copy.copy(sp))
 
     # Create the new space time dataset
     if output.find("@") >= 0:
         out_id = output
     else:
         out_id = output + "@" + mapset
-        
+
     new_sp = first_input.get_new_instance(out_id)
-    
+
     # Check if in database
     if new_sp.is_in_db(dbif):
-        if core.overwrite() == False:
-	    dbif.close()
-            core.fatal(_("Space time %s dataset <%s> is already in database, use overwrite flag to overwrite") % (type, out_id))
- 
+        if not core.overwrite():
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> is already in database, "
+                         "use overwrite flag to overwrite") % (type, out_id))
+
     # Sample all inputs by the first input and create a sample matrix
     if spatial:
         core.message(_("Start spatio-temporal sampling"))
@@ -114,217 +117,226 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs
     sample_map_list = []
     # First entry is the first dataset id
     id_list.append(first_input.get_name())
-    
+
     if len(input_list) > 0:
-	has_samples = False
-	for dataset in input_list:
-	    list = dataset.sample_by_dataset(stds=first_input, method=method, spatial=spatial, dbif=dbif)
-	    
-	    # In case samples are not found
-	    if not list and len(list) == 0:
-		dbif.close()
-		core.message(_("No samples found for map calculation"))
-		return 0
-	    
-	    # The fist entries are the samples
-	    map_name_list = []
-	    if has_samples == False:
-		for entry in list:
-		    granule = entry["granule"]
-		    # Do not consider gaps
-		    if granule.get_id() == None:
-			continue
-		    sample_map_list.append(granule)
-		    map_name_list.append(granule.get_name())
-		# Attach the map names
-		map_matrix.append(copy.copy(map_name_list))
-		has_samples = True
-		
-	    map_name_list = []
-	    for entry in list:
-		maplist = entry["samples"]
-		granule = entry["granule"]
-		
-		# Do not consider gaps in the sampler
-		if granule.get_id() == None:
-		    continue
-		
-		if len(maplist) > 1:
-		    core.warning(_("Found more than a single map in a sample granule. "\
-		    "Only the first map is used for computation. "\
-		    "Use t.rast.aggregate.ds to create synchronous raster datasets."))
-		
-		# Store all maps! This includes non existent maps, identified by id == None 
-		map_name_list.append(maplist[0].get_name())
-	    
-	    # Attach the map names
-	    map_matrix.append(copy.copy(map_name_list))
-
-	    id_list.append(dataset.get_name())
+        has_samples = False
+        for dataset in input_list:
+            list = dataset.sample_by_dataset(stds=first_input,
+                                             method=method, spatial=spatial, 
+                                             dbif=dbif)
+
+            # In case samples are not found
+            if not list and len(list) == 0:
+                dbif.close()
+                core.message(_("No samples found for map calculation"))
+                return 0
+
+            # The fist entries are the samples
+            map_name_list = []
+            if not has_samples:
+                for entry in list:
+                    granule = entry["granule"]
+                    # Do not consider gaps
+                    if granule.get_id() is None:
+                        continue
+                    sample_map_list.append(granule)
+                    map_name_list.append(granule.get_name())
+                # Attach the map names
+                map_matrix.append(copy.copy(map_name_list))
+                has_samples = True
+
+            map_name_list = []
+            for entry in list:
+                maplist = entry["samples"]
+                granule = entry["granule"]
+
+                # Do not consider gaps in the sampler
+                if granule.get_id() is None:
+                    continue
+
+                if len(maplist) > 1:
+                    core.warning(_("Found more than a single map in a sample "
+                                   "granule. Only the first map is used for "
+                                   "computation. Use t.rast.aggregate.ds to "
+                                   "create synchronous raster datasets."))
+
+                # Store all maps! This includes non existent maps, 
+                # identified by id == None
+                map_name_list.append(maplist[0].get_name())
+
+            # Attach the map names
+            map_matrix.append(copy.copy(map_name_list))
+
+            id_list.append(dataset.get_name())
     else:
-	list = first_input.get_registered_maps_as_objects(dbif=dbif)
-	
-	if list == None:
-	    dbif.close()
+        list = first_input.get_registered_maps_as_objects(dbif=dbif)
+
+        if list is None:
+            dbif.close()
             core.message(_("No maps in input dataset"))
             return 0
-	
-	map_name_list = []
-	for map in list:
-	    map_name_list.append(map.get_name())
-	    sample_map_list.append(map)
-	
-	# Attach the map names
-	map_matrix.append(copy.copy(map_name_list))
-   
+
+        map_name_list = []
+        for map in list:
+            map_name_list.append(map.get_name())
+            sample_map_list.append(map)
+
+        # Attach the map names
+        map_matrix.append(copy.copy(map_name_list))
+
     # Needed for map registration
     map_list = []
-	
+
     if len(map_matrix) > 0:
-	
-	core.message(_("Start mapcalc computation"))
-	    
-	count = 0
-	# Get the number of samples
-	num = len(map_matrix[0])
-	
-	# Parallel processing
+
+        core.message(_("Start mapcalc computation"))
+
+        count = 0
+        # Get the number of samples
+        num = len(map_matrix[0])
+
+        # Parallel processing
         proc_list = []
         proc_count = 0
-	
-	# For all samples
+
+        # For all samples
         for i in range(num):
-            
+
             count += 1
-	    core.percent(count, num, 1)
+            core.percent(count, num, 1)
+
+            # Create the r.mapcalc statement for the current time step
+            map_name = "%s_%i" % (base, count)
+            expr = "%s = %s" % (map_name, expression)
 
-	    # Create the r.mapcalc statement for the current time step
-	    map_name = "%s_%i" % (base, count)   
-	    expr = "%s = %s" % (map_name, expression)
-            
             # Check that all maps are in the sample
             valid_maps = True
-            # Replace all dataset names with their map names of the current time step
+            # Replace all dataset names with their map names of the 
+            # current time step
             for j in range(len(map_matrix)):
-		if map_matrix[j][i] == None:
-		    valid_maps = False
-		    break
-		# Substitute the dataset name with the map name
-		expr = expr.replace(id_list[j], map_matrix[j][i])
-
-	    # Proceed with the next sample
-	    if valid_maps == False:
-		continue
-		
-	    # Create the new map id and check if the map is already in the database
-	    map_id = map_name + "@" + mapset
-
-	    new_map = first_input.get_new_map_instance(map_id)
-
-	    # Check if new map is in the temporal database
-	    if new_map.is_in_db(dbif):
-		if core.overwrite() == True:
-		    # Remove the existing temporal database entry
-		    new_map.delete(dbif)
-		    new_map = first_input.get_new_map_instance(map_id)
-		else:
-		    core.error(_("Map <%s> is already in temporal database, use overwrite flag to overwrite"))
-		    continue
-
-	    # Set the time stamp
-	    if sample_map_list[i].is_time_absolute():
-		start, end, tz = sample_map_list[i].get_absolute_time()
-		new_map.set_absolute_time(start, end, tz)
-	    else:
-		start, end = sample_map_list[i].get_relative_time()
-		new_map.set_relative_time(start, end)
-	    
-	    map_list.append(new_map)
-	    
-	    # Start the parallel r.mapcalc computation
-	    core.verbose(_("Apply mapcalc expression: \"%s\"") % expr)
-
-	    if type == "raster":
-		proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
-	    else:
-		proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
-	    proc_list[proc_count].start()
-	    proc_count += 1
-	    
-	    if proc_count == nprocs or proc_count == num:
-		proc_count = 0
-		exitcodes = 0
-		for proc in proc_list:
-		    proc.join()
-		    exitcodes += proc.exitcode
-		    
-		if exitcodes != 0:
-		    dbif.close()
-		    core.fatal(_("Error while mapcalc computation"))
-		    
-		# Empty process list
-		proc_list = []
-		
-	# Register the new maps in the output space time dataset
-	core.message(_("Start map registration in temporal database"))
-	    
-	# Overwrite an existing dataset if requested
-	if new_sp.is_in_db(dbif):
-	    if core.overwrite() == True:
-		new_sp.delete(dbif)
-		new_sp = first_input.get_new_instance(out_id)
-		
-	# Copy the ids from the first input
-	temporal_type, semantic_type, title, description = first_input.get_initial_values()
-	new_sp.set_initial_values(temporal_type, semantic_type, title, description)
-	# Insert the dataset in the temporal database
-	new_sp.insert(dbif)
-    
-	count = 0
-	
-	# collect empty maps to remove them
-	empty_maps = []
-	
-	# Insert maps in the temporal database and in the new space time dataset
-	for new_map in map_list:
+                if map_matrix[j][i] is None:
+                    valid_maps = False
+                    break
+                # Substitute the dataset name with the map name
+                expr = expr.replace(id_list[j], map_matrix[j][i])
+
+            # Proceed with the next sample
+            if not valid_maps:
+                continue
+
+            # Create the new map id and check if the map is already 
+            # in the database
+            map_id = map_name + "@" + mapset
+
+            new_map = first_input.get_new_map_instance(map_id)
+
+            # Check if new map is in the temporal database
+            if new_map.is_in_db(dbif):
+                if core.overwrite():
+                    # Remove the existing temporal database entry
+                    new_map.delete(dbif)
+                    new_map = first_input.get_new_map_instance(map_id)
+                else:
+                    core.error(_("Map <%s> is already in temporal database, "
+                                 "use overwrite flag to overwrite"))
+                    continue
+
+            # Set the time stamp
+            if sample_map_list[i].is_time_absolute():
+                start, end, tz = sample_map_list[i].get_absolute_time()
+                new_map.set_absolute_time(start, end, tz)
+            else:
+                start, end = sample_map_list[i].get_relative_time()
+                new_map.set_relative_time(start, end)
+
+            map_list.append(new_map)
+
+            # Start the parallel r.mapcalc computation
+            core.verbose(_("Apply mapcalc expression: \"%s\"") % expr)
+
+            if type == "raster":
+                proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
+            else:
+                proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
+            proc_list[proc_count].start()
+            proc_count += 1
+
+            if proc_count == nprocs or proc_count == num:
+                proc_count = 0
+                exitcodes = 0
+                for proc in proc_list:
+                    proc.join()
+                    exitcodes += proc.exitcode
+
+                if exitcodes != 0:
+                    dbif.close()
+                    core.fatal(_("Error while mapcalc computation"))
+
+                # Empty process list
+                proc_list = []
+
+        # Register the new maps in the output space time dataset
+        core.message(_("Start map registration in temporal database"))
+
+        # Overwrite an existing dataset if requested
+        if new_sp.is_in_db(dbif):
+            if core.overwrite():
+                new_sp.delete(dbif)
+                new_sp = first_input.get_new_instance(out_id)
+
+        # Copy the ids from the first input
+        temporal_type, semantic_type, title, description = first_input.get_initial_values()
+        new_sp.set_initial_values(
+            temporal_type, semantic_type, title, description)
+        # Insert the dataset in the temporal database
+        new_sp.insert(dbif)
+
+        count = 0
+
+        # collect empty maps to remove them
+        empty_maps = []
+
+        # Insert maps in the temporal database and in the new space time dataset
+        for new_map in map_list:
 
             count += 1
-	    core.percent(count, num, 1)
-	    
-	    # Read the map data
-	    new_map.load()
-	    
-	    # In case of a null map continue, do not register null maps
-	    if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
-		if not register_null:
-		    empty_maps.append(new_map)
-		    continue
-
-	    # Insert map in temporal database
-	    new_map.insert(dbif)
-
-	    new_sp.register_map(new_map, dbif)
+            core.percent(count, num, 1)
+
+            # Read the map data
+            new_map.load()
+
+            # In case of a null map continue, do not register null maps
+            if new_map.metadata.get_min() is None and \
+               new_map.metadata.get_max() is None:
+                if not register_null:
+                    empty_maps.append(new_map)
+                    continue
+
+            # Insert map in temporal database
+            new_map.insert(dbif)
+
+            new_sp.register_map(new_map, dbif)
 
         # Update the spatio-temporal extent and the metadata table entries
         new_sp.update_from_registered_maps(dbif)
-		
-	core.percent(1, 1, 1)
-
-	# Remove empty maps
-	if len(empty_maps) > 0:
-	    names = ""
-	    count = 0
-	    for map in empty_maps:
-		if count == 0:
-		    names += "%s"%(map.get_name())
-		else:
-		    names += ",%s"%(map.get_name())
-		count += 1
-	    if type == "raster":
-		core.run_command("g.remove", rast=names, quiet=True)
-	    elif type == "raster3d":
-		core.run_command("g.remove", rast3d=names, quiet=True)
-        
+
+        core.percent(1, 1, 1)
+
+        # Remove empty maps
+        if len(empty_maps) > 0:
+            names = ""
+            count = 0
+            for map in empty_maps:
+                if count == 0:
+                    names += "%s" % (map.get_name())
+                else:
+                    names += ",%s" % (map.get_name())
+                count += 1
+            if type == "raster":
+                core.run_command("g.remove", rast=names, quiet=True)
+            elif type == "raster3d":
+                core.run_command("g.remove", rast3d=names, quiet=True)
+
     dbif.close()
 
 
@@ -332,10 +344,13 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs
 
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("r.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r.mapcalc", expression=expr, 
+                            overwrite=core.overwrite(), quiet=True)
 
 ###############################################################################
 
+
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
-    return core.run_command("r3.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r3.mapcalc", expression=expr, 
+                            overwrite=core.overwrite(), quiet=True)

+ 69 - 35
lib/python/temporal/metadata.py

@@ -6,6 +6,8 @@ Temporal GIS related metadata functions to be used in Python scripts and tgis pa
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> meta = tgis.RasterMetadata()
 >>> meta = tgis.Raster3DMetadata()
@@ -14,6 +16,8 @@ Usage:
 >>> meta = tgis.STR3DSMetadata()
 >>> meta = tgis.STVDSMetadata()
 
+@endcode
+
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -31,8 +35,9 @@ class RasterMetadataBase(SQLDatabaseInterface):
     
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.RasterMetadataBase(table="metadata", ident="soil@PERMANENT",
+        @code
+        
+        >>> meta = RasterMetadataBase(table="metadata", ident="soil@PERMANENT",
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... ewres=0.1, min=0, max=100)
         >>> meta.datatype
@@ -69,7 +74,8 @@ class RasterMetadataBase(SQLDatabaseInterface):
         ewres=0.1
         min=0.0
         max=100.0
-    
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, datatype=None, cols=None, 
 		rows=None, number_of_cells=None, nsres=None, ewres=None, 
@@ -267,9 +273,10 @@ class RasterMetadata(RasterMetadataBase):
         register table is stored.
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.RasterMetadata(ident="soil@PERMANENT",
+        >>> meta = RasterMetadata(ident="soil@PERMANENT",
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... ewres=0.1, min=0, max=100)
         >>> meta.datatype
@@ -310,6 +317,8 @@ class RasterMetadata(RasterMetadataBase):
         min=0.0
         max=100.0
         strds_register=None
+        
+        @endcode
     """
     def __init__(self, ident=None, strds_register=None, datatype=None, 
 		 cols=None, rows=None, number_of_cells=None, nsres=None, 
@@ -363,9 +372,10 @@ class Raster3DMetadata(RasterMetadataBase):
         raster dataset register table is stored.
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.Raster3DMetadata(ident="soil@PERMANENT",
+        >>> meta = Raster3DMetadata(ident="soil@PERMANENT",
         ... datatype="FCELL", cols=100, rows=100, depths=100,
         ... number_of_cells=1000000, nsres=0.1, ewres=0.1, tbres=0.1,
         ... min=0, max=100)
@@ -415,6 +425,8 @@ class Raster3DMetadata(RasterMetadataBase):
         str3ds_register=None
         depths=100
         tbres=0.1
+        
+        @endcode
     """
     def __init__(self, ident=None, str3ds_register=None, datatype=None, 
 		 cols=None, rows=None, depths=None, number_of_cells=None, 
@@ -507,8 +519,9 @@ class VectorMetadata(SQLDatabaseInterface):
         raster dataset register table is stored.
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
         >>> meta = VectorMetadata(ident="lidar@PERMANENT", is_3d=True, 
         ... number_of_points=1, number_of_lines=2, number_of_boundaries=3,
         ... number_of_centroids=4, number_of_faces=5, number_of_kernels=6, 
@@ -573,6 +586,8 @@ class VectorMetadata(SQLDatabaseInterface):
         islands=10
         holes=11
         volumes=12
+        
+        @endcode
     """
     def __init__(
         self, ident=None, stvds_register=None, is_3d=False, 
@@ -851,13 +866,15 @@ class VectorMetadata(SQLDatabaseInterface):
 
 
 class STDSMetadataBase(SQLDatabaseInterface):
-    """!This is the space time dataset metadata base class for strds, stvds and str3ds datasets
+    """!This is the space time dataset metadata base class for 
+       strds, stvds and str3ds datasets
        setting/getting the id, the title and the description
        
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STDSMetadataBase(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STDSMetadataBase(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils@PERMANENT'
@@ -874,6 +891,8 @@ class STDSMetadataBase(SQLDatabaseInterface):
          | Soils 1950 - 2010
         >>> meta.print_shell_info()
         number_of_maps=None
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, title=None, description=None):
 
@@ -956,16 +975,18 @@ class STDSMetadataBase(SQLDatabaseInterface):
 
 
 class STDSRasterMetadataBase(STDSMetadataBase):
-    """!This is the space time dataset metadata base class for strds and str3ds datasets
+    """!This is the space time dataset metadata base 
+       class for strds and str3ds datasets
 
        Most of the metadata values are set by triggers in the database when
-       new raster or voxel maps are added. Therefor only some set- an many get-functions
-       are available.
+       new raster or voxel maps are added. Therefor only some 
+       set- an many get-functions are available.
        
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STDSRasterMetadataBase(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STDSRasterMetadataBase(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils@PERMANENT'
@@ -1006,6 +1027,8 @@ class STDSRasterMetadataBase(STDSMetadataBase):
         min_max=None
         max_min=None
         max_max=None
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, title=None, description=None):
 
@@ -1147,8 +1170,9 @@ class STRDSMetadata(STDSRasterMetadataBase):
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STRDSMetadata(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STRDSMetadata(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils@PERMANENT'
@@ -1193,6 +1217,8 @@ class STRDSMetadata(STDSRasterMetadataBase):
         max_min=None
         max_max=None
         raster_register=None
+        
+        @endcode
     """
     def __init__(self, ident=None, raster_register=None, title=None, description=None):
 
@@ -1241,8 +1267,9 @@ class STR3DSMetadata(STDSRasterMetadataBase):
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STR3DSMetadata(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STR3DSMetadata(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         'soils@PERMANENT'
@@ -1293,6 +1320,8 @@ class STR3DSMetadata(STDSRasterMetadataBase):
         tbres_min=None
         tbres_max=None
         raster3d_register=None
+        
+        @endcode
         """
     def __init__(self, ident=None, raster3d_register=None, title=None, description=None):
 
@@ -1366,9 +1395,12 @@ class STVDSMetadata(STDSMetadataBase):
        Most of the metadata values are set by triggers in the database when
        new vector maps are added. Therefor only some set- an many get-functions
        are available.
-       
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STVDSMetadata(ident="lidars@PERMANENT",
+        
+        Usage:
+        
+        @code
+        
+        >>> meta = STVDSMetadata(ident="lidars@PERMANENT",
         ... title="LIDARS", description="LIDARS 2008 - 2010")
         >>> meta.id
         'lidars@PERMANENT'
@@ -1424,6 +1456,8 @@ class STVDSMetadata(STDSMetadataBase):
         islands=None
         holes=None
         volumes=None
+        
+        @endcode
     """
     def __init__(
         self, ident=None, vector_register=None, title=None, description=None):
@@ -1600,18 +1634,18 @@ class STVDSMetadata(STDSMetadataBase):
         STDSMetadataBase.print_info(self)
         print " | Vector register table:...... " + str(
             self.get_vector_register())
-        print " | Number of points ........... " + str(self.get_number_of_points())
-        print " | Number of lines ............ " + str(self.get_number_of_lines())
-        print " | Number of boundaries ....... " + str(self.get_number_of_boundaries())
-        print " | Number of centroids ........ " + str(self.get_number_of_centroids())
-        print " | Number of faces ............ " + str(self.get_number_of_faces())
-        print " | Number of kernels .......... " + str(self.get_number_of_kernels())
-        print " | Number of primitives ....... " + str(self.get_number_of_primitives())
-        print " | Number of nodes ............ " + str(self.get_number_of_nodes())
-        print " | Number of areas ............ " + str(self.get_number_of_areas())
-        print " | Number of islands .......... " + str(self.get_number_of_islands())
-        print " | Number of holes ............ " + str(self.get_number_of_holes())
-        print " | Number of volumes .......... " + str(self.get_number_of_volumes())
+        print " | Number of points ........... " + str(self.number_of_points)
+        print " | Number of lines ............ " + str(self.number_of_lines)
+        print " | Number of boundaries ....... " + str(self.number_of_boundaries)
+        print " | Number of centroids ........ " + str(self.number_of_centroids)
+        print " | Number of faces ............ " + str(self.number_of_faces)
+        print " | Number of kernels .......... " + str(self.number_of_kernels)
+        print " | Number of primitives ....... " + str(self.number_of_primitives)
+        print " | Number of nodes ............ " + str(self.number_of_nodes)
+        print " | Number of areas ............ " + str(self.number_of_areas)
+        print " | Number of islands .......... " + str(self.number_of_islands)
+        print " | Number of holes ............ " + str(self.number_of_holes)
+        print " | Number of volumes .......... " + str(self.number_of_volumes)
 
     def print_shell_info(self):
         """!Print information about this class in shell style"""

Разлика између датотеке није приказан због своје велике величине
+ 543 - 419
lib/python/temporal/space_time_datasets.py


+ 226 - 166
lib/python/temporal/space_time_datasets_tools.py

@@ -23,13 +23,15 @@ for details.
 """
 
 from space_time_datasets import *
- 
+
 ###############################################################################
 
-def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=None, \
-                                        end=None, unit=None, increment=None, dbif = None, \
-                                        interval=False, fs="|"):
-    """!Use this method to register maps in space time datasets. This function is generic and
+
+def register_maps_in_space_time_dataset(
+    type, name, maps=None, file=None, start=None,
+    end=None, unit=None, increment=None, dbif=None,
+        interval=False, fs="|"):
+    """!Use this method to register maps in space time datasets. 
 
        Additionally a start time string and an increment string can be specified
        to assign a time interval automatically to the maps.
@@ -40,86 +42,100 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
        @param type: The type of the maps rast, rast3d or vect
        @param name: The name of the space time dataset
        @param maps: A comma separated list of map names
-       @param file: Input file one map with start and optional end time, one per line
-       @param start: The start date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param end: The end date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param unit: The unit of the relative time: years, months, days, hours, minutes, seconds
-       @param increment: Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative: 1.0)
+       @param file: Input file one map with start and optional end time, 
+                    one per line
+       @param start: The start date and time of the first raster map
+                    (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                    format relative is integer 5)
+       @param end: The end date and time of the first raster map
+                   (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                   format relative is integer 5)
+       @param unit: The unit of the relative time: years, months, days,
+                    hours, minutes, seconds
+       @param increment: Time increment between maps for time stamp creation
+                         (format absolute: NNN seconds, minutes, hours, days,
+                         weeks, months, years; format relative: 1.0)
        @param dbif: The database interface to be used
-       @param interval: If True, time intervals are created in case the start time and an increment is provided
+       @param interval: If True, time intervals are created in case the start
+                        time and an increment is provided
        @param fs: Field separator used in input file
     """
 
     start_time_in_file = False
     end_time_in_file = False
-    
+
     if maps and file:
-        core.fatal(_("%s= and %s= are mutually exclusive") % ("input","file"))
+        core.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))
 
     if end and increment:
-        core.fatal(_("%s= and %s= are mutually exclusive") % ("end","increment"))
+        core.fatal(_("%s= and %s= are mutually exclusive") % (
+            "end", "increment"))
 
     if end and not start:
-        core.fatal(_("Please specify %s= and %s=") % ("start_time","end_time"))
+        core.fatal(_("Please specify %s= and %s=") % ("start_time",
+                                                      "end_time"))
 
     if not maps and not file:
-        core.fatal(_("Please specify %s= or %s=") % ("input","file"))
+        core.fatal(_("Please specify %s= or %s=") % ("input", "file"))
 
     # We may need the mapset
-    mapset =  core.gisenv()["MAPSET"]
-    
+    mapset = core.gisenv()["MAPSET"]
+
     # The name of the space time dataset is optional
     if name:
-	# Check if the dataset name contains the mapset as well
-	if name.find("@") < 0:
-	    id = name + "@" + mapset
-	else:
-	    id = name
-
-	if type == "rast" or type == "raster":
-	    sp = dataset_factory("strds", id)
-	elif type == "rast3d":
-	    sp = dataset_factory("str3ds", id)
-	elif type == "vect" or type == "vector":
-	    sp = dataset_factory("stvds", id)
-	else:
-	    core.fatal(_("Unkown map type: %s")%(type))
-
-        
+        # Check if the dataset name contains the mapset as well
+        if name.find("@") < 0:
+            id = name + "@" + mapset
+        else:
+            id = name
+
+        if type == "rast" or type == "raster":
+            sp = dataset_factory("strds", id)
+        elif type == "rast3d":
+            sp = dataset_factory("str3ds", id)
+        elif type == "vect" or type == "vector":
+            sp = dataset_factory("stvds", id)
+        else:
+            core.fatal(_("Unkown map type: %s") % (type))
+
     dbif, connect = init_dbif(None)
 
     if name:
-	# Read content from temporal database
-	sp.select(dbif)
+        # Read content from temporal database
+        sp.select(dbif)
 
-	if sp.is_in_db(dbif) == False:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> no found") % (sp.get_new_map_instance(None).get_type(), name))
+        if not sp.is_in_db(dbif):
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> no found") %
+                       (sp.get_new_map_instance(None).get_type(), name))
+
+        if sp.is_time_relative() and not unit:
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> with relative time found, "
+                         "but no relative unit set for %s maps") %
+                       (sp.get_new_map_instance(None).get_type(),
+                        name, sp.get_new_map_instance(None).get_type()))
 
-	if sp.is_time_relative() and not unit:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> with relative time found, but no relative unit set for %s maps") % (sp.get_new_map_instance(None).get_type(), name, sp.get_new_map_instance(None).get_type()))
-    
     # We need a dummy map object to build the map ids
     dummy = dataset_factory(type, None)
-        
+
     maplist = []
-    
+
     # Map names as comma separated string
     if maps:
         if maps.find(",") < 0:
-            maplist = [maps,]
+            maplist = [maps, ]
         else:
             maplist = maps.split(",")
 
-	# Build the map list again with the ids
-	for count in range(len(maplist)):
-	    row = {}
-	    mapid = dummy.build_id(maplist[count], mapset, None)
-		
-	    row["id"] = mapid
+        # Build the map list again with the ids
+        for count in range(len(maplist)):
+            row = {}
+            mapid = dummy.build_id(maplist[count], mapset, None)
+
+            row["id"] = mapid
             maplist[count] = row
-            
+
     # Read the map list from file
     if file:
         fd = open(file, "r")
@@ -145,69 +161,73 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
 
             mapname = line_list[0].strip()
             row = {}
-            
-	    if start_time_in_file and  end_time_in_file:
-	        row["start"] = line_list[1].strip()
-	        row["end"] = line_list[2].strip()
 
-	    if start_time_in_file and  not end_time_in_file:
-	        row["start"] = line_list[1].strip()
-	    
-	    row["id"] = dummy.build_id(mapname, mapset)
+            if start_time_in_file and end_time_in_file:
+                row["start"] = line_list[1].strip()
+                row["end"] = line_list[2].strip()
+
+            if start_time_in_file and not end_time_in_file:
+                row["start"] = line_list[1].strip()
+
+            row["id"] = dummy.build_id(mapname, mapset)
 
             maplist.append(row)
-    
+
     num_maps = len(maplist)
     map_object_list = []
     statement = ""
-    
+
     core.message(_("Gathering map informations"))
-    
+
     for count in range(len(maplist)):
-	core.percent(count, num_maps, 1)
+        core.percent(count, num_maps, 1)
 
         # Get a new instance of the map type
         map = dataset_factory(type, maplist[count]["id"])
 
         # Use the time data from file
-        if maplist[count].has_key("start"):
+        if "start" in maplist[count]:
             start = maplist[count]["start"]
-        if maplist[count].has_key("end"):
+        if "end" in maplist[count]:
             end = maplist[count]["end"]
-            
+
         is_in_db = False
 
         # Put the map into the database
-        if map.is_in_db(dbif) == False:
+        if not map.is_in_db(dbif):
             is_in_db = False
             # Break in case no valid time is provided
-            if start == "" or start == None:
+            if start == "" or start is None:
                 dbif.close()
                 if map.get_layer():
-		    core.fatal(_("Unable to register %s map <%s> with layer %s. The map has no valid time and the start time is not set.") % \
-				(map.get_type(), map.get_map_id(), map.get_layer() ))
-		else:
-		    core.fatal(_("Unable to register %s map <%s>. The map has no valid time and the start time is not set.") % \
-				(map.get_type(), map.get_map_id() ))
-	    
-	    if unit:
+                    core.fatal(_("Unable to register %s map <%s> with layer %s. "
+                                 "The map has no valid time and the start time is not set.") %
+                               (map.get_type(), map.get_map_id(), map.get_layer()))
+                else:
+                    core.fatal(_("Unable to register %s map <%s>. The map has no valid"
+                                 " time and the start time is not set.") %
+                               (map.get_type(), map.get_map_id()))
+
+            if unit:
                 map.set_time_to_relative()
             else:
                 map.set_time_to_absolute()
- 
+
         else:
             is_in_db = True
-            if core.overwrite == False:
-		continue
+            if not core.overwrite:
+                continue
             map.select(dbif)
             if name and map.get_temporal_type() != sp.get_temporal_type():
                 dbif.close()
                 if map.get_layer():
-		    core.fatal(_("Unable to register %s map <%s> with layer. The temporal types are different.") %  \
-		                 (map.get_type(), map.get_map_id(), map.get_layer()))
-		else:
-		    core.fatal(_("Unable to register %s map <%s>. The temporal types are different.") %  \
-		                 (map.get_type(), map.get_map_id()))
+                    core.fatal(_("Unable to register %s map <%s> with layer. "
+                                 "The temporal types are different.") %
+                               (map.get_type(), map.get_map_id(), map.get_layer()))
+                else:
+                    core.fatal(_("Unable to register %s map <%s>. "
+                                 "The temporal types are different.") %
+                               (map.get_type(), map.get_map_id()))
 
         # Load the data from the grass file database
         map.load()
@@ -217,21 +237,25 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
             # In case the time is in the input file we ignore the increment counter
             if start_time_in_file:
                 count = 1
-            assign_valid_time_to_map(ttype=map.get_temporal_type(), map=map, start=start, end=end, unit=unit, increment=increment, mult=count, interval=interval)
+            assign_valid_time_to_map(ttype=map.get_temporal_type(),
+                                     map=map, start=start, end=end, unit=unit,
+                                     increment=increment, mult=count,
+                                     interval=interval)
 
         if is_in_db:
-           #  Gather the SQL update statement
-           statement += map.update_all(dbif=dbif, execute=False)
+            #  Gather the SQL update statement
+            statement += map.update_all(dbif=dbif, execute=False)
         else:
-           #  Gather the SQL insert statement
-           statement += map.insert(dbif=dbif, execute=False)
+            #  Gather the SQL insert statement
+            statement += map.insert(dbif=dbif, execute=False)
 
         # Sqlite3 performace better for huge datasets when committing in small chunks
         if dbmi.__name__ == "sqlite3":
             if count % 100 == 0:
-                if statement != None and statement != "":
-                    core.message(_("Registering maps in the temporal database"))
-		    dbif.execute_transaction(statement)
+                if statement is not None and statement != "":
+                    core.message(_("Registering maps in the temporal database")
+                                 )
+                    dbif.execute_transaction(statement)
                     statement = ""
 
         # Store the maps in a list to register in a space time dataset
@@ -240,7 +264,7 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
 
     core.percent(num_maps, num_maps, 1)
 
-    if statement != None and statement != "":
+    if statement is not None and statement != "":
         core.message(_("Register maps in the temporal database"))
         dbif.execute_transaction(statement)
 
@@ -251,63 +275,80 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
         num_maps = len(map_object_list)
         core.message(_("Register maps in the space time raster dataset"))
         for map in map_object_list:
-	    core.percent(count, num_maps, 1)
-	    sp.register_map(map=map, dbif=dbif)
+            core.percent(count, num_maps, 1)
+            sp.register_map(map=map, dbif=dbif)
             count += 1
-        
+
     # Update the space time tables
     if name:
         core.message(_("Update space time raster dataset"))
-	sp.update_from_registered_maps(dbif)
+        sp.update_from_registered_maps(dbif)
 
     if connect == True:
         dbif.close()
 
     core.percent(num_maps, num_maps, 1)
-        
+
 
 ###############################################################################
 
 def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None, mult=1, interval=False):
     """!Assign the valid time to a map dataset
 
-       @param ttype: The temporal type which should be assigned and which the time format is of
+       @param ttype: The temporal type which should be assigned
+                     and which the time format is of
        @param map: A map dataset object derived from abstract_map_dataset
-       @param start: The start date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param end: The end date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param unit: The unit of the relative time: years, months, days, hours, minutes, seconds
-       @param increment: Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative is integer 1)
+       @param start: The start date and time of the first raster map
+                     (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                     format relative is integer 5)
+       @param end: The end date and time of the first raster map
+                   (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                   format relative is integer 5)
+       @param unit: The unit of the relative time: years, months,
+                    days, hours, minutes, seconds
+       @param increment: Time increment between maps for time stamp creation
+                        (format absolute: NNN seconds, minutes, hours, days,
+                        weeks, months, years; format relative is integer 1)
        @param multi: A multiplier for the increment
-       @param interval: If True, time intervals are created in case the start time and an increment is provided
+       @param interval: If True, time intervals are created in case the start
+                        time and an increment is provided
     """
 
     if ttype == "absolute":
         start_time = string_to_datetime(start)
-        if start_time == None:
+        if start_time is None:
             dbif.close()
-            core.fatal(_("Unable to convert string \"%s\"into a datetime object")%(start))
+            core.fatal(_("Unable to convert string \"%s\"into a "
+                         "datetime object") % (start))
         end_time = None
 
         if end:
             end_time = string_to_datetime(end)
-            if end_time == None:
+            if end_time is None:
                 dbif.close()
-                core.fatal(_("Unable to convert string \"%s\"into a datetime object")%(end))
+                core.fatal(_("Unable to convert string \"%s\"into a "
+                             "datetime object") % (end))
 
         # Add the increment
         if increment:
-            start_time = increment_datetime_by_string(start_time, increment, mult)
-            if start_time == None:
-		core.fatal(_("Error in increment computation"))
+            start_time = increment_datetime_by_string(
+                start_time, increment, mult)
+            if start_time is None:
+                core.fatal(_("Error in increment computation"))
             if interval:
-                end_time = increment_datetime_by_string(start_time, increment, 1)
-		if end_time == None:
-		    core.fatal(_("Error in increment computation"))
-	if map.get_layer():
-	    core.verbose(_("Set absolute valid time for map <%s> with layer %s to %s - %s") % (map.get_map_id(), map.get_layer(), str(start_time), str(end_time)))
+                end_time = increment_datetime_by_string(
+                    start_time, increment, 1)
+                if end_time is None:
+                    core.fatal(_("Error in increment computation"))
+        if map.get_layer():
+            core.verbose(_("Set absolute valid time for map <%(id)s> with "
+                           "layer %(layer)s to %(start)s - %(end)s") %
+                         {'id': map.get_map_id(), 'layer': map.get_layer(),
+                          'start': str(start_time), 'end': str(end_time)})
         else:
-	    core.verbose(_("Set absolute valid time for map <%s> to %s - %s") % (map.get_map_id(), str(start_time), str(end_time)))
-        
+            core.verbose(_("Set absolute valid time for map <%s> to %s - %s") %
+                         (map.get_map_id(), str(start_time), str(end_time)))
+
         map.set_absolute_time(start_time, end_time, None)
     else:
         start_time = int(start)
@@ -321,19 +362,26 @@ def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None, mult=
             if interval:
                 end_time = start_time + int(increment)
 
-	if map.get_layer():
-	    core.verbose(_("Set relative valid time for map <%s> with layer %s to %i - %s with unit %s") % (map.get_map_id(), map.get_layer(), start_time,  str(end_time), unit))
+        if map.get_layer():
+            core.verbose(_("Set relative valid time for map <%s> with layer %s "
+                           "to %i - %s with unit %s") %
+                         (map.get_map_id(), map.get_layer(), start_time,
+                          str(end_time), unit))
         else:
-	    core.verbose(_("Set relative valid time for map <%s> to %i - %s with unit %s") % (map.get_map_id(), start_time,  str(end_time), unit))
-	    
+            core.verbose(_("Set relative valid time for map <%s> to %i - %s "
+                           "with unit %s") % (map.get_map_id(), start_time,
+                                              str(end_time), unit))
+
         map.set_relative_time(start_time, end_time, unit)
 
 ###############################################################################
 
+
 def dataset_factory(type, id):
     """!A factory functions to create space time or map datasets
-    
-       @param type: the dataset type: rast or raster, rast3d, vect or vector, strds, str3ds, stvds
+
+       @param type: the dataset type: rast or raster, rast3d,
+                    vect or vector, strds, str3ds, stvds
        @param id: The id of the dataset ("name@mapset")
     """
     if type == "strds":
@@ -346,7 +394,7 @@ def dataset_factory(type, id):
         sp = raster_dataset(id)
     elif type == "rast3d":
         sp = raster3d_dataset(id)
-    elif type == "vect" or  type == "vector":
+    elif type == "vect" or type == "vector":
         sp = vector_dataset(id)
     else:
         core.error(_("Unknown dataset type: %s") % type)
@@ -356,24 +404,32 @@ def dataset_factory(type, id):
 
 ###############################################################################
 
+
 def list_maps_of_stds(type, input, columns, order, where, separator, method, header):
     """! List the maps of a space time dataset using diffetent methods
 
         @param type: The type of the maps raster, raster3d or vector
         @param input: Name of a space time raster dataset
-        @param columns: A comma separated list of columns to be printed to stdout 
-        @param order: A comma separated list of columns to order the space time dataset by category 
-        @param where: A where statement for selected listing without "WHERE" e.g: start_time < "2001-01-01" and end_time > "2001-01-01"
+        @param columns: A comma separated list of columns to be printed to stdout
+        @param order: A comma separated list of columns to order the
+                      space time dataset by category
+        @param where: A where statement for selected listing without "WHERE"
+                      e.g: start_time < "2001-01-01" and end_time > "2001-01-01"
         @param separator: The field separator character between the columns
-        @param method: String identifier to select a method out of cols,comma,delta or deltagaps
+        @param method: String identifier to select a method out of cols,
+                       comma,delta or deltagaps
             * "cols": Print preselected columns specified by columns
             * "comma": Print the map ids (name@mapset) as comma separated string
-            * "delta": Print the map ids (name@mapset) with start time, end time, relative length of intervals and the relative distance to the begin
-            * "deltagaps": Same as "delta" with additional listing of gaps. Gaps can be simply identified as the id is "None"
-            * "gran": List map using the granularity of the space time dataset, columns are identical to deltagaps 
-        @param header: Set True to print column names 
+            * "delta": Print the map ids (name@mapset) with start time,
+                       end time, relative length of intervals and the relative
+                       distance to the begin
+            * "deltagaps": Same as "delta" with additional listing of gaps.
+                           Gaps can be simply identified as the id is "None"
+            * "gran": List map using the granularity of the space time dataset,
+                      columns are identical to deltagaps
+        @param header: Set True to print column names
     """
-    mapset =  core.gisenv()["MAPSET"]
+    mapset = core.gisenv()["MAPSET"]
 
     if input.find("@") >= 0:
         id = input
@@ -381,21 +437,21 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
         id = input + "@" + mapset
 
     sp = dataset_factory(type, id)
-    
-    if sp.is_in_db() == False:
+
+    if not sp.is_in_db():
         core.fatal(_("Dataset <%s> not found in temporal database") % (id))
 
     sp.select()
 
-    if separator == None or separator == "":
+    if separator is None or separator == "":
         separator = "\t"
-           
+
     # This method expects a list of objects for gap detection
     if method == "delta" or method == "deltagaps" or method == "gran":
-	if type == "stvds":
-	    columns = "id,name,layer,mapset,start_time,end_time"
-	else:
-	    columns = "id,name,mapset,start_time,end_time"
+        if type == "stvds":
+            columns = "id,name,layer,mapset,start_time,end_time"
+        else:
+            columns = "id,name,mapset,start_time,end_time"
         if method == "deltagaps":
             maps = sp.get_registered_maps_as_objects_with_gaps(where, None)
         elif method == "delta":
@@ -405,15 +461,15 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
 
         if header:
             string = ""
-	    string += "%s%s" % ("id", separator)
-	    string += "%s%s" % ("name", separator)
+            string += "%s%s" % ("id", separator)
+            string += "%s%s" % ("name", separator)
             if type == "stvds":
-		string += "%s%s" % ("layer", separator)
-	    string += "%s%s" % ("mapset", separator)
+                string += "%s%s" % ("layer", separator)
+            string += "%s%s" % ("mapset", separator)
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("interval_length", separator)
-            string += "%s"   % ("distance_from_begin")
+            string += "%s" % ("distance_from_begin")
 
         if maps and len(maps) > 0:
 
@@ -431,7 +487,7 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
 
                 start, end = map.get_valid_time()
                 if end:
-                    delta = end -start
+                    delta = end - start
                 else:
                     delta = None
                 delta_first = start - first_time
@@ -444,13 +500,13 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
                 string = ""
                 string += "%s%s" % (map.get_id(), separator)
                 string += "%s%s" % (map.get_name(), separator)
-		if type == "stvds":
-		    string += "%s%s" % (map.get_layer(), separator)
+                if type == "stvds":
+                    string += "%s%s" % (map.get_layer(), separator)
                 string += "%s%s" % (map.get_mapset(), separator)
                 string += "%s%s" % (start, separator)
                 string += "%s%s" % (end, separator)
                 string += "%s%s" % (delta, separator)
-                string += "%s"   % (delta_first)
+                string += "%s" % (delta_first)
                 print string
 
     else:
@@ -497,17 +553,21 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
                         else:
                             output += str(col)
                         count += 1
-                        
+
                     print output
 
 ###############################################################################
 
+
 def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, separator, method, spatial=False):
-    """! Sample the input space time datasets with a sample space time dataset and print the result to stdout
+    """!Sample the input space time datasets with a sample 
+       space time dataset and print the result to stdout
+
+        In case multiple maps are located in the current granule, 
+        the map names are separated by comma.
 
-        In case multiple maps are located in the current granule, the map names are separated by comma.
-        
-        In case a layer is present, the names map ids are extended in this form: name:layer@mapset 
+        In case a layer is present, the names map ids are extended 
+        in this form: name:layer@mapset
 
         Attention: Do not use the comma as separator
 
@@ -515,12 +575,13 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
         @param samtype: Type of the sample space time dataset (strds, stvds or str3ds)
         @param input: Name of a space time dataset
         @param sampler: Name of a space time dataset used for temporal sampling
-        @param header: Set True to print column names 
+        @param header: Set True to print column names
         @param separator: The field separator character between the columns
-        @param method: The method to be used for temporal sampling (start,during,contain,overlap,equal)
+        @param method: The method to be used for temporal sampling 
+                       (start,during,contain,overlap,equal)
         @param spatial: Perform spatial overlapping check
     """
-    mapset =  core.gisenv()["MAPSET"]
+    mapset = core.gisenv()["MAPSET"]
 
     input_list = inputs.split(",")
     sts = []
@@ -554,9 +615,9 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
 
     sst.select(dbif)
 
-    if separator == None or separator == "" or separator.find(",") >= 0:
+    if separator is None or separator == "" or separator.find(",") >= 0:
         separator = " | "
-       
+
     mapmatrizes = []
     for st in sts:
         mapmatrix = st.sample_by_dataset(sst, method, spatial, dbif)
@@ -573,7 +634,7 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("interval_length", separator)
-            string += "%s"   % ("distance_from_begin")
+            string += "%s" % ("distance_from_begin")
 
         first_time, dummy = mapmatrizes[0][0]["granule"].get_valid_time()
 
@@ -590,7 +651,7 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
                         mapnames += ",%s" % str(sample.get_id())
                     count += 1
                 mapname_list.append(mapnames)
-                
+
             entry = mapmatrizes[0][i]
             map = entry["granule"]
 
@@ -613,8 +674,7 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
             string += "%s%s" % (start, separator)
             string += "%s%s" % (end, separator)
             string += "%s%s" % (delta, separator)
-            string += "%s"   % (delta_first)
+            string += "%s" % (delta_first)
             print string
 
     dbif.close()
-

+ 185 - 141
lib/python/temporal/spatial_extent.py

@@ -6,6 +6,8 @@ Temporal GIS related spatial extent functions to be used in Python scripts and t
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> extent = tgis.RasterSpatialExtent( 
 ... ident="raster@PERMANENT", north=90, south=90, east=180, west=180,
@@ -26,6 +28,7 @@ Usage:
 ... ident="stvds@PERMANENT", north=90, south=90, east=180, west=180,
 ... top=100, bottom=-20)
 
+@endcode
 
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -43,8 +46,11 @@ class SpatialExtent(SQLDatabaseInterface):
         This class implements a three dimensional axis aligned bounding box
         and functions to compute topological relationships
         
-        >>> import grass.temporal as tgis
-        >>> extent = tgis.SpatialExtent(table="raster_spatial_extent", 
+        Usage:
+        
+        @code
+        
+        >>> extent = SpatialExtent(table="raster_spatial_extent", 
         ... ident="soil@PERMANENT", north=90, south=90, east=180, west=180,
         ... top=100, bottom=-20)
         >>> extent.id
@@ -76,6 +82,8 @@ class SpatialExtent(SQLDatabaseInterface):
         west=180.0
         top=100.0
         bottom=-20.0
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, north=None, south=None, 
                  east=None, west=None, top=None, bottom=None, proj="XY"):
@@ -98,7 +106,8 @@ class SpatialExtent(SQLDatabaseInterface):
         """
 
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute overlapping_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "overlapping_2d for spatial extents"))
             return False
 
         N = extent.get_north()
@@ -142,11 +151,14 @@ class SpatialExtent(SQLDatabaseInterface):
            
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.overlapping(B)
         True
+        
+        @endcode
         """
 
         if not self.overlapping_2d(extent):
@@ -164,8 +176,8 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def intersect_2d(self, extent):
-        """!Return the two dimensional intersection as spatial_extent object or None
-           in case no intersection was found.
+        """!Return the two dimensional intersection as spatial_extent 
+           object or None in case no intersection was found.
         """
 
         if not self.overlapping_2d(extent):
@@ -211,14 +223,15 @@ class SpatialExtent(SQLDatabaseInterface):
         return new
 
     def intersect(self, extent):
-        """!Return the three dimensional intersection as spatial_extent object or None
-        in case no intersection was found.
+        """!Return the three dimensional intersection as spatial_extent 
+        object or None in case no intersection was found.
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -228,7 +241,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 10.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -238,7 +251,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 10.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -248,7 +261,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 30.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -258,7 +271,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 30.0
          | Top:........................ 50.0
          | Bottom:..................... -30.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
         >>> C = A.intersect(B)
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
@@ -268,6 +281,8 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 30.0
          | Top:........................ 30.0
          | Bottom:..................... -30.0
+        
+        @endcode
         """
 
         if not self.overlapping(extent):
@@ -295,15 +310,19 @@ class SpatialExtent(SQLDatabaseInterface):
         return new
 
     def is_in_2d(self, extent):
-        """Check two dimensional if the self is located in extent
-
+        """!Check two dimensional if the self is located in extent
+        
+        @verbatim
          _____
         |A _  |
         | |_| |
         |_____|B 
+        
+        @endverbatim
         """
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute is_in_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "is_in_2d for spatial extents"))
             return False
 
         eN = extent.get_north()
@@ -338,17 +357,20 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def is_in(self, extent):
-        """Check three dimensional if the self is located in extent 
+        """!Check three dimensional if the self is located in extent 
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=79, south=21, east=59, west=11, bottom=-49, top=49)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=79, south=21, east=59, west=11, bottom=-49, top=49)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.is_in(B)
         True
         >>> B.is_in(A)
         False
+        
+        @endcode
         """
         if not self.is_in_2d(extent):
             return False
@@ -367,18 +389,19 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def contain_2d(self, extent):
-        """Check two dimensional if self contains extent """
+        """!Check two dimensional if self contains extent """
         return extent.is_in_2d(self)
 
     def contain(self, extent):
-        """Check three dimensional if self contains extent """
+        """!Check three dimensional if self contains extent """
         return extent.is_in(self)
 
     def equivalent_2d(self, extent):
-        """Check two dimensional if self is equivalent to extent """
+        """!Check two dimensional if self is equivalent to extent """
 
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute equivalent_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "equivalent_2d for spatial extents"))
             return False
 
         eN = extent.get_north()
@@ -413,7 +436,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def equivalent(self, extent):
-        """Check three dimensional if self is equivalent to extent """
+        """!Check three dimensional if self is equivalent to extent """
 
         if not self.equivalent_2d(extent):
             return False
@@ -432,7 +455,9 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def cover_2d(self, extent):
-        """Return True if two dimensional self covers extent
+        """!Return True if two dimensional self covers extent
+           
+           @verbatim
             _____    _____    _____    _____
            |A  __|  |__  A|  |A | B|  |B | A|
            |  |B |  | B|  |  |  |__|  |__|  |
@@ -447,6 +472,8 @@ class SpatialExtent(SQLDatabaseInterface):
            |A|B  |  |_____|A |A|B|A|  |_____|A
            | |   |  |B    |  | | | |  |_____|B
            |_|___|  |_____|  |_|_|_|  |_____|A
+           
+           @endverbatim
 
            The following cases are excluded:
            * contain
@@ -513,7 +540,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def cover(self, extent):
-        """Return True if three dimensional self covers extent
+        """!Return True if three dimensional self covers extent
 
            The following cases are excluded:
            * contain
@@ -598,22 +625,27 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def covered_2d(self, extent):
-        """Check two dimensional if self is covered by  extent """
+        """!Check two dimensional if self is covered by extent """
 
         return extent.cover_2d(self)
 
     def covered(self, extent):
-        """Check three dimensional if self is covered by extent """
+        """!Check three dimensional if self is covered by extent """
 
         return extent.cover(self)
 
     def overlap_2d(self, extent):
-        """Return True if the two dimensional extents overlap. Code is lend from wind_overlap.c in lib/gis
+        """!Return True if the two dimensional extents overlap. Code is 
+           borrowed from wind_overlap.c in lib/gis
+           
+           @verbatim
             _____
            |A  __|__
            |  |  | B|
            |__|__|  |
               |_____|
+              
+           @endverbatim
 
            The following cases are excluded:
            * contain
@@ -668,7 +700,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def overlap(self, extent):
-        """Return True if the three dimensional extents overlap
+        """!Return True if the three dimensional extents overlap
 
            The following cases are excluded:
            * contain
@@ -731,22 +763,26 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def meet_2d(self, extent):
-        """ Check if self and extent meet each other in two dimensions
+        """!Check if self and extent meet each other in two dimensions
+        
+        @verbatim
           _____ _____    _____ _____
          |  A  |  B  |  |  B  |  A  |
          |_____|     |  |     |     |
                |_____|  |_____|_____|
 
-                 ___
-                | A |
-                |   |
-                |___|    _____
-               |  B  |  |  B  |
-               |     |  |     |
-               |_____|  |_____|_
-                          |  A  |
-                          |     |
-                          |_____|
+           ___
+          | A |
+          |   |
+          |___|    _____
+         |  B  |  |  B  |
+         |     |  |     |
+         |_____|  |_____|_
+                    |  A  |
+                    |     |
+                    |_____|
+         
+         @endverbatim
 
         """
 
@@ -805,7 +841,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def meet(self, extent):
-        """ Check if self and extent meet each other in three dimensions"""
+        """!Check if self and extent meet each other in three dimensions"""
         eN = extent.get_north()
         eS = extent.get_south()
         eE = extent.get_east()
@@ -880,7 +916,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def disjoint_2d(self, extent):
-        """Return True if the two dimensional extents are disjoint
+        """!Return True if the two dimensional extents are disjoint
         """
 
         if self.overlapping_2d(extent) or self.meet_2d(extent):
@@ -888,7 +924,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def disjoint(self, extent):
-        """Return True if the three dimensional extents are disjoint
+        """!Return True if the three dimensional extents are disjoint
         """
 
         if self.overlapping(extent) or self.meet(extent):
@@ -896,7 +932,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
 
     def spatial_relation_2d(self, extent):
-        """Returns the two dimensional spatial relation between self and extent
+        """!Returns the two dimensional spatial relation between self and extent
 
         Spatial relations are:
         * disjoint
@@ -931,7 +967,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return "unknown"
 
     def spatial_relation(self, extent):
-        """Returns the three dimensional spatial relation between self and extent
+        """!Returns the three dimensional spatial relation between self and extent
 
         Spatial relations are:
         * disjoint
@@ -946,19 +982,20 @@ class SpatialExtent(SQLDatabaseInterface):
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation(B)
         'equivalent'
         >>> B.spatial_relation(A)
         'equivalent'
-        >>> B = tgis.SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'cover'
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'cover'
         >>> A.spatial_relation(B)
@@ -967,135 +1004,137 @@ class SpatialExtent(SQLDatabaseInterface):
         'covered'
         >>> B.spatial_relation(A)
         'covered'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'cover'
         >>> B.spatial_relation_2d(A)
         'covered'
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
         >>> B.spatial_relation(A)
         'covered'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'contain'
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
         >>> A.spatial_relation(B)
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
         >>> A.spatial_relation(B)
         'contain'
         >>> B.spatial_relation(A)
         'in'
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
         >>> A.spatial_relation_2d(B)
         'overlap'
         >>> A.spatial_relation(B)
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
         >>> A.spatial_relation_2d(B)
         'in'
         >>> A.spatial_relation(B)
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
         >>> A.spatial_relation(B)
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
         >>> A.spatial_relation(B)
         'in'
-        >>> A = tgis.SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'meet'
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         'disjoint'
         >>> A.spatial_relation(B)
         'disjoint'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
         >>> A.spatial_relation(B)
         'meet'
+        
+        @endcode
         """
 
         if self.equivalent(extent):
@@ -1118,7 +1157,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return "unknown"
 
     def set_spatial_extent(self, north, south, east, west, top, bottom):
-        """Set the spatial extent"""
+        """!Set the spatial extent"""
 
         self.set_north(north)
         self.set_south(south)
@@ -1128,7 +1167,7 @@ class SpatialExtent(SQLDatabaseInterface):
         self.set_bottom(bottom)
 
     def set_projection(self, proj):
-        """Set the projection of the spatial extent it should be XY or LL.
+        """!Set the projection of the spatial extent it should be XY or LL.
            As default the projection is XY
         """
         if proj is None or (proj != "XY" and proj != "LL"):
@@ -1144,54 +1183,54 @@ class SpatialExtent(SQLDatabaseInterface):
         self.set_west(west)
 
     def set_id(self, ident):
-        """Convenient method to set the unique identifier (primary key)"""
+        """!Convenient method to set the unique identifier (primary key)"""
         self.ident = ident
         self.D["id"] = ident
 
     def set_north(self, north):
-        """Set the northern edge of the map"""
+        """!Set the northern edge of the map"""
         if north is not None:
             self.D["north"] = float(north)
         else:
             self.D["north"] = None
 
     def set_south(self, south):
-        """Set the southern edge of the map"""
+        """!Set the southern edge of the map"""
         if south is not None:
             self.D["south"] = float(south)
         else:
             self.D["south"] = None
 
     def set_west(self, west):
-        """Set the western edge of the map"""
+        """!Set the western edge of the map"""
         if west is not None:
             self.D["west"] = float(west)
         else:
             self.D["west"] = None
 
     def set_east(self, east):
-        """Set the eastern edge of the map"""
+        """!Set the eastern edge of the map"""
         if east is not None:
             self.D["east"] = float(east)
         else:
             self.D["east"] = None
 
     def set_top(self, top):
-        """Set the top edge of the map"""
+        """!Set the top edge of the map"""
         if top is not None:
             self.D["top"] = float(top)
         else:
             self.D["top"] = None
 
     def set_bottom(self, bottom):
-        """Set the bottom edge of the map"""
+        """!Set the bottom edge of the map"""
         if bottom is not None:
             self.D["bottom"] = float(bottom)
         else:
             self.D["bottom"] = None
 
     def get_id(self):
-        """Convenient method to get the unique identifier (primary key)
+        """!Convenient method to get the unique identifier (primary key)
            @return None if not found
         """
         if "id" in self.D:
@@ -1200,15 +1239,16 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
 
     def get_projection(self):
-        """Get the projection of the spatial extent"""
+        """!Get the projection of the spatial extent"""
         return self.D["proj"]
 
     def get_volume(self):
-        """Compute the volume of the extent, in case z is zero 
+        """!Compute the volume of the extent, in case z is zero 
            (top == bottom or top - bottom = 1) the area is returned"""
 
         if self.get_projection() == "LL":
-            core.error(_("Volume computation is not supported for LL projections"))
+            core.error(_("Volume computation is not supported "
+                         "for LL projections"))
 
         area = self.get_area()
 
@@ -1222,10 +1262,11 @@ class SpatialExtent(SQLDatabaseInterface):
         return area * z
 
     def get_area(self):
-        """Compute the area of the extent, extent in z direction is ignored"""
+        """!Compute the area of the extent, extent in z direction is ignored"""
 
         if self.get_projection() == "LL":
-            core.error(_("Area computation is not supported for LL projections"))
+            core.error(_("Area computation is not supported "
+                         "for LL projections"))
 
         bbox = self.get_spatial_extent()
 
@@ -1235,18 +1276,20 @@ class SpatialExtent(SQLDatabaseInterface):
         return x * y
 
     def get_spatial_extent(self):
-        """Return a tuple (north, south, east, west, top, bottom) of the spatial extent"""
+        """!Return a tuple (north, south, east, west, top, bottom) 
+           of the spatial extent"""
 
         return (
-            self.get_north(), self.get_south, self.get_east(), self.get_west(),
-            self.get_top(), self.get_bottom())
+            self.north, self.south, self.east, self.west,
+            self.top, self.bottom)
 
     def get_spatial_extent_2d(self):
-        """Return a tuple (north, south, east, west,) of the 2d spatial extent"""
-        return (self.get_north(), self.get_south, self.get_east(), self.get_west())
+        """!Return a tuple (north, south, east, west,) of the 2d spatial extent
+        """
+        return (self.north, self.south, self.east, self.west)
 
     def get_north(self):
-        """Get the northern edge of the map
+        """!Get the northern edge of the map
            @return None if not found"""
         if "north" in self.D:
             return self.D["north"]
@@ -1254,7 +1297,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
 
     def get_south(self):
-        """Get the southern edge of the map
+        """!Get the southern edge of the map
            @return None if not found"""
         if "south" in self.D:
             return self.D["south"]
@@ -1262,7 +1305,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
 
     def get_east(self):
-        """Get the eastern edge of the map
+        """!Get the eastern edge of the map
            @return None if not found"""
         if "east" in self.D:
             return self.D["east"]
@@ -1270,7 +1313,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
 
     def get_west(self):
-        """Get the western edge of the map
+        """!Get the western edge of the map
            @return None if not found"""
         if "west" in self.D:
             return self.D["west"]
@@ -1278,7 +1321,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
 
     def get_top(self):
-        """Get the top edge of the map
+        """!Get the top edge of the map
            @return None if not found"""
         if "top" in self.D:
             return self.D["top"]
@@ -1286,7 +1329,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
 
     def get_bottom(self):
-        """Get the bottom edge of the map
+        """!Get the bottom edge of the map
            @return None if not found"""
         if "bottom" in self.D:
             return self.D["bottom"]
@@ -1302,7 +1345,7 @@ class SpatialExtent(SQLDatabaseInterface):
     bottom= property(fget=get_bottom, fset=set_bottom)
 
     def print_info(self):
-        """Print information about this class in human readable style"""
+        """!Print information about this class in human readable style"""
         #      0123456789012345678901234567890
         print " +-------------------- Spatial extent ----------------------------------------+"
         print " | North:...................... " + str(self.get_north())
@@ -1325,37 +1368,38 @@ class SpatialExtent(SQLDatabaseInterface):
 ###############################################################################
 
 class RasterSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class Raster3DSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster3d_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class VectorSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "vector_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class STRDSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "strds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class STR3DSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "str3ds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 
-
 class STVDSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "stvds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
 

+ 228 - 190
lib/python/temporal/stds_export.py

@@ -14,9 +14,9 @@ output="/tmp/temp_1950_2012.tar.gz"
 compression="gzip"
 workdir="/tmp"
 where=None
-_format="GTiff"
-_type="strds"
-tgis.export_stds(input, output, compression, workdir, where, _format, _type)
+format_="GTiff"
+type_="strds"
+tgis.export_stds(input, output, compression, workdir, where, format_, type_)
 ...
 @endcode
 
@@ -39,7 +39,7 @@ init_file_name = "init.txt"
 metadata_file_name = "metadata.txt"
 read_file_name = "readme.txt"
 list_file_name = "list.txt"
-tmp_tar_file_name = "archive" 
+tmp_tar_file_name = "archive"
 
 # This global variable is for unique vector map export,
 # since single vector maps may have several layer
@@ -47,6 +47,8 @@ tmp_tar_file_name = "archive"
 exported_maps = {}
 
 ############################################################################
+
+
 def _export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -59,40 +61,46 @@ def _export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs):
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r.out.gdal as tif
         out_name = name + ".tif"
         if datatype == "CELL":
             nodata = max_val + 1
             if nodata < 256 and min_val >= 0:
-                gdal_type = "Byte" 
+                gdal_type = "Byte"
             elif nodata < 65536 and min_val >= 0:
-                gdal_type = "UInt16" 
+                gdal_type = "UInt16"
             elif min_val >= 0:
-                gdal_type = "UInt32" 
+                gdal_type = "UInt32"
             else:
-                gdal_type = "Int32" 
-            ret = core.run_command("r.out.gdal", flags="c", input=name, output=out_name, nodata=nodata, type=gdal_type, format="GTiff")
+                gdal_type = "Int32"
+            ret = core.run_command("r.out.gdal", flags="c", input=name, 
+                                   output=out_name, nodata=nodata, 
+                                   type=gdal_type, format="GTiff")
         else:
-            ret = core.run_command("r.out.gdal", flags="c", input=name, output=out_name, format="GTiff")
+            ret = core.run_command("r.out.gdal", flags="c",
+                                   input=name, output=out_name, format="GTiff")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
             core.fatal(_("Unable to export raster map <%s>" % name))
-            
+
         tar.add(out_name)
 
-        # Export the color rules 
+        # Export the color rules
         out_name = name + ".color"
         ret = core.run_command("r.colors.out", map=name, rules=out_name)
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export color rules for raster map <%s> r.out.gdal" % name))
-            
+            core.fatal(_("Unable to export color rules for raster "
+                         "map <%s> r.out.gdal" % name))
+
         tar.add(out_name)
 
 ############################################################################
+
+
 def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -102,17 +110,20 @@ def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r.pack
         ret = core.run_command("r.pack", input=name, flags="c")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export raster map <%s> with r.pack" % name))
-            
+            core.fatal(_("Unable to export raster map <%s> with r.pack" %
+                         name))
+
         tar.add(name + ".pack")
-        
+
 ############################################################################
+
+
 def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -125,29 +136,33 @@ def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the vector map with v.out.ogr
-        ret = core.run_command("v.out.ogr", input=name, dsn=(name + ".xml"), layer=layer, format="GML")
+        ret = core.run_command("v.out.ogr", input=name, 
+                               dsn=(name + ".xml"), layer=layer, format="GML")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export vector map <%s> as GML with v.out.ogr" % name))
-            
+            core.fatal(_("Unable to export vector map <%s> as "
+                         "GML with v.out.ogr" % name))
+
         tar.add(name + ".xml")
         tar.add(name + ".xsd")
-                
+
 ############################################################################
+
+
 def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
         start = row["start_time"]
         end = row["end_time"]
         layer = row["layer"]
-        
+
         # Export unique maps only
         if name in exported_maps:
             continue
-        
+
         if not layer:
             layer = 1
         if not end:
@@ -160,13 +175,16 @@ def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export vector map <%s> with v.pack" % name))
-            
+            core.fatal(_("Unable to export vector map <%s> with v.pack" %
+                         name))
+
         tar.add(name + ".pack")
-        
+
         exported_maps[name] = name
-        
+
 ############################################################################
+
+
 def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -176,172 +194,192 @@ def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r3.pack
         ret = core.run_command("r3.pack", input=name, flags="c")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
-            core.fatal(_("Unable to export raster map <%s> with r3.pack" % name))
-            
+            core.fatal(_("Unable to export raster map <%s> with r3.pack" %
+                         name))
+
         tar.add(name + ".pack")
 
 ############################################################################
-def export_stds(input, output, compression, workdir, where, _format="pack", _type="strds"):
-	"""
-		!Export space time datasets as tar archive with optional compression
-		
-		This method should be used to export space time datasets of type raster and vector
-		as tar archive that can be reimported with the method import_stds().
-		
-		@param input The name of the space time dataset to export
-		@param output The name of the archive file
-		@param compression The compression of the archive file: 
-		  * "no"  no compression
-		  * "gzip" GNU zip compression
-		  * "bzip2" Bzip compression
-		@param workdir The working directory used for extraction and packing
-		@param where The temporal WHERE SQL statement to select a subset of maps from the space time dataset
-		@param _format The export format:
-		  * "GTiff" Geotiff format, only for raster maps
-		  * "pack" The GRASS raster, 3D raster or vector Pack format, this is the default setting
-		  * "GML" GML file export format, only for vector maps, v.out.ogr export option
-		@param type The space time dataset type
-		  * "strds" Space time raster dataset
-		  * "str3ds" Space time 3D raster dataset
-		  * "stvds" Space time vector dataset
-	"""
-	mapset =  core.gisenv()["MAPSET"]
-
-	if input.find("@") >= 0:
-		id = input
-	else:
-		id = input + "@" + mapset
-		
-	sp = dataset_factory(_type, id)
-
-	if sp.is_in_db() == False:
-		core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
-
-	# Save current working directory path
-	old_cwd = os.getcwd()
-
-	# Create the temporary directory and jump into it
-	new_cwd = tempfile.mkdtemp(dir=workdir)
-	os.chdir(new_cwd)
-
-	sp.select()
-	   
-	if _type == "strds":
-		columns = "name,start_time,end_time,min,max,datatype"
-	elif _type == "stvds":
-		columns = "name,start_time,end_time,layer"
-	else:
-		columns = "name,start_time,end_time"
-	rows = sp.get_registered_maps(columns, where, "start_time", None)
-
-	if compression == "gzip":
-		flag = "w:gz"
-	elif compression == "bzip2":
-		flag = "w:bz2"
-	else:
-		flag = "w:"
-
-	# Open the tar archive to add the files
-	tar = tarfile.open(tmp_tar_file_name, flag)
-	list_file = open(list_file_name, "w")
-
-	fs = "|"
-
-	if rows:
-		if _type == "strds":
-			if _format == "GTiff":
-				_export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs)
-			else:
-				_export_raster_maps(rows, tar, list_file, new_cwd, fs)
-		elif _type == "stvds":
-			if _format == "GML":
-				_export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs)
-			else:
-				_export_vector_maps(rows, tar, list_file, new_cwd, fs)
-		elif _type == "str3ds":
-			_export_raster3d_maps(rows, tar, list_file, new_cwd, fs)
-		
-	list_file.close()
-
-	# Write projection and metadata
-	proj = core.read_command("g.proj", flags="j")
-
-	proj_file = open(proj_file_name, "w")
-	proj_file.write(proj)
-	proj_file.close()
-
-	init_file = open(init_file_name, "w")
-	# Create the init string
-	string = ""
-	string += "%s=%s\n" % ("stds_type", sp.get_type()) # This is optional, if not present strds will be assumed for backward compatibility
-	string += "%s=%s\n" % ("format", _format) # This is optional, if not present gtiff will be assumed for backward compatibility
-	string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
-	string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
-	string += "%s=%s\n" % ("number_of_maps", sp.metadata.get_number_of_maps())
-	north, south, east, west, top, bottom = sp.get_spatial_extent()
-	string += "%s=%s\n" % ("north", north)
-	string += "%s=%s\n" % ("south", south)
-	string += "%s=%s\n" % ("east", east)
-	string += "%s=%s\n" % ("west", west)
-	init_file.write(string)
-	init_file.close()
-
-	metadata = core.read_command("t.info", type=_type, input=id)
-	metadata_file = open(metadata_file_name, "w")
-	metadata_file.write(metadata)
-	metadata_file.close()
-
-	read_file = open(read_file_name, "w")
-	if _type == "strds":
-		read_file.write("This space time raster dataset was exported with t.rast.export of GRASS GIS 7\n")
-	elif _type == "stvds":
-		read_file.write("This space time vector dataset was exported with t.vect.export of GRASS GIS 7\n")
-	elif _type == "str3ds":
-		read_file.write("This space time 3D raster dataset was exported with t.rast3d.export of GRASS GIS 7\n")
-	read_file.write("\n")
-	read_file.write("Files:\n")
-	if _type == "strds":
-		if _format == "GTiff":
-					#123456789012345678901234567890
-			read_file.write("       *.tif  -- GeoTIFF raster files\n")
-			read_file.write("     *.color  -- GRASS GIS raster color rules\n")
-		elif _format == "pack":
-			read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
-	elif _type == "stvds":
-					#123456789012345678901234567890
-		if _format == "GML":
-			read_file.write("       *.xml  -- Vector GML files\n")
-		else:
-			read_file.write("      *.pack  -- GRASS vector files packed with v.pack\n")
-	elif _type == "str3ds":
-		read_file.write("      *.pack  -- GRASS 3D raster files packed with r3.pack\n")
-	read_file.write("%13s -- Projection information in PROJ.4 format\n" % (proj_file_name))
-	read_file.write("%13s -- GRASS GIS space time %s dataset information\n" % (init_file_name, sp.get_new_map_instance(None).get_type()))
-	read_file.write("%13s -- Time series file, lists all maps by name with interval\n"  % (list_file_name))
-	read_file.write("                 time stamps in ISO-Format. Field separator is |\n")
-	read_file.write("%13s -- Projection information in PROJ.4 format\n" % (metadata_file_name))
-	read_file.write("%13s -- This file\n" % (read_file_name))
-	read_file.close()
-
-	# Append the file list
-	tar.add(list_file_name)
-	tar.add(proj_file_name)
-	tar.add(init_file_name)
-	tar.add(read_file_name)
-	tar.add(metadata_file_name)
-	tar.close()
-
-	os.chdir(old_cwd)
-
-	# Move the archive to its destination
-	shutil.move(os.path.join(new_cwd, tmp_tar_file_name), output)
-
-	# Remove the temporary created working directory
-	shutil.rmtree(new_cwd)
 
+
+def export_stds(input, output, compression, workdir, where, format_="pack", 
+                type_="strds"):
+    """
+            !Export space time datasets as tar archive with optional compression
+
+            This method should be used to export space time datasets 
+            of type raster and vector as tar archive that can be reimported 
+            with the method import_stds().
+
+            @param input: The name of the space time dataset to export
+            @param output: The name of the archive file
+            @param compression: The compression of the archive file:
+              * "no"  no compression
+              * "gzip" GNU zip compression
+              * "bzip2" Bzip compression
+            @param workdir: The working directory used for extraction and packing
+            @param where: The temporal WHERE SQL statement to select a subset 
+                          of maps from the space time dataset
+            @param format:_ The export format:
+              * "GTiff" Geotiff format, only for raster maps
+              * "pack" The GRASS raster, 3D raster or vector Pack format, 
+                       this is the default setting
+              * "GML" GML file export format, only for vector maps, 
+                      v.out.ogr export option
+            @param type_: The space time dataset type
+              * "strds" Space time raster dataset
+              * "str3ds" Space time 3D raster dataset
+              * "stvds" Space time vector dataset
+    """
+    mapset = core.gisenv()["MAPSET"]
+
+    if input.find("@") >= 0:
+        id = input
+    else:
+        id = input + "@" + mapset
+
+    sp = dataset_factory(type_, id)
+
+    if sp.is_in_db() == False:
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
+
+    # Save current working directory path
+    old_cwd = os.getcwd()
+
+    # Create the temporary directory and jump into it
+    new_cwd = tempfile.mkdtemp(dir=workdir)
+    os.chdir(new_cwd)
+
+    sp.select()
+
+    if type_ == "strds":
+        columns = "name,start_time,end_time,min,max,datatype"
+    elif type_ == "stvds":
+        columns = "name,start_time,end_time,layer"
+    else:
+        columns = "name,start_time,end_time"
+    rows = sp.get_registered_maps(columns, where, "start_time", None)
+
+    if compression == "gzip":
+        flag = "w:gz"
+    elif compression == "bzip2":
+        flag = "w:bz2"
+    else:
+        flag = "w:"
+
+    # Open the tar archive to add the files
+    tar = tarfile.open(tmp_tar_file_name, flag)
+    list_file = open(list_file_name, "w")
+
+    fs = "|"
+
+    if rows:
+        if type_ == "strds":
+            if format_ == "GTiff":
+                _export_raster_maps_as_geotiff(
+                    rows, tar, list_file, new_cwd, fs)
+            else:
+                _export_raster_maps(rows, tar, list_file, new_cwd, fs)
+        elif type_ == "stvds":
+            if format_ == "GML":
+                _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs)
+            else:
+                _export_vector_maps(rows, tar, list_file, new_cwd, fs)
+        elif type_ == "str3ds":
+            _export_raster3d_maps(rows, tar, list_file, new_cwd, fs)
+
+    list_file.close()
+
+    # Write projection and metadata
+    proj = core.read_command("g.proj", flags="j")
+
+    proj_file = open(proj_file_name, "w")
+    proj_file.write(proj)
+    proj_file.close()
+
+    init_file = open(init_file_name, "w")
+    # Create the init string
+    string = ""
+     # This is optional, if not present strds will be assumed for backward 
+     # compatibility
+    string += "%s=%s\n" % ("stds_type", sp.get_type()) 
+     # This is optional, if not present gtiff will be assumed for 
+     # backward compatibility
+    string += "%s=%s\n" % ("format", format_) 
+    string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
+    string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
+    string += "%s=%s\n" % ("number_of_maps", sp.metadata.get_number_of_maps())
+    north, south, east, west, top, bottom = sp.get_spatial_extent()
+    string += "%s=%s\n" % ("north", north)
+    string += "%s=%s\n" % ("south", south)
+    string += "%s=%s\n" % ("east", east)
+    string += "%s=%s\n" % ("west", west)
+    init_file.write(string)
+    init_file.close()
+
+    metadata = core.read_command("t.info", type=type_, input=id)
+    metadata_file = open(metadata_file_name, "w")
+    metadata_file.write(metadata)
+    metadata_file.close()
+
+    read_file = open(read_file_name, "w")
+    if type_ == "strds":
+        read_file.write("This space time raster dataset was exported with "
+                        "t.rast.export of GRASS GIS 7\n")
+    elif type_ == "stvds":
+        read_file.write("This space time vector dataset was exported with "
+                        "t.vect.export of GRASS GIS 7\n")
+    elif type_ == "str3ds":
+        read_file.write("This space time 3D raster dataset was exported "
+                        "with t.rast3d.export of GRASS GIS 7\n")
+    read_file.write("\n")
+    read_file.write("Files:\n")
+    if type_ == "strds":
+        if format_ == "GTiff":
+                                #123456789012345678901234567890
+            read_file.write("       *.tif  -- GeoTIFF raster files\n")
+            read_file.write("     *.color  -- GRASS GIS raster color rules\n")
+        elif format_ == "pack":
+            read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
+    elif type_ == "stvds":
+                                #123456789012345678901234567890
+        if format_ == "GML":
+            read_file.write("       *.xml  -- Vector GML files\n")
+        else:
+            read_file.write("      *.pack  -- GRASS vector files packed with v.pack\n")
+    elif type_ == "str3ds":
+        read_file.write("      *.pack  -- GRASS 3D raster files packed with r3.pack\n")
+    read_file.write("%13s -- Projection information in PROJ.4 format\n" %
+                    (proj_file_name))
+    read_file.write("%13s -- GRASS GIS space time %s dataset information\n" %
+                    (init_file_name, sp.get_new_map_instance(None).get_type()))
+    read_file.write("%13s -- Time series file, lists all maps by name "
+                    "with interval\n" % (list_file_name))
+    read_file.write("                 time stamps in ISO-Format. Field separator is |\n")
+    read_file.write("%13s -- Projection information in PROJ.4 format\n" %
+                    (metadata_file_name))
+    read_file.write("%13s -- This file\n" % (read_file_name))
+    read_file.close()
+
+    # Append the file list
+    tar.add(list_file_name)
+    tar.add(proj_file_name)
+    tar.add(init_file_name)
+    tar.add(read_file_name)
+    tar.add(metadata_file_name)
+    tar.close()
+
+    os.chdir(old_cwd)
+
+    # Move the archive to its destination
+    shutil.move(os.path.join(new_cwd, tmp_tar_file_name), output)
+
+    # Remove the temporary created working directory
+    shutil.rmtree(new_cwd)

+ 331 - 303
lib/python/temporal/stds_import.py

@@ -19,7 +19,7 @@ link=True
 exp=True
 overr=False
 create=False
-tgis.import_stds(input, output, extrdir, title, descr, location, 
+tgis.import_stds(input, output, extrdir, title, descr, location,
                 link, exp, overr, create, "strds")
 ...
 @endcode
@@ -51,317 +51,345 @@ list_file_name = "list.txt"
 imported_maps = {}
 
 ############################################################################
+
 def _import_raster_maps_from_geotiff(maplist, overr, exp, location, link):
-	impflags = ""
-	if overr:
-		impflags += "o"
-	if exp or location:
-		impflags += "e"
-	for row in maplist:
-		name = row["name"]
-		filename = str(row["name"]) + ".tif"
-
-		if link:
-			ret = core.run_command("r.external", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite())
-		else:
-			ret = core.run_command("r.in.gdal", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite())
-
-		if ret != 0:
-			core.fatal(_("Unable to import/link raster map <%s>.") % name)
-
-		# Set the color rules if present
-		filename = str(row["name"]) + ".color"
-		if os.path.isfile(filename):
-			ret = core.run_command("r.colors", map = name,
-						rules = filename,
-						overwrite = core.overwrite())
-			if ret != 0:
-				core.fatal(_("Unable to set the color rules for raster map <%s>.") % name)
-                                                        
+    impflags = ""
+    if overr:
+        impflags += "o"
+    if exp or location:
+        impflags += "e"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".tif"
+
+        if link:
+            ret = core.run_command("r.external", input=filename,
+                                   output=name,
+                                   flags=impflags,
+                                   overwrite=core.overwrite())
+        else:
+            ret = core.run_command("r.in.gdal", input=filename,
+                                   output=name,
+                                   flags=impflags,
+                                   overwrite=core.overwrite())
+
+        if ret != 0:
+            core.fatal(_("Unable to import/link raster map <%s>.") % name)
+
+        # Set the color rules if present
+        filename = str(row["name"]) + ".color"
+        if os.path.isfile(filename):
+            ret = core.run_command("r.colors", map=name,
+                                   rules=filename,
+                                   overwrite=core.overwrite())
+            if ret != 0:
+                core.fatal(_("Unable to set the color rules for "
+                             "raster map <%s>.") % name)
+
 ############################################################################
+
 def _import_raster_maps(maplist):
-	# We need to disable the projection check because of its simple implementation
-	impflags = "o"
-	for row in maplist:
-		name = row["name"]
-		filename = str(row["name"]) + ".pack"
-		ret = core.run_command("r.unpack", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite(),
-						verbose = True)
-
-		if ret != 0:
-			core.fatal(_("Unable to unpack raster map <%s>.") % name)
+    # We need to disable the projection check because of its 
+    # simple implementation
+    impflags = "o"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".pack"
+        ret = core.run_command("r.unpack", input=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite(),
+                               verbose=True)
+
+        if ret != 0:
+            core.fatal(_("Unable to unpack raster map <%s>.") % name)
 
 ############################################################################
+
 def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
-        impflags = "o"
-        if exp or location:
-                impflags += "e"
-        for row in maplist:
-                name = row["name"]
-                filename = str(row["name"]) + ".xml"
+    impflags = "o"
+    if exp or location:
+        impflags += "e"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".xml"
+
+        ret = core.run_command("v.in.ogr", dsn=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite())
 
-                ret = core.run_command("v.in.ogr", dsn = filename,
-                                        output = name,
-                                        flags = impflags,
-                                        overwrite = core.overwrite())
+        if ret != 0:
+            core.fatal(_("Unable to import vector map <%s>.") % name)
 
-                if ret != 0:
-                        core.fatal(_("Unable to import vector map <%s>.") % name)
-                        
 ############################################################################
+
 def _import_vector_maps(maplist):
-        # We need to disable the projection check because of its simple implementation
-        impflags = "o"
-        for row in maplist:
-        	# Separate the name from the layer
-                name = row["name"].split(":")[0]
-                # Import only unique maps
-                if name in imported_maps:
-                    continue
-                filename = name + ".pack"
-                ret = core.run_command("v.unpack", input = filename,
-                                                output = name,
-                                                flags = impflags,
-                                                overwrite = core.overwrite(),
-                                                verbose = True)
-
-                if ret != 0:
-                        core.fatal(_("Unable to unpack vector map <%s>.") % name)
-                
-                imported_maps[name] = name
+    # We need to disable the projection check because of its 
+    # simple implementation
+    impflags = "o"
+    for row in maplist:
+        # Separate the name from the layer
+        name = row["name"].split(":")[0]
+        # Import only unique maps
+        if name in imported_maps:
+            continue
+        filename = name + ".pack"
+        ret = core.run_command("v.unpack", input=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite(),
+                               verbose=True)
+
+        if ret != 0:
+            core.fatal(_("Unable to unpack vector map <%s>.") % name)
+
+        imported_maps[name] = name
 ############################################################################
 
-def import_stds(input, output, extrdir, title = None, descr = None, location = None,
-                link = False, exp = False, overr = False, create = False, stds_type = "strds"):
-	"""
-		!Import space time datasets of type raster and vector
-		
-		@param input Name of the input archive file
-		@param output The name of the output space time dataset
-		@param extrdir The extraction directory
-		@param title The title of the new created space time dataset
-		@param description The description of the new created space time dataset
-		@param location The name of the location that should be created, 
-		                maps are imported into this location
-		@param link Switch to link raster maps instead importing them
-		@param exp Extend location extents based on new dataset
-		@param overr Override projection (use location's projection)
-		@param create Create the location specified by the "location" parameter and exit. 
-		              Do not import the space time datasets.
-		@param stds_type The type of the space time dataset that should be imported
-	"""
-
-	core.set_raise_on_error(True)
-
-	# Check if input file and extraction directory exits
-	if not os.path.exists(input):
-		core.fatal(_("Space time raster dataset archive <%s> not found") % input)
-	if not create and not os.path.exists(extrdir):
-		core.fatal(_("Extraction directory <%s> not found") % extrdir)
-
-	tar = tarfile.open(name = input, mode = 'r')
-
-	# Check for important files
-	members = tar.getnames()
-
-	if init_file_name not in members:
-		core.fatal(_("Unable to find init file <%s>") % init_file_name)
-	if list_file_name not in members:
-		core.fatal(_("Unable to find list file <%s>") % list_file_name)
-	if proj_file_name not in members:
-		core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
-
-	tar.extractall(path = extrdir)
-	tar.close()
-
-	# Save current working directory path
-	old_cwd = os.getcwd()
-
-	# Switch into the data directory
-	os.chdir(extrdir)
-
-	# Check projection information
-	if not location:
-		temp_name = core.tempfile()
-		temp_file = open(temp_name, "w")
-		proj_name = os.path.abspath(proj_file_name)
-
-		p = core.start_command("g.proj", flags = "j", stdout = temp_file)
-		p.communicate()
-		temp_file.close()
-
-		if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
-			if overr:
-				core.warning(_("Projection information does not match. Proceeding..."))
-			else:
-				core.fatal(_("Projection information does not match. Aborting."))
-
-	# Create a new location based on the projection information and switch into it
-	old_env = core.gisenv()
-	if location:
-		try:
-			proj4_string = open(proj_file_name, 'r').read()
-			core.create_location(dbase = old_env["GISDBASE"],
-								  location = location,
-								  proj4 = proj4_string)
-			# Just create a new location and return
-			if create:
-				os.chdir(old_cwd)
-				return
-		except Exception as e:
-				core.fatal(_("Unable to create location %s. Reason: %s") % (location, str(e)))
-		# Switch to the new created location
-		ret = core.run_command("g.mapset", mapset = "PERMANENT",
-					location = location,
-					gisdbase = old_env["GISDBASE"])
-		if ret != 0:
-			core.fatal(_("Unable to switch to location %s") % location)
-		# create default database connection
-		ret = core.run_command("t.connect", flags = "d")
-		if ret != 0:
-			core.fatal(_("Unable to create default temporal database in new location %s") % location)
-
-	try:
-		# Make sure the temporal database exists
-		create_temporal_database()
-
-		fs = "|"
-		maplist = []
-		mapset = core.gisenv()["MAPSET"]
-		list_file = open(list_file_name, "r")
-
-		# Read the map list from file
-		line_count = 0
-		while True:
-			line = list_file.readline()
-			if not line:
-				break
-
-			line_list = line.split(fs)
-
-			mapname = line_list[0].strip()
-			mapid = mapname + "@" + mapset
-
-			row = {}
-			row["name"] = mapname
-			row["id"] = mapid
-			row["start"] = line_list[1].strip()
-			row["end"] = line_list[2].strip()
-
-			maplist.append(row)
-			line_count += 1
-
-		list_file.close()
-
-		# Read the init file
-		fs = "="
-		init = {}
-		init_file = open(init_file_name, "r")
-		while True:
-			line = init_file.readline()
-			if not line:
-				break
-
-			kv = line.split(fs)
-			init[kv[0]] = kv[1].strip()
-
-		init_file.close()
-
-		if not init.has_key("temporal_type") or \
-		   not init.has_key("semantic_type") or \
-		   not init.has_key("number_of_maps"):
-			core.fatal(_("Key words %s, %s or %s not found in init file.") %
-			("temporal_type", "semantic_type", "number_of_maps"))
-
-		if line_count != int(init["number_of_maps"]):
-			core.fatal(_("Number of maps mismatch in init and list file."))
-
-		_format = "GTiff"
-		_type = "strds"
-
-		if init.has_key("stds_type"):
-			_type = init["stds_type"]
-		if init.has_key("format"):
-			_format = init["format"]
-
-		if stds_type != _type:
-			core.fatal(_("The archive file is of wrong space time dataset type"))
-
-		# Check the existence of the files 
-		if _format == "GTiff":
-			for row in maplist:
-				filename = str(row["name"]) + ".tif"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find geotiff raster file <%s> in archive.") % filename)
-		elif _format == "GML":
-			for row in maplist:
-				filename = str(row["name"]) + ".xml"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find GML vector file <%s> in archive.") % filename)
-		elif _format == "pack":
-			for row in maplist:
-				if _type == "stvds":
-					filename = str(row["name"].split(":")[0]) + ".pack"
-				else:
-					filename = str(row["name"]) + ".pack"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find GRASS package file <%s> in archive.") % filename)
-		else:
-			core.fatal(_("Unsupported input format"))
-
-		# Check the space time dataset
-		id = output + "@" + mapset
-		sp = dataset_factory(_type, id)
-		if sp.is_in_db() and core.overwrite() == False:
-			core.fatal(_("Space time %s dataset <%s> is already in the database. Use the overwrite flag.") % (_type, sp.get_id()))
-
-		# Import the maps
-		if _type == "strds":
-			if _format == "GTiff":
-				_import_raster_maps_from_geotiff(maplist, overr, exp, location, link)
-			if _format == "pack":
-				_import_raster_maps(maplist)
-                elif _type == "stvds":
-                        if _format == "GML":
-                                _import_vector_maps_from_gml(maplist, overr, exp, location, link)
-                        if _format == "pack":
-                                _import_vector_maps(maplist)
-
-		# Create the space time dataset
-		if sp.is_in_db() and core.overwrite() == True:
-			core.info(_("Overwrite space time %s dataset <%s> and unregister all maps.") % (sp.get_new_map_instance(None).get_type(), sp.get_id()))
-			sp.delete()
-			sp = sp.get_new_instance(id)
-
-		temporal_type = init["temporal_type"]
-		semantic_type = init["semantic_type"]
-		core.verbose(_("Create space time %s dataset.") % sp.get_new_map_instance(None).get_type())
-
-		sp.set_initial_values(temporal_type = temporal_type, semantic_type = semantic_type, title = title, description = descr)
-		sp.insert()
-
-		# register the maps
-		fs = "|"
-		register_maps_in_space_time_dataset(type = sp.get_new_map_instance(None).get_type(),
-					 name = output, file = list_file_name, start = "file", end = "file", dbif = None, fs = fs)
-
-		os.chdir(old_cwd)
-	except:
-		raise
-
-	# Make sure the location is switched back correctly
-	finally:
-		if location:
-			# Switch to the old location
-			ret = core.run_command("g.mapset", mapset = old_env["MAPSET"],
-						location = old_env["LOCATION_NAME"],
-						gisdbase = old_env["GISDBASE"])
+def import_stds(
+    input, output, extrdir, title=None, descr=None, location=None,
+        link=False, exp=False, overr=False, create=False, stds_type="strds"):
+    """!Import space time datasets of type raster and vector
+
+        @param input: Name of the input archive file
+        @param output: The name of the output space time dataset
+        @param extrdir: The extraction directory
+        @param title: The title of the new created space time dataset
+        @param descr: The description of the newly created
+                      space time dataset
+        @param location: The name of the location that should be created,
+                        maps are imported into this location
+        @param link: Switch to link raster maps instead of importing them
+        @param exp: Extend location extents based on new dataset
+        @param overr: Override projection (use location's projection)
+        @param create: Create the location specified by the "location" 
+                      parameter and exit.
+                      Do not import the space time datasets.
+        @param stds_type: The type of the space time dataset that 
+                          should be imported
+    """
+
+    core.set_raise_on_error(True)
+
+    # Check if the input file and the extraction directory exist
+    if not os.path.exists(input):
+        core.fatal(_("Space time raster dataset archive <%s> not found")
+                   % input)
+    if not create and not os.path.exists(extrdir):
+        core.fatal(_("Extraction directory <%s> not found") % extrdir)
+
+    tar = tarfile.open(name=input, mode='r')
+
+    # Check for important files
+    members = tar.getnames()
+
+    if init_file_name not in members:
+        core.fatal(_("Unable to find init file <%s>") % init_file_name)
+    if list_file_name not in members:
+        core.fatal(_("Unable to find list file <%s>") % list_file_name)
+    if proj_file_name not in members:
+        core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
+
+    tar.extractall(path=extrdir)
+    tar.close()
+
+    # Save current working directory path
+    old_cwd = os.getcwd()
+
+    # Switch into the data directory
+    os.chdir(extrdir)
+
+    # Check projection information
+    if not location:
+        temp_name = core.tempfile()
+        temp_file = open(temp_name, "w")
+        proj_name = os.path.abspath(proj_file_name)
+
+        p = core.start_command("g.proj", flags="j", stdout=temp_file)
+        p.communicate()
+        temp_file.close()
+
+        if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
+            if overr:
+                core.warning(_("Projection information does not match. "
+                               "Proceeding..."))
+            else:
+                core.fatal(_("Projection information does not match. Aborting."))
+
+    # Create a new location based on the projection information and switch into it
+    old_env = core.gisenv()
+    if location:
+        try:
+            proj4_string = open(proj_file_name, 'r').read()
+            core.create_location(dbase=old_env["GISDBASE"],
+                                 location=location,
+                                 proj4=proj4_string)
+            # Just create a new location and return
+            if create:
+                os.chdir(old_cwd)
+                return
+        except Exception as e:
+            core.fatal(_("Unable to create location %s. Reason: %s")
+                       % (location, str(e)))
+        # Switch to the new created location
+        ret = core.run_command("g.mapset", mapset="PERMANENT",
+                               location=location,
+                               gisdbase=old_env["GISDBASE"])
+        if ret != 0:
+            core.fatal(_("Unable to switch to location %s") % location)
+        # create default database connection
+        ret = core.run_command("t.connect", flags="d")
+        if ret != 0:
+            core.fatal(_("Unable to create default temporal database "
+                         "in new location %s") % location)
+
+    try:
+        # Make sure the temporal database exists
+        create_temporal_database()
+
+        fs = "|"
+        maplist = []
+        mapset = core.gisenv()["MAPSET"]
+        list_file = open(list_file_name, "r")
+
+        # Read the map list from file
+        line_count = 0
+        while True:
+            line = list_file.readline()
+            if not line:
+                break
+
+            line_list = line.split(fs)
+
+            mapname = line_list[0].strip()
+            mapid = mapname + "@" + mapset
+
+            row = {}
+            row["name"] = mapname
+            row["id"] = mapid
+            row["start"] = line_list[1].strip()
+            row["end"] = line_list[2].strip()
+
+            maplist.append(row)
+            line_count += 1
+
+        list_file.close()
+
+        # Read the init file
+        fs = "="
+        init = {}
+        init_file = open(init_file_name, "r")
+        while True:
+            line = init_file.readline()
+            if not line:
+                break
+
+            kv = line.split(fs)
+            init[kv[0]] = kv[1].strip()
+
+        init_file.close()
+
+        if "temporal_type" not in init or \
+           "semantic_type" not in init or \
+           "number_of_maps" not in init:
+            core.fatal(_("Key words %s, %s or %s not found in init file.") %
+                       ("temporal_type", "semantic_type", "number_of_maps"))
+
+        if line_count != int(init["number_of_maps"]):
+            core.fatal(_("Number of maps mismatch in init and list file."))
+
+        _format = "GTiff"
+        _type = "strds"
+
+        if "stds_type" in init:
+            _type = init["stds_type"]
+        if "format" in init:
+            _format = init["format"]
+
+        if stds_type != _type:
+            core.fatal(_("The archive file is of wrong space time dataset type"))
+
+        # Check the existence of the files
+        if _format == "GTiff":
+            for row in maplist:
+                filename = str(row["name"]) + ".tif"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find geotiff raster file "
+                                 "<%s> in archive.") % filename)
+        elif _format == "GML":
+            for row in maplist:
+                filename = str(row["name"]) + ".xml"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find GML vector file "
+                                 "<%s> in archive.") % filename)
+        elif _format == "pack":
+            for row in maplist:
+                if _type == "stvds":
+                    filename = str(row["name"].split(":")[0]) + ".pack"
+                else:
+                    filename = str(row["name"]) + ".pack"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find GRASS package file "
+                                 "<%s> in archive.") % filename)
+        else:
+            core.fatal(_("Unsupported input format"))
+
+        # Check the space time dataset
+        id = output + "@" + mapset
+        sp = dataset_factory(_type, id)
+        if sp.is_in_db() and core.overwrite() == False:
+            core.fatal(_("Space time %s dataset <%s> is already in the "
+                         "database. Use the overwrite flag.") % \
+                        (_type, sp.get_id()))
+
+        # Import the maps
+        if _type == "strds":
+            if _format == "GTiff":
+                _import_raster_maps_from_geotiff(
+                    maplist, overr, exp, location, link)
+            if _format == "pack":
+                _import_raster_maps(maplist)
+        elif _type == "stvds":
+            if _format == "GML":
+                _import_vector_maps_from_gml(
+                    maplist, overr, exp, location, link)
+            if _format == "pack":
+                _import_vector_maps(maplist)
+
+        # Create the space time dataset
+        if sp.is_in_db() and core.overwrite() == True:
+            core.info(_("Overwrite space time %s dataset "
+                        "<%s> and unregister all maps.") % \
+                       (sp.get_new_map_instance(None).get_type(), sp.get_id()))
+            sp.delete()
+            sp = sp.get_new_instance(id)
+
+        temporal_type = init["temporal_type"]
+        semantic_type = init["semantic_type"]
+        core.verbose(_("Create space time %s dataset.") %
+                     sp.get_new_map_instance(None).get_type())
+
+        sp.set_initial_values(temporal_type=temporal_type, 
+                              semantic_type=semantic_type, title=title, 
+                              description=descr)
+        sp.insert()
+
+        # register the maps
+        fs = "|"
+        register_maps_in_space_time_dataset(
+            type=sp.get_new_map_instance(None).get_type(),
+            name=output, file=list_file_name, start="file", 
+            end="file", dbif=None, fs=fs)
+
+        os.chdir(old_cwd)
+    except:
+        raise
+
+    # Make sure the location is switched back correctly
+    finally:
+        if location:
+            # Switch to the old location
+            ret = core.run_command("g.mapset", mapset=old_env["MAPSET"],
+                                   location=old_env["LOCATION_NAME"],
+                                   gisdbase=old_env["GISDBASE"])

+ 147 - 58
lib/python/temporal/temporal_extent.py

@@ -6,11 +6,15 @@ Temporal GIS related temporal extent functions to be used in Python scripts and
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> from datetime import datetime
 >>> t = tgis.RasterRelativeTime()
 >>> t = tgis.RasterAbsoluteTime()
 
+@endcode
+
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -34,8 +38,9 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.AbstractTemporalExtent(table="raster_absolute_time",
+        @code
+        
+        >>> A = AbstractTemporalExtent(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=datetime(2001, 01, 01),
         ... end_time=datetime(2005,01,01) )
         >>> A.id
@@ -51,7 +56,7 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
         start_time=2001-01-01 00:00:00
         end_time=2005-01-01 00:00:00
         >>> # relative time
-        >>> A = tgis.AbstractTemporalExtent(table="raster_absolute_time",
+        >>> A = AbstractTemporalExtent(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=0, end_time=1 )
         >>> A.id
         'soil@PERMANENT'
@@ -65,6 +70,8 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
         >>> A.print_shell_info()
         start_time=0
         end_time=1
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None):
 
@@ -77,21 +84,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def starts(self, extent):
         """!Return True if this temporal extent (A) starts at the start of the 
            provided temporal extent (B) and finishes within it
+           @verbatim
            A  |-----|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent starts
            
            Usage:
            
+           @code
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=6 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.starts(B)
            True
            >>> B.starts(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -105,20 +116,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def started(self, extent):
         """!Return True if this temporal extent (A) started at the start of the 
            provided temporal extent (B) and finishes after it
+           @verbatim
            A  |---------|
            B  |-----|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent started
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=6 )
            >>> A.started(B)
            True
            >>> B.started(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -132,20 +148,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def finishes(self, extent):
         """!Return True if this temporal extent (A) starts after the start of the 
            provided temporal extent (B) and finishes with it
+           @verbatim
            A      |-----|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent finishes
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.finishes(B)
            True
            >>> B.finishes(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -159,20 +180,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def finished(self, extent):
         """!Return True if this temporal extent (A) starts before the start of the 
            provided temporal extent (B) and finishes with it
+           @verbatim
            A  |---------|
            B      |-----|
+           @endverbatim
            
            @param extent: The temporal extent object with which this extent finishes
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=7 )
            >>> A.finished(B)
            True
            >>> B.finished(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -186,20 +212,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def after(self, extent):
         """!Return True if this temporal extent (A) is located after the  
            provided temporal extent (B)
+           @verbatim
            A             |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is located before this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=8, end_time=9 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=8, end_time=9 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=7 )
            >>> A.after(B)
            True
            >>> B.after(A)
            False
+           
+           @endcode
         """
         if extent.D["end_time"] is None:
             if self.D["start_time"] > extent.D["start_time"]:
@@ -215,20 +246,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def before(self, extent):
         """!Return True if this temporal extent (A) is located before the  
            provided temporal extent (B)
+           @verbatim
            A  |---------|
            B             |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is located after this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=8, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=8, end_time=9 )
            >>> A.before(B)
            True
            >>> B.before(A)
            False
+           
+           @endcode
         """
         if self.D["end_time"] is None:
             if self.D["start_time"] < extent.D["start_time"]:
@@ -244,29 +280,34 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def adjacent(self, extent):
         """!Return True if this temporal extent (A) is a meeting neighbor the 
            provided temporal extent (B)
+           @verbatim
            A            |---------|
            B  |---------|
            A  |---------|
            B            |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is a meeting neighbor
                           of this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=7, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=7, end_time=9 )
            >>> A.adjacent(B)
            True
            >>> B.adjacent(A)
            True
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=3, end_time=5 )
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=3, end_time=5 )
            >>> A.adjacent(B)
            True
            >>> B.adjacent(A)
            True
+           
+           @endcode
         """
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
             return False
@@ -280,19 +321,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def follows(self, extent):
         """!Return True if this temporal extent (A) follows the  
            provided temporal extent (B)
+           @verbatim
            A            |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is the predecessor
                           of this extent
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=3, end_time=5 )
+           Usage:
+           
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=3, end_time=5 )
            >>> A.follows(B)
            True
            >>> B.follows(A)
            False
+           
+           @endcode
         """
         if  extent.D["end_time"] is None:
             return False
@@ -305,21 +353,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def precedes(self, extent):
         """!Return True if this temporal extent (A) precedes the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B            |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is the successor
                           of this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=7, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=7, end_time=9 )
            >>> A.precedes(B)
            True
            >>> B.precedes(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None:
             return False
@@ -332,20 +385,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def during(self, extent):
         """!Return True if this temporal extent (A) is located during the provided 
            temporal extent (B)
+           @verbatim
            A   |-------|
            B  |---------|
-           
+           @endverbatim
+                      
            @param extent: The temporal extent object that contains this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=4, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=4, end_time=9 )
            >>> A.during(B)
            True
            >>> B.during(A)
            False
+           
+           @endcode
         """
         # Check single point of time in interval
         if  extent.D["end_time"] is None:
@@ -368,21 +426,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def contains(self, extent):
         """!Return True if this temporal extent (A) contains the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B   |-------|
+           @endverbatim
            
            @param extent: The temporal extent object that is located 
                           during this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=4, end_time=9 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=8 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=4, end_time=9 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=8 )
            >>> A.contains(B)
            True
            >>> B.contains(A)
            False
+           
+           @endcode
         """
         # Check single point of time in interval
         if  self.D["end_time"] is None:
@@ -405,21 +468,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def equivalent(self, extent):
         """!Return True if this temporal extent (A) is equivalent to the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B  |---------|
+           @endverbatim
            
           @param extent: The temporal extent object that is equivalent
                          to this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=6 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=6 )
            >>> A.equivalent(B)
            True
            >>> B.equivalent(A)
            True
+           
+           @endcode
         """
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
             if self.D["start_time"] == extent.D["start_time"]:
@@ -439,21 +507,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def overlapped(self, extent):
         """!Return True if this temporal extent (A) overlapped the provided 
            temporal extent (B)
+           @verbatim
            A  |---------|
            B    |---------|
-           
+           @endverbatim
           @param extent: The temporal extent object that overlaps
                          this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=8 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=8 )
            >>> A.overlapped(B)
            True
            >>> B.overlapped(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -468,21 +540,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def overlaps(self, extent):
         """!Return True if this temporal extent (A) overlapps the provided 
            temporal extent (B)
+           @verbatim
            A    |---------|
            B  |---------|
+           @endverbatim
            
            @param extent: The temporal extent object that is overlapped 
                           this extent
            
            Usage:
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=8 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=8 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.overlaps(B)
            True
            >>> B.overlaps(A)
            False
+           
+           @endcode
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
@@ -663,17 +740,20 @@ class AbsoluteTemporalExtent(AbstractTemporalExtent):
 ###############################################################################
 
 class RasterAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "raster_absolute_time",
             ident, start_time, end_time, timezone)
 
 class Raster3DAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "raster3d_absolute_time",
             ident, start_time, end_time, timezone)
 
 class VectorAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "vector_absolute_time",
             ident, start_time, end_time, timezone)
 
@@ -687,8 +767,9 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
         
         Usage:
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.STDSAbsoluteTime(table="strds_absolute_time",
+        @code
+        
+        >>> A = STDSAbsoluteTime(table="strds_absolute_time",
         ... ident="strds@PERMANENT", start_time=datetime(2001, 01, 01),
         ... end_time=datetime(2005,01,01), granularity="1 days",
         ... map_time="interval")
@@ -713,6 +794,8 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
         end_time=2005-01-01 00:00:00
         granularity=1 days
         map_time=interval
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  granularity=None, timezone=None, map_time=None):
@@ -808,9 +891,10 @@ class RelativeTemporalExtent(AbstractTemporalExtent):
         start_time and end_time must be of type integer
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> A = tgis.RelativeTemporalExtent(table="raster_absolute_time",
+        >>> A = RelativeTemporalExtent(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=0, end_time=1, unit="years")
         >>> A.id
         'soil@PERMANENT'
@@ -829,6 +913,8 @@ class RelativeTemporalExtent(AbstractTemporalExtent):
         start_time=0
         end_time=1
         unit=years
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  unit=None):
@@ -916,9 +1002,10 @@ class STDSRelativeTime(RelativeTemporalExtent):
         start_time and end_time must be of type integer
        
         Usage:
+        
+        @code
        
-        >>> import grass.temporal as tgis
-        >>> A = tgis.STDSRelativeTime(table="raster_absolute_time",
+        >>> A = STDSRelativeTime(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=0, end_time=1, unit="years",
         ... granularity=5, map_time="interval")
         >>> A.id
@@ -946,6 +1033,8 @@ class STDSRelativeTime(RelativeTemporalExtent):
         unit=years
         granularity=5
         map_time=interval
+        
+        @endcode
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  unit=None, granularity=None, map_time=None):

+ 85 - 79
lib/python/temporal/temporal_granularity.py

@@ -25,14 +25,14 @@ from datetime_math import *
 
 ###############################################################################
 
-def compute_relative_time_granularity(maps):            
+
+def compute_relative_time_granularity(maps):
     """!Compute the relative time granularity
-        
-        Attention: The computation of the granularity is only correct in case of not
-        overlapping intervals. Hence a correct temporal topology is required for
-        computation.
-    
-	
+
+        Attention: The computation of the granularity 
+        is only correct in case of not overlapping intervals. 
+        Hence a correct temporal topology is required for computation.
+
         @param maps: an ordered by start_time list of map objects
     """
 
@@ -44,7 +44,7 @@ def compute_relative_time_granularity(maps):
     for map in maps:
         start, end = map.get_valid_time()
         if start and end:
-            t =  abs(end - start)
+            t = abs(end - start)
             delta.append(int(t))
 
     # Compute the timedelta of the gaps
@@ -54,12 +54,13 @@ def compute_relative_time_granularity(maps):
             if relation == "after":
                 start1, end1 = maps[i].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
-                # Gaps are between intervals, intervals and points, points and points
+                # Gaps are between intervals, intervals and 
+                # points, points and points
                 if end1 and start2:
-                    t =  abs(end1 - start2)
+                    t = abs(end1 - start2)
                     delta.append(int(t))
-                if  not end1 and start2:
-                    t =  abs(start1 - start2)
+                if not end1 and start2:
+                    t = abs(start1 - start2)
                     delta.append(int(t))
 
     delta.sort()
@@ -76,16 +77,16 @@ def compute_relative_time_granularity(maps):
 
 ###############################################################################
 
-def compute_absolute_time_granularity(maps):                  
+
+def compute_absolute_time_granularity(maps):
     """!Compute the absolute time granularity
-        
-        Attention: The computation of the granularity is only correct in case of not
-        overlapping intervals. Hence a correct temporal topology is required for
-        computation.
-    
-	
+
+        Attention: The computation of the granularity 
+        is only correct in case of not overlapping intervals. 
+        Hence a correct temporal topology is required for computation.
+
         @param maps: a ordered by start_time list of map objects
-    """     
+    """
 
     has_seconds = False
     has_minutes = False
@@ -117,83 +118,85 @@ def compute_absolute_time_granularity(maps):
             if relation == "after":
                 start1, end1 = maps[i].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
-                # Gaps are between intervals, intervals and points, points and points
+                # Gaps are between intervals, intervals and 
+                # points, points and points
                 if end1 and start2:
                     delta.append(end1 - start2)
                     datetime_delta.append(compute_datetime_delta(end1, start2))
-                if  not end1 and start2:
+                if not end1 and start2:
                     delta.append(start2 - start1)
-                    datetime_delta.append(compute_datetime_delta(start1, start2))
+                    datetime_delta.append(compute_datetime_delta(
+                        start1, start2))
 
     # Check what changed
     dlist = []
     for d in datetime_delta:
-        if d.has_key("second") and d["second"] > 0:
+        if "second" in d and d["second"] > 0:
             has_seconds = True
-        if d.has_key("minute") and d["minute"] > 0:
+        if "minute" in d and d["minute"] > 0:
             has_minutes = True
-        if d.has_key("hour") and d["hour"] > 0:
+        if "hour" in d and d["hour"] > 0:
             has_hours = True
-        if d.has_key("day") and d["day"] > 0:
+        if "day" in d and d["day"] > 0:
             has_days = True
-        if d.has_key("month") and d["month"] > 0:
+        if "month" in d and d["month"] > 0:
             has_months = True
-        if d.has_key("year") and d["year"] > 0:
+        if "year" in d and d["year"] > 0:
             has_years = True
 
     # Create a list with a single time unit only
     if has_seconds:
         for d in datetime_delta:
-            if d.has_key("second"):
-                dlist.append(d["second"])   
-            elif d.has_key("minute"):
-                dlist.append(d["minute"] * 60)   
-            elif d.has_key("hour"):
-                dlist.append(d["hour"] * 3600)   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24 * 3600)   
+            if "second" in d:
+                dlist.append(d["second"])
+            elif "minute" in d:
+                dlist.append(d["minute"] * 60)
+            elif "hour" in d:
+                dlist.append(d["hour"] * 3600)
+            elif "day" in d:
+                dlist.append(d["day"] * 24 * 3600)
             else:
-                dlist.append(d["max_days"] * 24 * 3600)   
-        use_seconds = True        
+                dlist.append(d["max_days"] * 24 * 3600)
+        use_seconds = True
     elif has_minutes:
         for d in datetime_delta:
-            if d.has_key("minute"):
-                dlist.append(d["minute"])   
-            elif d.has_key("hour"):
-                dlist.append(d["hour"] * 60)   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24 * 60)   
+            if "minute" in d:
+                dlist.append(d["minute"])
+            elif "hour" in d:
+                dlist.append(d["hour"] * 60)
+            elif "day" in d:
+                dlist.append(d["day"] * 24 * 60)
             else:
-                dlist.append(d["max_days"] * 24 * 60)   
-        use_minutes = True        
+                dlist.append(d["max_days"] * 24 * 60)
+        use_minutes = True
     elif has_hours:
         for d in datetime_delta:
-            if d.has_key("hour"):
-                dlist.append(d["hour"])   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24)   
+            if "hour" in d:
+                dlist.append(d["hour"])
+            elif "day" in d:
+                dlist.append(d["day"] * 24)
             else:
-                dlist.append(d["max_days"] * 24)   
-        use_hours = True        
+                dlist.append(d["max_days"] * 24)
+        use_hours = True
     elif has_days:
         for d in datetime_delta:
-            if d.has_key("day"):
-                dlist.append(d["day"])   
+            if "day" in d:
+                dlist.append(d["day"])
             else:
-                dlist.append(d["max_days"])   
-        use_days = True        
+                dlist.append(d["max_days"])
+        use_days = True
     elif has_months:
         for d in datetime_delta:
-            if d.has_key("month"):
-                dlist.append(d["month"])   
-            elif d.has_key("year"):
-                dlist.append(d["year"] * 12)   
-        use_months = True        
+            if "month" in d:
+                dlist.append(d["month"])
+            elif "year" in d:
+                dlist.append(d["year"] * 12)
+        use_months = True
     elif has_years:
         for d in datetime_delta:
-            if d.has_key("year"):
-                dlist.append(d["year"])   
-        use_years = True        
+            if "year" in d:
+                dlist.append(d["year"])
+        use_years = True
 
     dlist.sort()
     ulist = list(set(dlist))
@@ -229,20 +232,23 @@ def compute_absolute_time_granularity(maps):
 #  See http://www.opensource.org/licenses/mit-license.php
 # Error Codes:
 #   None
-def gcd(a,b):
-	"""!The Euclidean Algorithm """
-	a = abs(a)
-	b = abs(b)
-        while a:
-                a, b = b%a, a
-        return b
-        
+
+
+def gcd(a, b):
+    """!The Euclidean Algorithm """
+    a = abs(a)
+    b = abs(b)
+    while a:
+        a, b = b % a, a
+    return b
+
 ###############################################################################
 
+
 def gcd_list(list):
-	"""!Finds the GCD of numbers in a list.
-	Input: List of numbers you want to find the GCD of
-		E.g. [8, 24, 12]
-	Returns: GCD of all numbers
-	"""
-	return reduce(gcd, list)
+    """!Finds the GCD of numbers in a list.
+    Input: List of numbers you want to find the GCD of
+            E.g. [8, 24, 12]
+    Returns: GCD of all numbers
+    """
+    return reduce(gcd, list)

+ 303 - 272
lib/python/temporal/temporal_relationships.py

@@ -25,43 +25,45 @@ from datetime_math import *
 
 ###############################################################################
 
+
 class temporal_topology_builder(object):
-    """!This class is designed to build the temporal topology based on a lists of maps
-    
-	Example:
-	@code
-	# We have a space time raster dataset and build a map list
-	# from all registered maps ordered by start time
-	maps = strds.get_registered_maps_as_objects()
-	
-	# Now lets build the temporal topology of the maps in the list
-	tb = temporal_topology_builder()
-	tb.build(maps)
-	
-	for _map in tb:
-	    _map.print_temporal_topology_info()
-	    _follows = _map.get_follows()
-	    if _follows:
-		for f in _follows:
-		    f.print_temporal_topology_info()
-	    
-	# Using the next and previous methods, we can iterate over the 
-	# topological related maps in this way
-	
-	_first = tb.get_first()
-	
-	while _first:
-	    _first.print_temporal_topology_info()
-	    _first = _first.next()
-	
-	# Dictionary like accessed
-	_map = tb["name@mapset"]
-	@endcode
-    
+    """!This class is designed to build the temporal topology 
+       based on a list of maps
+
+        Example:
+        @code
+        # We have a space time raster dataset and build a map list
+        # from all registered maps ordered by start time
+        maps = strds.get_registered_maps_as_objects()
+
+        # Now lets build the temporal topology of the maps in the list
+        tb = temporal_topology_builder()
+        tb.build(maps)
+
+        for _map in tb:
+            _map.print_temporal_topology_info()
+            _follows = _map.get_follows()
+            if _follows:
+                for f in _follows:
+                    f.print_temporal_topology_info()
+
+        # Using the next and previous methods, we can iterate over the
+        # topological related maps in this way
+
+        _first = tb.get_first()
+
+        while _first:
+            _first.print_temporal_topology_info()
+            _first = _first.next()
+
+        # Dictionary like accessed
+        _map = tb["name@mapset"]
+        @endcode
+
     """
     def __init__(self):
-	self._reset()
-        
+        self._reset()
+
     def _reset(self):
         self._store = {}
         self._first = None
@@ -69,192 +71,214 @@ class temporal_topology_builder(object):
 
     def _set_first(self, first):
         self._first = first
-        self._insert(first)        
-        
+        self._insert(first)
+
     def _detect_first(self):
-	if len(self) > 0:
-	    _prev = self._store.values()[0]
-	    while _prev != None:
-		self._first = _prev
-		_prev = _prev.prev()
-		
+        if len(self) > 0:
+            prev_ = self._store.values()[0]
+            while prev_ is not None:
+                self._first = prev_
+                prev_ = prev_.temporal_prev()
+
     def _insert(self, t):
         self._store[t.get_id()] = t
-        
+
     def get_first(self):
-	"""!Return the first map with the earliest start time
-	
-	   @return The map with the earliest start time
-	"""
-	return self._first
+        """!Return the first map with the earliest start time
+
+           @return The map with the earliest start time
+        """
+        return self._first
 
     def _build_internal_iteratable(self, maps):
-	"""!Build an iteratable temporal topology structure for all maps in the list and store the maps internally
-	
-	   Basically the "next" and "prev" relations will be set in the temporal topology structure of each map
-	   The maps will be added to the object, so they can be accessed using the iterator of this class
-	   
-	   @param maps: A sorted (by start_time)list of abstract_dataset objects with initiated temporal extent
-	"""
-	self._build_iteratable(maps)
-
-	for _map in maps:
-	    self._insert(_map)
-	
-	# Detect the first map
-	self._detect_first()
-	
+        """!Build an iteratable temporal topology structure for all maps in 
+           the list and store the maps internally
+
+           Basically the "next" and "prev" relations will be set in the 
+           temporal topology structure of each map
+           The maps will be added to the object, so they can be 
+           accessed using the iterator of this class
+
+           @param maps: A sorted (by start_time) list of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        self._build_iteratable(maps)
+
+        for _map in maps:
+            self._insert(_map)
+
+        # Detect the first map
+        self._detect_first()
+
     def _build_iteratable(self, maps):
-	"""!Build an iteratable temporal topology structure for all maps in the list
-	
-	   Basically the "next" and "prev" relations will be set in the temporal topology structure of each map.
-	   
-	   @param maps: A sorted (by start_time)list of abstract_dataset objects with initiated temporal extent
-	"""
-	for i in xrange(len(maps)):
-	    offset = i + 1
-	    for j in xrange(offset, len(maps)):		
-		# Get the temporal relationship
-		relation = maps[j].temporal_relation(maps[i])
-		
-		# Build the next reference
-		if relation != "equivalent" and relation != "started":
-		    maps[i].set_next(maps[j])
-		    break
-		
-	for _map in maps:
-	    _next = _map.next()
-	    if _next:
-		_next.set_prev(_map)
-	    _map.set_temporal_topology_build_true()
-	
+        """!Build an iteratable temporal topology structure for 
+           all maps in the list
+
+           Basically the "next" and "prev" relations will be set in 
+           the temporal topology structure of each map.
+
+           @param maps: A sorted (by start_time) list of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        for i in xrange(len(maps)):
+            offset = i + 1
+            for j in xrange(offset, len(maps)):
+                # Get the temporal relationship
+                relation = maps[j].temporal_relation(maps[i])
+
+                # Build the next reference
+                if relation != "equivalent" and relation != "started":
+                    maps[i].set_temporal_next(maps[j])
+                    break
+
+        for map_ in maps:
+            next_ = map_.temporal_next()
+            if next_:
+                next_.set_temporal_prev(map_)
+            map_.set_temporal_topology_build_true()
+
     def build2(self, mapsA, mapsB):
-	"""!Build the temporal topology structure between two ordered lists of maps
-	
-	   This method builds the temporal topology from mapsA to mapsB and vice verse.
-	   The temporal topology structure of each map, defined in class temporal_map_relations,
-	   will be reseted and rebuild for mapsA and mapsB. 
-	   
-	   After building the temporal topology the modified map objects of mapsA can be accessed 
-	   in the same way as a dictionary using there id. The implemented iterator assures 
-	   the chronological iteration over the mapsA.	    
-	   
-	   @param mapsA: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	   @param mapsB: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	"""
-	
-	if mapsA == mapsB:
-	    self.build(mapsA, True)
-	    return
-	
-	for _map in mapsA:
-	    _map.reset_temporal_topology()
-	    
-	for _map in mapsB:
-	    _map.reset_temporal_topology()
-	
-	for i in xrange(len(mapsA)):
-	    for j in xrange(len(mapsB)):
-		
-		# Get the temporal relationship
-		relation = mapsB[j].temporal_relation(mapsA[i])
-		
-		if relation == "before":
-		    continue
-			    
-		if relation == "equivalent":
-		    mapsB[j].append_equivalent(mapsA[i])
-		    mapsA[i].append_equivalent(mapsB[j])
-		elif relation == "follows":
-		    mapsB[j].append_follows(mapsA[i])
-		    mapsA[i].append_precedes(mapsB[j])
-		elif relation == "precedes":
-		    mapsB[j].append_precedes(mapsA[i])
-		    mapsA[i].append_follows(mapsB[j])
-		elif relation == "during" or relation == "starts" or relation == "finishes":
-		    mapsB[j].append_during(mapsA[i])
-		    mapsA[i].append_contains(mapsB[j])
-		elif relation == "contains" or relation == "started" or relation == "finished":
-		    mapsB[j].append_contains(mapsA[i])
-		    mapsA[i].append_during(mapsB[j])
-		elif relation == "overlaps":
-		    mapsB[j].append_overlaps(mapsA[i])
-		    mapsA[i].append_overlapped(mapsB[j])
-		elif relation == "overlapped":
-		    mapsB[j].append_overlapped(mapsA[i])
-		    mapsA[i].append_overlaps(mapsB[j])
-
-		# Break if the next map follows and the over-next maps is after
-		if relation == "follows":
-		    if j < len(mapsB) - 1:
-			relation = mapsB[j + 1].temporal_relation(mapsA[i])
-			if relation == "after":
-			    break
-		# Break if the the next map is after
-		if relation == "after":
-		    break 
-	
-	self._build_internal_iteratable(mapsA)
-	self._build_iteratable(mapsB)
-			    
+        """!Build the temporal topology structure between 
+           two ordered lists of maps
+
+           This method builds the temporal topology from mapsA to 
+           mapsB and vice versa. The temporal topology structure of each map, 
+           defined in class temporal_map_relations,
+           will be reset and rebuilt for mapsA and mapsB.
+
+           After building the temporal topology the modified 
+           map objects of mapsA can be accessed
+           in the same way as a dictionary using their id. 
+           The implemented iterator assures
+           the chronological iteration over the mapsA.
+
+           @param mapsA: A sorted list (by start_time) of abstract_dataset 
+                         objects with initiated temporal extent
+           @param mapsB: A sorted list (by start_time) of abstract_dataset 
+                         objects with initiated temporal extent
+        """
+
+        if mapsA == mapsB:
+            self.build(mapsA, True)
+            return
+
+        for map_ in mapsA:
+            map_.reset_temporal_topology()
+
+        for map_ in mapsB:
+            map_.reset_temporal_topology()
+
+        for i in xrange(len(mapsA)):
+            for j in xrange(len(mapsB)):
+
+                # Get the temporal relationship
+                relation = mapsB[j].temporal_relation(mapsA[i])
+
+                if relation == "before":
+                    continue
+
+                if relation == "equivalent":
+                    mapsB[j].append_temporal_equivalent(mapsA[i])
+                    mapsA[i].append_temporal_equivalent(mapsB[j])
+                elif relation == "follows":
+                    mapsB[j].append_temporal_follows(mapsA[i])
+                    mapsA[i].append_temporal_precedes(mapsB[j])
+                elif relation == "precedes":
+                    mapsB[j].append_temporal_precedes(mapsA[i])
+                    mapsA[i].append_temporal_follows(mapsB[j])
+                elif relation == "during" or relation == "starts" or \
+                     relation == "finishes":
+                    mapsB[j].append_temporal_during(mapsA[i])
+                    mapsA[i].append_temporal_contains(mapsB[j])
+                elif relation == "contains" or relation == "started" or \
+                     relation == "finished":
+                    mapsB[j].append_temporal_contains(mapsA[i])
+                    mapsA[i].append_temporal_during(mapsB[j])
+                elif relation == "overlaps":
+                    mapsB[j].append_temporal_overlaps(mapsA[i])
+                    mapsA[i].append_temporal_overlapped(mapsB[j])
+                elif relation == "overlapped":
+                    mapsB[j].append_temporal_overlapped(mapsA[i])
+                    mapsA[i].append_temporal_overlaps(mapsB[j])
+
+                # Break if the next map follows and the over-next maps is after
+                if relation == "follows":
+                    if j < len(mapsB) - 1:
+                        relation = mapsB[j + 1].temporal_relation(mapsA[i])
+                        if relation == "after":
+                            break
+                # Break if the next map is after
+                if relation == "after":
+                    break
+
+        self._build_internal_iteratable(mapsA)
+        self._build_iteratable(mapsB)
+
     def build(self, maps):
-	"""!Build the temporal topology structure
-	
-	   This method builds the temporal topology based on all maps in the provided map list.
-	   The temporal topology structure of each map, defined in class temporal_map_relations,
-	   will be reseted and rebuild. 
-	   
-	   After building the temporal topology the modified map objects can be accessed 
-	   in the same way as a dictionary using there id. The implemented iterator assures 
-	   the chronological iteration over the maps.	   
-	   
-	   @param maps: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	"""
-	for _map in maps:
-	    _map.reset_temporal_topology()
-	
-	for i in xrange(len(maps)):
-	    offset = i + 1
-	    for j in xrange(offset, len(maps)):
-		
-		# Get the temporal relationship
-		relation = maps[j].temporal_relation(maps[i])
-			    
-		# The start time of map j is equal or later than map i
-		if relation == "equivalent":
-		    maps[j].append_equivalent(maps[i])
-		    maps[i].append_equivalent(maps[j])
-		elif relation == "follows":
-		    maps[j].append_follows(maps[i])
-		    maps[i].append_precedes(maps[j])
-		elif relation == "during" or relation == "starts" or relation == "finishes":
-		    maps[j].append_during(maps[i])
-		    maps[i].append_contains(maps[j])
-		elif relation == "started":
-		    # Consider equal start time, in case "started" map j contains map i
-		    maps[j].append_contains(maps[i])
-		    maps[i].append_during(maps[j])
-		elif relation == "overlaps":
-		    maps[j].append_overlaps(maps[i])
-		    maps[i].append_overlapped(maps[j])
-
-		# Break if the last map follows
-		if relation == "follows":
-		    if j < len(maps) - 1:
-			relation = maps[j + 1].temporal_relation(maps[i])
-			if relation == "after":
-			    break
-		# Break if the the next map is after
-		if relation == "after":
-		    break 
-		    
-	self._build_internal_iteratable(maps)
-	
+        """!Build the temporal topology structure
+
+           This method builds the temporal topology based on 
+           all maps in the provided map list.
+           The temporal topology structure of each map, 
+           defined in class temporal_map_relations,
+           will be reset and rebuilt.
+
+           After building the temporal topology the 
+           modified map objects can be accessed
+           in the same way as a dictionary using their id. 
+           The implemented iterator assures
+           the chronological iteration over the maps.
+
+           @param maps: A sorted list (by start_time) of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        for map_ in maps:
+            map_.reset_temporal_topology()
+
+        for i in xrange(len(maps)):
+            offset = i + 1
+            for j in xrange(offset, len(maps)):
+
+                # Get the temporal relationship
+                relation = maps[j].temporal_relation(maps[i])
+
+                # The start time of map j is equal or later than map i
+                if relation == "equivalent":
+                    maps[j].append_temporal_equivalent(maps[i])
+                    maps[i].append_temporal_equivalent(maps[j])
+                elif relation == "follows":
+                    maps[j].append_temporal_follows(maps[i])
+                    maps[i].append_temporal_precedes(maps[j])
+                elif relation == "during" or relation == "starts" or \
+                     relation == "finishes":
+                    maps[j].append_temporal_during(maps[i])
+                    maps[i].append_temporal_contains(maps[j])
+                elif relation == "started":
+                    # Consider equal start time, in case 
+                    # "started" map j contains map i
+                    maps[j].append_temporal_contains(maps[i])
+                    maps[i].append_temporal_during(maps[j])
+                elif relation == "overlaps":
+                    maps[j].append_temporal_overlaps(maps[i])
+                    maps[i].append_temporal_overlapped(maps[j])
+
+                # Break if the last map follows
+                if relation == "follows":
+                    if j < len(maps) - 1:
+                        relation = maps[j + 1].temporal_relation(maps[i])
+                        if relation == "after":
+                            break
+                # Break if the the next map is after
+                if relation == "after":
+                    break
+
+        self._build_internal_iteratable(maps)
+
     def __iter__(self):
-	_start = self._first
-	while _start != None:
-	    yield _start
-	    _start = _start.next()
+        start_ = self._first
+        while start_ is not None:
+            yield start_
+            start_ = start_.temporal_next()
 
     def __getitem__(self, index):
         return self._store[index.get_id()]
@@ -269,86 +293,93 @@ class temporal_topology_builder(object):
 ###############################################################################
 
 def print_temporal_topology_relationships(maps1, maps2):
-    """!Print the temporal relation matrix of the temporal ordered map lists maps1 and maps2
-       to stdout.
-	
-	@param maps1: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@param maps2: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
+    """!Print the temporal relation matrix of the temporal ordered 
+       map lists maps1 and maps2 to stdout.
+
+        @param maps1: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @param maps2: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
     """
-    
+
     identical = False
     use_id = True
-    
+
     if maps1 == maps2:
-	identical = True
-	use_id = False
+        identical = True
+        use_id = False
 
     for i in range(len(maps1)):
-	if identical == True:
-	    start = i + 1
-	else:
-	    start = 0
-	for j in range(start, len(maps2)):
-	    relation = maps1[j].temporal_relation(maps2[i])
-
-	    if use_id == False:
-		print maps2[j].base.get_name(), relation, maps1[i].base.get_name()
-	    else:
-		print maps2[j].base.get_id(), relation, maps1[i].base.get_id()
-
-	    # Break if the last map follows
-	    if relation == "follows":
-		if j < len(maps1) - 1:
-		    relation = maps1[j + 1].temporal_relation(maps2[i])
-		    if relation == "after":
-			break
-	    # Break if the the next map is after
-	    if relation == "after":
-		break
+        if identical == True:
+            start = i + 1
+        else:
+            start = 0
+        for j in range(start, len(maps2)):
+            relation = maps1[j].temporal_relation(maps2[i])
+
+            if use_id == False:
+                print maps2[j].base.get_name(
+                ), relation, maps1[i].base.get_name()
+            else:
+                print maps2[j].base.get_id(), relation, maps1[i].base.get_id()
+
+            # Break if the last map follows
+            if relation == "follows":
+                if j < len(maps1) - 1:
+                    relation = maps1[j + 1].temporal_relation(maps2[i])
+                    if relation == "after":
+                        break
+            # Break if the next map is after
+            if relation == "after":
+                break
 
 ###############################################################################
 
+
 def count_temporal_topology_relationships(maps1, maps2):
     """!Count the temporal relations between the two lists of maps
 
-	The map lists must be ordered by start time. Temporal relations are counted 
-	by analyzing the sparse (upper right side in case maps1 == maps2) temporal relationships matrix.
+        The map lists must be ordered by start time. 
+        Temporal relations are counted by analyzing the sparse 
+        (upper right side in case maps1 == maps2) temporal relationships matrix.
 
-	@param maps1: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@param maps2: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@return A dictionary with counted temporal relationships
+        @param maps1: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @param maps2: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @return A dictionary with counted temporal relationships
     """
-    
+
     tcount = {}
     identical = False
-    
+
     if maps1 == maps2:
-	identical = True
+        identical = True
 
     for i in range(len(maps1)):
-	if identical == True:
-	    start = i + 1
-	else:
-	    start = 0
-	for j in range(start, len(maps2)):
-	    relation = maps1[j].temporal_relation(maps2[i])
-
-	    if relation == "before":
-		continue
-	    
-	    if tcount.has_key(relation):
-		tcount[relation] = tcount[relation] + 1
-	    else:
-		tcount[relation] = 1
-
-	    # Break if the last map follows
-	    if relation == "follows":
-		if j < len(maps1) - 1:
-		    relation = maps1[j + 1].temporal_relation(maps2[i])
-		    if relation == "after":
-			break
-	    # Break if the the next map is after
-	    if relation == "after":
-		break  
+        if identical:
+            start = i + 1
+        else:
+            start = 0
+        for j in range(start, len(maps2)):
+            relation = maps1[j].temporal_relation(maps2[i])
+
+            if relation == "before":
+                continue
+
+            if relation in tcount:
+                tcount[relation] = tcount[relation] + 1
+            else:
+                tcount[relation] = 1
+
+            # Break if the last map follows
+            if relation == "follows":
+                if j < len(maps1) - 1:
+                    relation = maps1[j + 1].temporal_relation(maps2[i])
+                    if relation == "after":
+                        break
+            # Break if the next map is after
+            if relation == "after":
+                break
 
     return tcount

Разлика између датотеке није приказан због своје велике величине
+ 656 - 522
lib/python/temporal/unit_tests.py


+ 117 - 80
lib/python/temporal/univar_statistics.py

@@ -9,7 +9,8 @@ Usage:
 @code
 import grass.temporal as tgis
 
-tgis.print_gridded_dataset_univar_statistics(type, input, where, extended, header, fs)
+tgis.print_gridded_dataset_univar_statistics(
+    type, input, where, extended, header, fs)
 
 ...
 @endcode
@@ -25,23 +26,24 @@ for details.
 from space_time_datasets_tools import *
 
 ###############################################################################
-    
+
+
 def print_gridded_dataset_univar_statistics(type, input, where, extended, header, fs):
     """!Print univariate statistics for a space time raster or raster3d dataset
-    
-      param type Must be "strds" or "str3ds"
+
+       @param type Must be "strds" or "str3ds"
        @param input The name of the space time dataset
        @param where A temporal database where statement
-       @param extended If True compute extended statistics 
-       @param header   If True print column names as header 
-       @param fs Field separator 
+       @param extended If True compute extended statistics
+       @param header   If True print column names as header
+       @param fs Field separator
     """
-    
+
     # We need a database interface
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
-   
-    mapset =  core.gisenv()["MAPSET"]
+
+    mapset = core.gisenv()["MAPSET"]
 
     if input.find("@") >= 0:
         id = input
@@ -49,76 +51,90 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended, header
         id = input + "@" + mapset
 
     sp = dataset_factory(type, id)
-    
+
     if sp.is_in_db(dbif) == False:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     sp.select(dbif)
 
-    rows = sp.get_registered_maps("id,start_time,end_time", where, "start_time", dbif)
+    rows = sp.get_registered_maps(
+        "id,start_time,end_time", where, "start_time", dbif)
 
     if not rows:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> is empty") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> is empty") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     if header == True:
-        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + "min" + fs + "max" + fs,
+        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + \
+            "min" + fs + "max" + fs,
         print "mean_of_abs" + fs + "stddev" + fs + "variance" + fs,
         if extended == True:
-            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells" + fs,
-            print "first_quartile" + fs + "median" + fs + "third_quartile" + fs + "percentile_90" 
+            print "coeff_var" + fs + "sum" + fs + \
+                "null_cells" + fs + "cells" + fs,
+            print "first_quartile" + fs + "median" + fs + \
+                "third_quartile" + fs + "percentile_90"
         else:
-            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells" 
+            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"
 
     for row in rows:
         id = row["id"]
         start = row["start_time"]
         end = row["end_time"]
 
-        flag="g"
+        flag = "g"
 
         if extended == True:
             flag += "e"
 
-	if type == "strds":
-	    stats = core.parse_command("r.univar", map=id, flags=flag)
-	elif type == "str3ds":
-	    stats = core.parse_command("r3.univar", map=id, flags=flag)
+        if type == "strds":
+            stats = core.parse_command("r.univar", map=id, flags=flag)
+        elif type == "str3ds":
+            stats = core.parse_command("r3.univar", map=id, flags=flag)
 
         print str(id) + fs + str(start) + fs + str(end),
-        print fs + str(stats["mean"]) + fs + str(stats["min"]) + fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
-        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),
+        print fs + str(stats["mean"]) + fs + str(stats["min"]) + \
+            fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
+        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + \
+            fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),
 
         if extended == True:
-            print fs + str(stats["null_cells"]) + fs + str(stats["cells"]) + fs,
-            print str(stats["first_quartile"]) + fs + str(stats["median"]) + fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"]) 
+            print fs + str(stats["null_cells"]) + fs + str(
+                stats["cells"]) + fs,
+            print str(stats["first_quartile"]) + fs + str(stats["median"]) + \
+                  fs + str(stats["third_quartile"]) + \
+                  fs + str(stats["percentile_90"])
         else:
             print fs + str(stats["null_cells"]) + fs + str(stats["cells"])
-        
+
     dbif.close()
 
 ###############################################################################
-    
-def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, where, extended, header, fs):
+
+
+def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, 
+                                           where, extended, header, fs):
     """!Print univariate statistics for a space time vector dataset
-    
-       @param input The name of the space time dataset
-       @param twhere A temporal database where statement
-       @param layer The layer number used in case no layer is present in the temporal dataset
-       @param type options: point,line,boundary,centroid,area
-       @param column The name of the attribute column
-       @param where A temporal database where statement
-       @param extended If True compute extended statistics 
-       @param header   If True print column names as header 
-       @param fs Field separator 
+
+       @param input: The name of the space time dataset
+       @param twhere: A temporal database where statement
+       @param layer: The layer number used in case no layer is present 
+              in the temporal dataset
+       @param type: options: point,line,boundary,centroid,area
+       @param column: The name of the attribute column
+       @param where: A temporal database where statement
+       @param extended: If True compute extended statistics
+       @param header:   If True print column names as header
+       @param fs: Field separator
     """
 
     # We need a database interface
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
-   
-    mapset =  core.gisenv()["MAPSET"]
+
+    mapset = core.gisenv()["MAPSET"]
 
     if input.find("@") >= 0:
         id = input
@@ -126,71 +142,92 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, w
         id = input + "@" + mapset
 
     sp = dataset_factory("stvds", id)
-    
+
     if sp.is_in_db(dbif) == False:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     sp.select(dbif)
 
-    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer", twhere, "start_time", dbif)
+    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer",
+                                  twhere, "start_time", dbif)
 
     if not rows:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> is empty") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> is empty") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
     string = ""
     if header == True:
-        string += "id" + fs + "start" + fs + "end" + fs + "n" + fs + "nmissing" + fs + "nnull" + fs
+        string += "id" + fs + "start" + fs + "end" + fs + "n" + \
+            fs + "nmissing" + fs + "nnull" + fs
         string += "min" + fs + "max" + fs + "range"
-	if type == "point" or type == "centroid":
-            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" + fs + "population_variance" + fs
-	    string += "population_coeff_variation" + fs + "sample_stddev" + fs + "sample_variance" + fs
-	    string += "kurtosis" + fs + "skewness"
+        if type == "point" or type == "centroid":
+            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" +\
+                      fs + "population_variance" + fs
+            string += "population_coeff_variation" + fs + \
+                "sample_stddev" + fs + "sample_variance" + fs
+            string += "kurtosis" + fs + "skewness"
             if extended == True:
-                string+= fs + "first_quartile" + fs + "median" + fs + "third_quartile" + fs + "percentile_90" 
+                string += fs + "first_quartile" + fs + "median" + fs + \
+                    "third_quartile" + fs + "percentile_90"
 
-	print string
+        print string
 
     for row in rows:
         id = row["name"] + "@" + row["mapset"]
         start = row["start_time"]
         end = row["end_time"]
-	mylayer = row["layer"]
+        mylayer = row["layer"]
 
-        flags="g"
+        flags = "g"
 
         if extended == True:
             flags += "e"
 
-	if not mylayer:
-	    mylayer = layer
+        if not mylayer:
+            mylayer = layer
 
-        stats = core.parse_command("v.univar", map=id, where=where, column=column, layer=mylayer, type=type, flags=flags)
+        stats = core.parse_command("v.univar", map=id, where=where,
+                                   column=column, layer=mylayer, 
+                                   type=type, flags=flags)
 
-	string = ""
-	if stats:
+        string = ""
+        if stats:
             string += str(id) + fs + str(start) + fs + str(end)
-            string += fs + str(stats["n"]) + fs + str(stats["nmissing"]) + fs + str(stats["nnull"])
-	    if stats.has_key("min"):
-            	string += fs + str(stats["min"]) + fs + str(stats["max"]) + fs + str(stats["range"])
-	    else:
-            	string += fs + fs + fs
-
-	    if type == "point" or type == "centroid":
-		if stats.has_key("mean"):
-            	    string += fs + str(stats["mean"]) + fs + str(stats["mean_abs"]) + fs + str(stats["population_stddev"]) + fs + str(stats["population_variance"])
-            	    string += fs + str(stats["population_coeff_variation"]) + fs + str(stats["sample_stddev"]) + fs + str(stats["sample_variance"])
-            	    string += fs + str(stats["kurtosis"]) + fs + str(stats["skewness"])
-	        else:
-            	    string += fs + fs + fs + fs + fs + fs + fs + fs + fs
+            string += fs + str(stats["n"]) + fs + str(stats[
+                "nmissing"]) + fs + str(stats["nnull"])
+            if "min" in stats:
+                string += fs + str(stats["min"]) + fs + str(
+                    stats["max"]) + fs + str(stats["range"])
+            else:
+                string += fs + fs + fs
+
+            if type == "point" or type == "centroid":
+                if "mean" in stats:
+                    string += fs + str(stats["mean"]) + fs + \
+                    str(stats["mean_abs"]) + fs + \
+                    str(stats["population_stddev"]) + fs + \
+                    str(stats["population_variance"])
+                    
+                    string += fs + str(stats["population_coeff_variation"]) + \
+                    fs + str(stats["sample_stddev"]) + fs + \
+                    str(stats["sample_variance"])
+                    
+                    string += fs + str(stats["kurtosis"]) + fs + \
+                    str(stats["skewness"])
+                else:
+                    string += fs + fs + fs + fs + fs + fs + fs + fs + fs
                 if extended == True:
-		    if stats.has_key("first_quartile"):
-                        string += fs + str(stats["first_quartile"]) + fs + str(stats["median"]) + fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"]) 
-		    else:
-                        string += fs + fs + fs + fs 
-        
-	    print string
+                    if "first_quartile" in stats:
+                        string += fs + str(stats["first_quartile"]) + fs + \
+                        str(stats["median"]) + fs + \
+                        str(stats["third_quartile"]) + fs + \
+                        str(stats["percentile_90"])
+                    else:
+                        string += fs + fs + fs + fs
 
-    dbif.close()
+            print string
 
+    dbif.close()