
PEP8 compliance and better doxygen formatting. New rtree tests.

git-svn-id: https://svn.osgeo.org/grass/grass/trunk@52631 15284696-431f-4ddb-bdfa-cd5b030d7da7
Soeren Gebbert committed 12 years ago
Commit 799ffaf996

+ 28 - 12
lib/python/temporal/abstract_dataset.py

@@ -7,6 +7,8 @@ Temporal GIS related functions to be used in temporal GIS Python library package
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> ad = AbstractDataset()
 >>> ad.reset(ident="soil@PERMANENT")
@@ -19,7 +21,9 @@ Traceback (most recent call last):
     raise ImplementationError("This method must be implemented in the subclasses")
 ImplementationError: 'This method must be implemented in the subclasses'
 
-(C) 2008-2011 by the GRASS Development Team
+@endcode
+
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -43,7 +47,8 @@ class ImplementationError(Exception):
         return repr(self.msg)
     
 class AbstractDataset(object):
-    """!This is the base class for all datasets (raster, vector, raster3d, strds, stvds, str3ds)"""
+    """!This is the base class for all datasets 
+       (raster, vector, raster3d, strds, stvds, str3ds)"""
 
     def reset(self, ident):
         """!Reset the internal structure and set the identifier
@@ -107,7 +112,8 @@ class AbstractDataset(object):
         return self.base.get_mapset()
 
     def get_valid_time(self):
-        """!Returns a tuple of the start, the end valid time, this can be either datetime or double values
+        """!Returns a tuple of the start, the end valid time, 
+           this can be either datetime or double values
           @return A tuple of (start_time, end_time)
        """
 
@@ -124,7 +130,9 @@ class AbstractDataset(object):
         return (start, end)
 
     def get_absolute_time(self):
-        """!Returns a tuple of the start, the end valid time and the timezone of the map
+        """!Returns a tuple of the start, the end 
+           valid time and the timezone of the map
+           
          @return A tuple of (start_time, end_time, timezone)
        """
 
@@ -135,7 +143,8 @@ class AbstractDataset(object):
         return (start, end, tz)
 
     def get_relative_time(self):
-        """!Returns the relative time interval (start_time, end_time, unit) or None if not present"""
+        """!Returns the relative time interval (start_time, end_time, unit) 
+           or None if not present"""
 
         start = self.relative_time.get_start_time()
         end = self.relative_time.get_end_time()
@@ -151,7 +160,8 @@ class AbstractDataset(object):
         return unit
 
     def check_relative_time_unit(self, unit):
-        """!Check if unit is of type  years, months, days, hours, minutes or seconds
+        """!Check if unit is of type  years, months, days, hours, 
+           minutes or seconds
 
          Return True if success or False otherwise
        """
@@ -166,11 +176,13 @@ class AbstractDataset(object):
         return self.base.get_ttype()
 
     def get_spatial_extent(self):
-        """!Return a tuple of spatial extent (north, south, east, west, top, bottom) """
+        """!Return a tuple of spatial extent 
+           (north, south, east, west, top, bottom) """
         return self.spatial_extent.get_spatial_extent()
 
     def select(self, dbif=None):
-        """!Select temporal dataset entry from database and fill up the internal structure"""
+        """!Select temporal dataset entry from database and fill 
+           up the internal structure"""
 
         dbif, connect = init_dbif(dbif)
 
@@ -197,12 +209,14 @@ class AbstractDataset(object):
         raise ImplementationError("This method must be implemented in the subclasses")
 
     def insert(self, dbif=None, execute=True):
-        """!Insert temporal dataset entry into database from the internal structure
+        """!Insert temporal dataset entry into 
+           database from the internal structure
 
 
           @param dbif: The database interface to be used
          @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
        """
 
         dbif, connect = init_dbif(dbif)
@@ -234,7 +248,8 @@ class AbstractDataset(object):
 
          @param dbif: The database interface to be used
          @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
        """
 
         dbif, connect = init_dbif(dbif)
@@ -266,7 +281,8 @@ class AbstractDataset(object):
 
          @param dbif: The database interface to be used
          @param execute: If True the SQL statements will be executed.
-                           If False the prepared SQL statements are returned and must be executed by the caller.
+                           If False the prepared SQL statements are returned 
+                           and must be executed by the caller.
        """
 
         dbif, connect = init_dbif(dbif)
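A hedged usage sketch for the interface documented above: AbstractDataset itself raises ImplementationError for the abstract methods, so the sketch goes through raster_dataset, the concrete subclass that aggregation.py in this same commit instantiates. The map name, and the assumption that the subclass is reachable as tgis.raster_dataset, are illustrative only.

    import grass.temporal as tgis

    # "soil@PERMANENT" is a placeholder; the map must already have an entry
    # in the temporal database for select() to find.
    rmap = tgis.raster_dataset("soil@PERMANENT")
    rmap.select()                                  # fill the internal structure
    start, end, tz = rmap.get_absolute_time()
    north, south, east, west, top, bottom = rmap.get_spatial_extent()

    # With execute=False the prepared SQL is returned instead of executed,
    # as the insert() docstring above states.
    statement = rmap.insert(execute=False)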

+ 116 - 56
lib/python/temporal/abstract_map_dataset.py

@@ -8,8 +8,8 @@ Temporal GIS related functions to be used in temporal GIS Python library package
 Usage:
 
 >>> import grass.temporal as tgis
->>> tmr = TemporalMapRelations()
->>> amd = AbstractMapDataset()
+>>> tmr = tgis.TemporalMapRelations()
+>>> amd = tgis.AbstractMapDataset()
 
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -27,8 +27,8 @@ class TemporalMapRelations(AbstractDataset):
 
       This object will be set up by temporal topology creation methods.
 
-       If correctly initialize the calls next() and prev() let the user walk temporally forward
-       and backward in time.
+       If correctly initialized, the calls next() and prev() 
+       let the user walk temporally forward and backward in time.
 
       The following temporal relations with access methods are supported:
       * equal
@@ -55,11 +55,14 @@ class TemporalMapRelations(AbstractDataset):
        
        Usage:
        
-        >>> import grass.temporal as tgis
+        @code
+        
        >>> tmr = TemporalMapRelations()
        >>> tmr.print_temporal_topology_info()
         +-------------------- Temporal Topology -------------------------------------+
        >>> tmr.print_temporal_topology_shell_info()
+        
+        @endcode
    """
 
    def __init__(self):
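The class docstring above says that, once the topology has been built, next() and prev() let the user walk forward and backward in time. A hedged sketch of that walk; the list of maps and the topology build step are assumed to have happened elsewhere (the creation methods are only referred to, not shown, in this diff), and next() is assumed to return None past the last map.

    # maps: temporally sorted AbstractMapDataset objects with topology set up
    map_ = maps[0]
    while map_ is not None:
        print map_.get_id()      # Python 2 print, matching the library code
        map_ = map_.next()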
@@ -89,7 +92,8 @@ class TemporalMapRelations(AbstractDataset):
           temporally located AFTER the start time of this map, but temporally
           near than other maps of the same dataset.
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        self._temporal_topology["NEXT"] = map_
 
@@ -100,7 +104,8 @@ class TemporalMapRelations(AbstractDataset):
           temporally located BEFORE the start time of this map, but temporally
           near than other maps of the same dataset.
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        self._temporal_topology["PREV"] = map_
 
@@ -127,7 +132,8 @@ class TemporalMapRelations(AbstractDataset):
    def append_temporal_equivalent(self, map_):
        """!Append a map with equivalent temporal extent as this map
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        if "EQUAL" not in self._temporal_topology:
            self._temporal_topology["EQUAL"] = []
@@ -145,7 +151,8 @@ class TemporalMapRelations(AbstractDataset):
    def append_temporal_overlaps(self, map_):
        """!Append a map that this map temporally overlaps
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        if "OVERLAPS" not in self._temporal_topology:
            self._temporal_topology["OVERLAPS"] = []
@@ -163,7 +170,8 @@ class TemporalMapRelations(AbstractDataset):
    def append_temporal_overlapped(self, map_):
        """!Append a map that this map temporally overlapped
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        if "OVERLAPPED" not in self._temporal_topology:
            self._temporal_topology["OVERLAPPED"] = []
@@ -181,7 +189,8 @@ class TemporalMapRelations(AbstractDataset):
    def append_temporal_follows(self, map_):
        """!Append a map that this map temporally follows
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        if "FOLLOWS" not in self._temporal_topology:
            self._temporal_topology["FOLLOWS"] = []
@@ -199,7 +208,8 @@ class TemporalMapRelations(AbstractDataset):
    def append_temporal_precedes(self, map_):
        """!Append a map that this map temporally precedes
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        if "PRECEDES" not in self._temporal_topology:
            self._temporal_topology["PRECEDES"] = []
@@ -218,7 +228,8 @@ class TemporalMapRelations(AbstractDataset):
        """!Append a map that this map is temporally located during
           This includes temporal relationships starts and finishes
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type 
+                        AbstractMapDataset or derived classes
        """
        if "DURING" not in self._temporal_topology:
            self._temporal_topology["DURING"] = []
@@ -238,7 +249,8 @@ class TemporalMapRelations(AbstractDataset):
        """!Append a map that this map temporally contains
           This includes temporal relationships started and finished
 
-           @param map_: This object should be of type AbstractMapDataset or derived classes
+           @param map_: This object should be of type AbstractMapDataset 
+                        or derived classes
        """
        if "CONTAINS" not in self._temporal_topology:
            self._temporal_topology["CONTAINS"] = []
@@ -389,7 +401,8 @@ class AbstractMapDataset(TemporalMapRelations):
    def set_stds_register(self, name):
        """!Set the space time dataset register table name.
 
-           This table stores all space time datasets in which this map is registered.
+           This table stores all space time datasets in 
+           which this map is registered.
 
          @param ident: The name of the register table
        """
@@ -397,9 +410,13 @@ class AbstractMapDataset(TemporalMapRelations):
            "This method must be implemented in the subclasses")
 
    def check_resolution_with_current_region(self):
-        """!Check if the raster or voxel resolution is finer than the current resolution
-           Return "finer" in case the raster/voxel resolution is finer than the current region
-           Return "coarser" in case the raster/voxel resolution is coarser than the current region
+        """!Check if the raster or voxel resolution is 
+           finer than the current resolution
+           
+           * Return "finer" in case the raster/voxel resolution is finer 
+             than the current region
+           * Return "coarser" in case the raster/voxel resolution is coarser 
+             than the current region
 
          Vector maps are always finer than the current region
        """
@@ -413,14 +430,15 @@ class AbstractMapDataset(TemporalMapRelations):
            "This method must be implemented in the subclasses")
 
    def write_timestamp_to_grass(self):
-        """!Write the timestamp of this map into the map metadata in the grass file system based spatial
-           database.
+        """!Write the timestamp of this map into the map metadata 
+           in the grass file system based spatial database.
        """
        raise ImplementationError(
            "This method must be implemented in the subclasses")
 
    def remove_timestamp_from_grass(self):
-        """!Remove the timestamp from the grass file system based spatial database
+        """!Remove the timestamp from the grass file 
+           system based spatial database
        """
        raise ImplementationError(
            "This method must be implemented in the subclasses")
@@ -434,19 +452,21 @@ class AbstractMapDataset(TemporalMapRelations):
            "This method must be implemented in the subclasses")
 
    def read_info(self):
-        """!Read the map info from the grass file system based database and store the content
-           into a dictionary
+        """!Read the map info from the grass file system based database and 
+           store the content into a dictionary
        """
        raise ImplementationError(
            "This method must be implemented in the subclasses")
 
    def load(self):
-        """!Load the content of this object from the grass file system based database"""
+        """!Load the content of this object from the grass 
+           file system based database"""
        raise ImplementationError(
            "This method must be implemented in the subclasses")
 
    def _convert_timestamp(self):
-        """!Convert the valid time into a grass datetime library compatible timestamp string
+        """!Convert the valid time into a grass datetime library 
+           compatible timestamp string
 
            This method works for relative and absolute time
 
@@ -486,9 +506,11 @@ class AbstractMapDataset(TemporalMapRelations):
    def build_id(self, name, mapset, layer=None):
        """!Convenient method to build the unique identifier
 
-            Existing layer and mapset definitions in the name string will be reused
+            Existing layer and mapset definitions in the name 
+            string will be reused
 
-           @param return the id of the vector map as name(:layer)@mapset while layer is optional
+           @return The id of the vector map as name(:layer)@mapset 
+                   while layer is optional
        """
 
        # Check if the name includes any mapset
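A hedged illustration of build_id() as documented in the hunk above; map, mapset and layer values are placeholders.

    import grass.temporal as tgis

    amd = tgis.AbstractMapDataset()
    amd.build_id("soil", "PERMANENT")         # -> "soil@PERMANENT"
    amd.build_id("soil", "PERMANENT", "1")    # -> "soil:1@PERMANENT"
    # A layer or mapset already present in the name string is reused,
    # as the docstring states.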
@@ -514,17 +536,14 @@ class AbstractMapDataset(TemporalMapRelations):
        if self.get_type() == "raster":
            #                1         2         3         4         5         6         7
            #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
            print " +-------------------- Raster Dataset ----------------------------------------+"
        if self.get_type() == "raster3d":
            #                1         2         3         4         5         6         7
            #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
            print " +-------------------- Raster3d Dataset --------------------------------------+"
        if self.get_type() == "vector":
            #                1         2         3         4         5         6         7
            #      0123456789012345678901234567890123456789012345678901234567890123456789012345678
-            print ""
            print " +-------------------- Vector Dataset ----------------------------------------+"
        print " |                                                                            |"
        self.base.print_info()
@@ -629,22 +648,37 @@ class AbstractMapDataset(TemporalMapRelations):
        """
        if start_time and not isinstance(start_time, datetime):
            if self.get_layer() is not None:
-                core.fatal(_("Start time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                core.fatal(_("Start time must be of type datetime "
+                             "for %s map <%s> with layer: %s") % \
+                           (self.get_type(), self.get_map_id(), 
+                            self.get_layer()))
            else:
-                core.fatal(_("Start time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                core.fatal(_("Start time must be of type "
+                             "datetime for %s map <%s>") % \
+                           (self.get_type(), self.get_map_id()))
 
        if end_time and not isinstance(end_time, datetime):
            if self.get_layer():
-                core.fatal(_("End time must be of type datetime for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                core.fatal(_("End time must be of type datetime "
+                             "for %s map <%s> with layer: %s") % \
+                           (self.get_type(), self.get_map_id(), 
+                            self.get_layer()))
            else:
-                core.fatal(_("End time must be of type datetime for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                core.fatal(_("End time must be of type datetime "
+                             "for %s map <%s>") % (self.get_type(), 
+                                                   self.get_map_id()))
 
        if start_time is not None and end_time is not None:
            if start_time > end_time:
                if self.get_layer():
-                    core.fatal(_("End time must be greater than start time for %s map <%s> with layer: %s") % (self.get_type(), self.get_map_id(), self.get_layer()))
+                    core.fatal(_("End time must be greater than "
+                                 "start time for %s map <%s> with layer: %s") %\
+                                (self.get_type(), self.get_map_id(), 
+                                 self.get_layer()))
                else:
-                    core.fatal(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_map_id()))
+                    core.fatal(_("End time must be greater than "
+                                 "start time for %s map <%s>") % \
+                               (self.get_type(), self.get_map_id()))
            else:
                # Do not create an interval in case start and end time are equal
                if start_time == end_time:
@@ -655,7 +689,8 @@ class AbstractMapDataset(TemporalMapRelations):
        self.absolute_time.set_end_time(end_time)
        self.absolute_time.set_timezone(timezone)
 
-    def update_absolute_time(self, start_time, end_time=None, timezone=None, dbif=None):
+    def update_absolute_time(self, start_time, end_time=None, 
+                             timezone=None, dbif=None):
        """!Update the absolute time
 
          This function assures that the timestamp is written to the 
@@ -690,17 +725,28 @@ class AbstractMapDataset(TemporalMapRelations):
 
        if not self.check_relative_time_unit(unit):
            if self.get_layer() is not None:
-                core.error(_("Unsupported relative time unit type for %s map <%s> with layer %s: %s") % (self.get_type(), self.get_id(), self.get_layer(), unit))
+                core.error(_("Unsupported relative time unit type for %s map "
+                             "<%s> with layer %s: %s") % (self.get_type(), 
+                                                          self.get_id(), 
+                                                          self.get_layer(), 
+                                                          unit))
            else:
-                core.error(_("Unsupported relative time unit type for %s map <%s>: %s") % (self.get_type(), self.get_id(), unit))
+                core.error(_("Unsupported relative time unit type for %s map "
+                             "<%s>: %s") % (self.get_type(), self.get_id(), 
+                                            unit))
            return False
 
        if start_time is not None and end_time is not None:
            if int(start_time) > int(end_time):
                if self.get_layer() is not None:
-                    core.error(_("End time must be greater than start time for %s map <%s> with layer %s") % (self.get_type(), self.get_id(), self.get_layer()))
+                    core.error(_("End time must be greater than start time for"
+                                 " %s map <%s> with layer %s") % \
+                               (self.get_type(), self.get_id(), 
+                                self.get_layer()))
                else:
-                    core.error(_("End time must be greater than start time for %s map <%s>") % (self.get_type(), self.get_id()))
+                    core.error(_("End time must be greater than start time for"
+                                 " %s map <%s>") % (self.get_type(), 
+                                                    self.get_id()))
                return False
            else:
                # Do not create an interval in case start and end time are equal
@@ -763,9 +809,14 @@ class AbstractMapDataset(TemporalMapRelations):
            if end is not None:
                if start >= end:
                    if self.get_layer() is not None:
-                        core.error(_("Map <%s> with layer %s has incorrect time interval, start time is greater than end time") % (self.get_map_id(), self.get_layer()))
+                        core.error(_("Map <%s> with layer %s has incorrect "
+                                     "time interval, start time is greater "
+                                     "than end time") % (self.get_map_id(), 
+                                                         self.get_layer()))
                    else:
-                        core.error(_("Map <%s> has incorrect time interval, start time is greater than end time") % (self.get_map_id()))
+                        core.error(_("Map <%s> has incorrect time interval, "
+                                     "start time is greater than end time") % \
+                                   (self.get_map_id()))
                    return False
        else:
            core.error(_("Map <%s> has incorrect start time") %
@@ -778,14 +829,16 @@ class AbstractMapDataset(TemporalMapRelations):
        """!Delete a map entry from database if it exists
 
            Remove dependent entries:
-            * Remove the map entry in each space time dataset in which this map is registered
+            * Remove the map entry in each space time dataset in which this map 
+              is registered
            * Remove the space time dataset register table
 
          @param dbif: The database interface to be used
          @param update: Call for each unregister statement the update from 
                         registered maps of the space time dataset. 
                         This can slow down the un-registration process significantly.
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
+           @param execute: If True the SQL DELETE and DROP table statements will 
+                           be executed.
                          If False the prepared SQL statements are 
                          returned and must be executed by the caller.
 
@@ -812,7 +865,8 @@ class AbstractMapDataset(TemporalMapRelations):
            core.verbose(_("Delete %s dataset <%s> from temporal database")
                         % (self.get_type(), self.get_id()))
 
-            # Delete yourself from the database, trigger functions will take care of dependencies
+            # Delete yourself from the database, trigger functions will 
+            # take care of dependencies
            statement += self.base.get_delete_statement()
 
        if execute:
@@ -832,13 +886,15 @@ class AbstractMapDataset(TemporalMapRelations):
        return statement
 
    def unregister(self, dbif=None, update=True, execute=True):
-        """! Remove the map entry in each space time dataset in which this map is registered
+        """! Remove the map entry in each space time dataset in which this map 
+           is registered
 
          @param dbif: The database interface to be used
-           @param update: Call for each unregister statement the update from registered maps
-                          of the space time dataset. This can slow down the 
-                          un-registration process significantly.
-           @param execute: If True the SQL DELETE and DROP table statements will be executed.
+           @param update: Call for each unregister statement the update from 
+                          registered maps of the space time dataset. This can 
+                          slow down the un-registration process significantly.
+           @param execute: If True the SQL DELETE and DROP table statements 
+                           will be executed.
                          If False the prepared SQL statements are 
                          returned and must be executed by the caller.
 
@@ -846,11 +902,14 @@ class AbstractMapDataset(TemporalMapRelations):
        """
 
        if self.get_layer() is not None:
-            core.verbose(_("Unregister %s map <%s> with layer %s from space time datasets") %
-                         (self.get_type(), self.get_map_id(), self.get_layer()))
+            core.verbose(_("Unregister %(type)s map <%(map)s> with "
+                           "layer %(layer)s from space time datasets" % \
+                         {'type':self.get_type(), 'map':self.get_map_id(), 
+                          'layer':self.get_layer()}))
        else:
-            core.verbose(_("Unregister %s map <%s> from space time datasets")
-                         % (self.get_type(), self.get_map_id()))
+            core.verbose(_("Unregister %(type)s map <%(map)s> "
+                           "from space time datasets"
+                         % {'type':self.get_type(), 'map':self.get_map_id()}))
 
        statement = ""
        dbif, connect = init_dbif(dbif)
@@ -906,7 +965,8 @@ class AbstractMapDataset(TemporalMapRelations):
                dbif.cursor.execute(sql)
                rows = dbif.cursor.fetchall()
        except:
-            core.error(_("Unable to select space time dataset register table <%s>") % (self.get_stds_register()))
+            core.error(_("Unable to select space time dataset register table "
+                         "<%s>") % (self.get_stds_register()))
 
        if connect:
            dbif.close()
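A hedged sketch of the absolute-time handling documented above, following the checks in this diff: start and end must be datetime objects and the end time must be greater than the start time. It assumes an existing registered raster map; raster_dataset is the concrete subclass used in aggregation.py, and the map name is a placeholder.

    from datetime import datetime
    import grass.temporal as tgis

    rmap = tgis.raster_dataset("soil@PERMANENT")
    rmap.select()                                  # load the database entry
    # update_absolute_time() also assures, per its docstring, that the
    # timestamp is written back outside the temporal database.
    rmap.update_absolute_time(start_time=datetime(2001, 1, 1),
                              end_time=datetime(2001, 2, 1))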

The diff for this file is too large to display.
+ 298 - 147
lib/python/temporal/abstract_space_time_dataset.py


+ 28 - 17
lib/python/temporal/aggregation.py

@@ -9,7 +9,8 @@ Usage:
 @code
 import grass.temporal as tgis
 
-tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end, count, method, register_null, dbif)
+tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end,
+    count, method, register_null, dbif)
 
 ...
 @endcode
@@ -27,6 +28,7 @@ import grass.lib.gis as libgis
 
 ###############################################################################
 
+
 def collect_map_names(sp, dbif, start, end, sampling):
    """!Gather all maps from dataset using a specific sample method
 
@@ -36,7 +38,7 @@ def collect_map_names(sp, dbif, start, end, sampling):
      @param end: The end time of the sample interval, may be relative or absolute
      @param sampling: The sampling methods to use
    """
-    
+
    use_start = False
    use_during = False
    use_overlap = False
@@ -75,8 +77,15 @@ def collect_map_names(sp, dbif, start, end, sampling):
        use_follows = False
        use_precedes = False
 
-    where = create_temporal_relation_sql_where_statement(start, end, use_start, use_during, use_overlap, use_contain, use_equal, use_follows, use_precedes)
-   
+    where = create_temporal_relation_sql_where_statement(start, end, 
+                                                         use_start, 
+                                                         use_during, 
+                                                         use_overlap, 
+                                                         use_contain, 
+                                                         use_equal, 
+                                                         use_follows, 
+                                                         use_precedes)
+
    rows = sp.get_registered_maps("id", where, "start_time", dbif)
 
    if not rows:
@@ -86,13 +95,15 @@ def collect_map_names(sp, dbif, start, end, sampling):
    for row in rows:
        names.append(row["id"])
 
-    return names    
+    return names
 
 ###############################################################################
 
-def aggregate_raster_maps(inputs, base, start, end, count, method, register_null, dbif):
+
+def aggregate_raster_maps(inputs, base, start, end, count, method, 
+                          register_null, dbif):
    """!Aggregate a list of raster input maps with r.series
-       
+
      @param inputs: The names of the raster maps to be aggregated
      @param base: The basename of the new created raster maps
      @param start: The start time of the sample interval, may be relative or absolute
@@ -103,13 +114,11 @@ def aggregate_raster_maps(inputs, base, start, end, count, method, register_null
      @param dbif: The temporal database interface to use
    """
 
-    core.verbose(_("Aggregate %s raster maps") %(len(inputs)))
+    core.verbose(_("Aggregate %s raster maps") % (len(inputs)))
    output = "%s_%i" % (base, count)
-    
-    mapset = libgis.G_mapset()
 
+    mapset = libgis.G_mapset()
    map_id = output + "@" + mapset
-
    new_map = raster_dataset(map_id)
 
    # Check if new map is in the temporal database
@@ -122,7 +131,8 @@ def aggregate_raster_maps(inputs, base, start, end, count, method, register_null
            core.error(_("Raster map <%s> is already in temporal database, use overwrite flag to overwrite"))
            return
 
-    core.verbose(_("Compute aggregation of maps between %s - %s" % (str(start), str(end))))
+    core.verbose(_("Compute aggregation of maps between %s - %s" % (
+        str(start), str(end))))
 
    # Create the r.series input file
    filename = core.tempfile(True)
@@ -134,20 +144,21 @@ def aggregate_raster_maps(inputs, base, start, end, count, method, register_null
 
    file.close()
    # Run r.series
-    ret = core.run_command("r.series", flags="z", file=filename, output=output, overwrite=core.overwrite(), method=method)
+    ret = core.run_command("r.series", flags="z", file=filename,
+                           output=output, overwrite=core.overwrite(), 
+                           method=method)
 
    if ret != 0:
        dbif.close()
        core.fatal(_("Error while r.series computation"))
-        
 
    # Read the raster map data
    new_map.load()
-    
+
    # In case of a null map continue, do not register null maps
-    if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
+    if new_map.metadata.get_min() is None and new_map.metadata.get_max() is None:
        if not register_null:
            core.run_command("g.remove", rast=output)
            return None
-    
+
    return new_map
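A hedged end-to-end sketch of the two helpers above, using the signatures shown in this file. The space_time_raster_dataset class name and the package-level visibility of init_dbif() are assumptions (aggregation.py itself calls raster_dataset() and init_dbif() unqualified elsewhere in this commit); dataset, basename and sampling values are placeholders.

    from datetime import datetime
    import grass.temporal as tgis

    dbif, connect = tgis.init_dbif(None)       # open a temporal DB connection
    sp = tgis.space_time_raster_dataset("temperature@PERMANENT")  # assumed class name
    sp.select(dbif)

    start = datetime(2001, 1, 1)
    end = datetime(2001, 2, 1)

    # Gather the registered maps of sp that fall into the sample interval
    names = tgis.collect_map_names(sp, dbif, start, end, sampling="start")

    if names:
        # Aggregate them with r.series into "temp_agg_1"
        new_map = tgis.aggregate_raster_maps(names, "temp_agg", start, end,
                                             1, "average", False, dbif)
    if connect:
        dbif.close()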

+ 97 - 54
lib/python/temporal/base.py

@@ -5,12 +5,14 @@
 Temporal GIS base classes to be used in other
 Python temporal gis packages.
 
-This packages includes all base classes to store basic information like id, name,
-mapset creation and modification time as well as sql serialization and de-serialization
-and the sql database interface.
+This package includes all base classes to store basic information 
+like id, name, mapset creation and modification time as well as sql 
+serialization and de-serialization and the sql database interface.
 
 Usage:
 
+@code
+
 >>> import grass.temporal as tgis
 >>> rbase = tgis.RasterBase(ident="soil@PERMANENT")
 >>> vbase = tgis.VectorBase(ident="soil:1@PERMANENT")
@@ -19,7 +21,9 @@ Usage:
 >>> stvdsbase = tgis.STVDSBase(ident="soil@PERMANENT")
 >>> str3dsbase = tgis.STR3DSBase(ident="soil@PERMANENT")
 
-(C) 2008-2011 by the GRASS Development Team
+@endcode
+
+(C) 2011-2012 by the GRASS Development Team
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 for details.
@@ -38,12 +42,15 @@ class DictSQLSerializer(object):
        self.D = {}
 
    def serialize(self, type, table, where=None):
-        """!Convert the internal dictionary into a string of semicolon separated SQL statements
-            The keys are the column names and the values are the row entries
+        """!Convert the internal dictionary into a string of semicolon 
+            separated SQL statements. The keys are the column names and 
+            the values are the row entries
+            
+            Usage:
            
-            >>> import grass.temporal as tgis
-            >>> from datetime import datetime, date, time, timedelta
-            >>> t = tgis.DictSQLSerializer()
+            \code
+            
+            >>> t = DictSQLSerializer()
            >>> t.D["id"] = "soil@PERMANENT"
            >>> t.D["name"] = "soil"
            >>> t.D["mapset"] = "PERMANENT"
@@ -63,6 +70,8 @@ class DictSQLSerializer(object):
            @table The name of the table to select, insert or update
            @where The optional where statement
            @return a tuple containing the SQL string and the arguments
+            
+            \endcode
        """
 
        sql = ""
@@ -166,7 +175,8 @@ class DictSQLSerializer(object):
        return sql, tuple(args)
 
    def deserialize(self, row):
-        """!Convert the content of the dbmi dictionary like row into the internal dictionary
+        """!Convert the content of the dbmi dictionary like row into the 
+           internal dictionary
 
          @param row: The dictionary like row to store in the internal dict
        """
@@ -188,10 +198,10 @@ class DictSQLSerializer(object):
 class SQLDatabaseInterface(DictSQLSerializer):
    """!This class represents the SQL database interface
 
-       Functions to insert, select and update the internal structure of this class
-       in the temporal database are implemented.
-       This is the base class for raster, raster3d, vector and space time datasets
-       data management classes:
+       Functions to insert, select and update the internal 
+       structure of this class in the temporal database are implemented.
+       This is the base class for raster, raster3d, vector and 
+       space time datasets data management classes:
       * Identification information (base)
       * Spatial extent
       * Temporal extent
@@ -199,9 +209,9 @@ class SQLDatabaseInterface(DictSQLSerializer):
       
       Usage:
       
-        >>> import grass.temporal as tgis
-        >>> from datetime import datetime, date, time, timedelta
-        >>> t = tgis.SQLDatabaseInterface("raster", "soil@PERMANENT")
+       \code
+       
+        >>> t = SQLDatabaseInterface("raster", "soil@PERMANENT")
        >>> t.D["name"] = "soil"
        >>> t.D["mapset"] = "PERMANENT"
        >>> t.D["creator"] = "soeren"
@@ -226,12 +236,15 @@ class SQLDatabaseInterface(DictSQLSerializer):
        ("UPDATE raster SET  creation_time = ?  ,mapset = ?  ,name = ?  ,creator = ? WHERE id = 'soil@PERMANENT';\\n", (datetime.datetime(2001, 1, 1, 0, 0), 'PERMANENT', 'soil', 'soeren'))
        >>> t.get_update_all_statement_mogrified()
        "UPDATE raster SET  creation_time = '2001-01-01 00:00:00'  ,mapset = 'PERMANENT'  ,name = 'soil'  ,creator = 'soeren' WHERE id = 'soil@PERMANENT';\\n"
+        
+        \endcode
    """
    def __init__(self, table=None, ident=None):
        """!Constructor of this class
 
          @param table: The name of the table
-           @param ident: The identifier (primary key) of this object in the database table
+           @param ident: The identifier (primary key) of this 
+                         object in the database table
        """
        DictSQLSerializer.__init__(self)
 
@@ -239,17 +252,20 @@ class SQLDatabaseInterface(DictSQLSerializer):
        self.ident = ident
 
    def get_table_name(self):
-        """!Return the name of the table in which the internal data are inserted, updated or selected"""
+        """!Return the name of the table in which the internal 
+           data are inserted, updated or selected"""
        return self.table
 
    def get_delete_statement(self):
        """!Return the delete string"""
-        return "DELETE FROM " + self.get_table_name() + " WHERE id = \'" + str(self.ident) + "\';\n"
+        return "DELETE FROM " + self.get_table_name() + \
+               " WHERE id = \'" + str(self.ident) + "\';\n"
 
    def delete(self, dbif=None):
        """!Delete the entry of this object from the temporal database
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
        """
        sql = self.get_delete_statement()
        #print sql
@@ -264,12 +280,14 @@ class SQLDatabaseInterface(DictSQLSerializer):
 
    def get_is_in_db_statement(self):
        """Return the selection string"""
-        return "SELECT id FROM " + self.get_table_name() + " WHERE id = \'" + str(self.ident) + "\';\n"
+        return "SELECT id FROM " + self.get_table_name() + \
+               " WHERE id = \'" + str(self.ident) + "\';\n"
 
    def is_in_db(self, dbif=None):
        """!Check if this object is present in the temporal database
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
        """
 
        sql = self.get_is_in_db_statement()
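A hedged sketch tying the statement builders above to the RasterBase usage block at the top of this file. It assumes a populated temporal database; with dbif left at None each call opens and closes its own temporary connection, as the @param dbif notes state.

    import grass.temporal as tgis

    rbase = tgis.RasterBase(ident="soil@PERMANENT")
    if rbase.is_in_db():       # SELECT id FROM raster WHERE id = 'soil@PERMANENT';
        rbase.select()         # fill the internal dictionary from the table row
        print rbase.get_delete_statement()
        # -> DELETE FROM raster WHERE id = 'soil@PERMANENT';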
@@ -292,13 +310,16 @@ class SQLDatabaseInterface(DictSQLSerializer):
        return True
 
    def get_select_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("SELECT", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument list in 
+           database specific style"""
+        return self.serialize("SELECT", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
    def get_select_statement_mogrified(self, dbif=None):
        """!Return the select statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
        """
        if not dbif:
            dbif = SQLDatabaseInterfaceConnection()
@@ -309,7 +330,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
        """!Select the content from the temporal database and store it
           in the internal dictionary structure
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
        """
        sql, args = self.get_select_statement()
        #print sql
@@ -344,13 +366,15 @@ class SQLDatabaseInterface(DictSQLSerializer):
        return True
 
    def get_insert_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
+        """!Return the sql statement and the argument 
+           list in database specific style"""
        return self.serialize("INSERT", self.get_table_name())
 
    def get_insert_statement_mogrified(self, dbif=None):
        """!Return the insert statement as mogrified string
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
        """
        if not dbif:
            dbif = SQLDatabaseInterfaceConnection()
@@ -361,7 +385,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
        """!Serialize the content of this object and store it in the temporal
           database using the internal identifier
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
        """
        sql, args = self.get_insert_statement()
        #print sql
@@ -376,13 +401,16 @@ class SQLDatabaseInterface(DictSQLSerializer):
             dbif.close()
             dbif.close()
 
 
     def get_update_statement(self):
     def get_update_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("UPDATE", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument list 
+           in database specific style"""
+        return self.serialize("UPDATE", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
 
     def get_update_statement_mogrified(self, dbif=None):
     def get_update_statement_mogrified(self, dbif=None):
         """!Return the update statement as mogrified string
         """!Return the update statement as mogrified string
 
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         """
         if not dbif:
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
             dbif = SQLDatabaseInterfaceConnection()
@@ -395,7 +423,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
 
 
            Only object entries which are exists (not None) are updated
            Only object entries which are exists (not None) are updated
 
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         """
         if self.ident is None:
         if self.ident is None:
             raise IOError("Missing identifer")
             raise IOError("Missing identifer")
@@ -413,13 +442,16 @@ class SQLDatabaseInterface(DictSQLSerializer):
             dbif.close()
             dbif.close()
 
 
     def get_update_all_statement(self):
     def get_update_all_statement(self):
-        """!Return the sql statement and the argument list in database specific style"""
-        return self.serialize("UPDATE ALL", self.get_table_name(), "WHERE id = \'" + str(self.ident) + "\'")
+        """!Return the sql statement and the argument 
+           list in database specific style"""
+        return self.serialize("UPDATE ALL", self.get_table_name(), 
+                              "WHERE id = \'" + str(self.ident) + "\'")
 
 
     def get_update_all_statement_mogrified(self, dbif=None):
     def get_update_all_statement_mogrified(self, dbif=None):
         """!Return the update all statement as mogrified string
         """!Return the update all statement as mogrified string
 
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         """
         if not dbif:
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
             dbif = SQLDatabaseInterfaceConnection()
@@ -427,10 +459,11 @@ class SQLDatabaseInterface(DictSQLSerializer):
         return dbif.mogrify_sql_statement(self.get_update_all_statement())
         return dbif.mogrify_sql_statement(self.get_update_all_statement())
 
 
     def update_all(self, dbif=None):
     def update_all(self, dbif=None):
-        """!Serialize the content of this object, including None objects, and update it in the temporal
-           database using the internal identifier
+        """!Serialize the content of this object, including None objects, 
+           and update it in the temporal database using the internal identifier
 
 
-           @param dbif: The database interface to be used, if None a temporary connection will be established
+           @param dbif: The database interface to be used, 
+                        if None a temporary connection will be established
         """
         """
         if self.ident is None:
         if self.ident is None:
             raise IOError("Missing identifer")
             raise IOError("Missing identifer")
@@ -455,10 +488,10 @@ class DatasetBase(SQLDatabaseInterface):
         basic identification information
         basic identification information
         
         
         Usage:
         Usage:
+        
+        @code
 
 
-        >>> import grass.temporal as tgis
-        >>> from datetime import datetime, date, time, timedelta
-        >>> t = tgis.DatasetBase("raster", "soil@PERMANENT", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
+        >>> t = DatasetBase("raster", "soil@PERMANENT", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
         >>> t.id
         >>> t.id
         'soil@PERMANENT'
         'soil@PERMANENT'
         >>> t.name
         >>> t.name
@@ -486,15 +519,19 @@ class DatasetBase(SQLDatabaseInterface):
         creator=soeren
         creator=soeren
         creation_time=2001-01-01 00:00:00
         creation_time=2001-01-01 00:00:00
         temporal_type=absolute
         temporal_type=absolute
+        
+        @endcode
     """
     """
     
     
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
                  creator=None, ctime=None,ttype=None):
                  creator=None, ctime=None,ttype=None):
         """!Constructor
         """!Constructor
         
         
-            @param table: The name of the temporal database table that should be used to store the values
-            @param ident: The unique identifier must be a combination of the dataset name, 
-                          layer name and the mapset name@mapset or name:1@mapset
+            @param table: The name of the temporal database table 
+                          that should be used to store the values
+            @param ident: The unique identifier must be a combination of 
+                          the dataset name, layer name and the mapset 
+                          name@mapset or name:1@mapset
                           used as as primary key in the temporal database
                           used as as primary key in the temporal database
             @param name: The name of the map or dataset
             @param name: The name of the map or dataset
             @param mapset: The name of the mapset 
             @param mapset: The name of the mapset 
@@ -525,8 +562,9 @@ class DatasetBase(SQLDatabaseInterface):
     def set_id(self, ident):
     def set_id(self, ident):
         """!Convenient method to set the unique identifier (primary key)
         """!Convenient method to set the unique identifier (primary key)
 
 
-           @param ident: The unique identifier must be a combination of the dataset name, 
-                         layer name and the mapset name@mapset or name:1@mapset
+           @param ident: The unique identifier must be a combination 
+                         of the dataset name, layer name and the mapset 
+                         name@mapset or name:1@mapset
         """
         """
         self.ident = ident
         self.ident = ident
         self.D["id"] = ident
         self.D["id"] = ident
@@ -575,7 +613,8 @@ class DatasetBase(SQLDatabaseInterface):
         self.D["creator"] = creator
         self.D["creator"] = creator
 
 
     def set_ctime(self, ctime=None):
     def set_ctime(self, ctime=None):
-        """!Set the creation time of the dataset, if nothing set the current time is used
+        """!Set the creation time of the dataset, 
+           if not set, the current time is used
 
 
            @param ctime: The current time of type datetime
            @param ctime: The current time of type datetime
         """
         """
@@ -617,13 +656,15 @@ class DatasetBase(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_map_id(self):
     def get_map_id(self):
-        """!Convenient method to get the unique map identifier without layer information
+        """!Convenient method to get the unique map identifier 
+           without layer information
 
 
            @param return the name of the vector map as name@mapset
            @param return the name of the vector map as name@mapset
         """
         """
         if self.id.find(":") >= 0:
         if self.id.find(":") >= 0:
             # Remove the layer identifier from the id
             # Remove the layer identifier from the id
-            return iself.d.split("@")[0].split(":")[0] + "@" + self.id.split("@")[1]
+            return self.id.split("@")[0].split(":")[0] + "@" + \
+                   self.id.split("@")[1]
         else:
         else:
             return self.id
             return self.id
 
 
@@ -782,9 +823,9 @@ class STDSBase(DatasetBase):
        
        
     Usage:
     Usage:
 
 
-    >>> import grass.temporal as tgis
-    >>> from datetime import datetime, date, time, timedelta
-    >>> t = tgis.STDSBase("stds", "soil@PERMANENT", semantic_type="average", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
+    @code
+    
+    >>> t = STDSBase("stds", "soil@PERMANENT", semantic_type="average", creator="soeren", ctime=datetime(2001,1,1), ttype="absolute")
     >>> t.semantic_type
     >>> t.semantic_type
     'average'
     'average'
     >>> t.print_info()
     >>> t.print_info()
@@ -804,6 +845,8 @@ class STDSBase(DatasetBase):
     creation_time=2001-01-01 00:00:00
     creation_time=2001-01-01 00:00:00
     temporal_type=absolute
     temporal_type=absolute
     semantic_type=average
     semantic_type=average
+    
+    @endcode
     """
     """
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
     def __init__(self, table=None, ident=None, name=None, mapset=None, 
                  semantic_type=None, creator=None, ctime=None,
                  semantic_type=None, creator=None, ctime=None,

+ 41 - 18
lib/python/temporal/core.py

@@ -9,6 +9,8 @@ SQL database and to establish a connection to the database.
 
 
 Usage:
 Usage:
 
 
+@code
+
 >>> import grass.temporal as tgis
 >>> import grass.temporal as tgis
 >>> # Create the temporal database
 >>> # Create the temporal database
 >>> tgis.create_temporal_database()
 >>> tgis.create_temporal_database()
@@ -18,10 +20,13 @@ Usage:
 >>> # Execute a SQL statement
 >>> # Execute a SQL statement
 >>> dbif.execute_transaction("SELECT datetime(0, 'unixepoch', 'localtime');")
 >>> dbif.execute_transaction("SELECT datetime(0, 'unixepoch', 'localtime');")
 >>> # Mogrify an SQL statement
 >>> # Mogrify an SQL statement
->>> dbif.mogrify_sql_statement(["SELECT name from raster_base where name = ?", ("precipitation",)])
+>>> dbif.mogrify_sql_statement(["SELECT name from raster_base where name = ?", 
+... ("precipitation",)])
 "SELECT name from raster_base where name = 'precipitation'"
 "SELECT name from raster_base where name = 'precipitation'"
 >>> dbif.close()
 >>> dbif.close()
 
 
+@endcode
+
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -47,7 +52,8 @@ if "driver" in kv:
         # Needed for dictionary like cursors
         # Needed for dictionary like cursors
         import psycopg2.extras
         import psycopg2.extras
     else:
     else:
-        core.fatal(_("Unable to initialize the temporal DBMI interface. Use t.connect to specify the driver and the database string"))
+        core.fatal(_("Unable to initialize the temporal DBMI interface. Use "
+                     "t.connect to specify the driver and the database string"))
 else:
 else:
     # Use the default sqlite variable
     # Use the default sqlite variable
     core.run_command("t.connect", flags="d")
     core.run_command("t.connect", flags="d")
@@ -67,13 +73,17 @@ def get_temporal_dbmi_init_string():
                 "$LOCATION_NAME", grassenv["LOCATION_NAME"])
                 "$LOCATION_NAME", grassenv["LOCATION_NAME"])
             return string
             return string
         else:
         else:
-            core.fatal(_("Unable to initialize the temporal GIS DBMI interface. Use t.connect to specify the driver and the database string"))
+            core.fatal(_("Unable to initialize the temporal GIS DBMI "
+                         "interface. Use t.connect to specify the driver "
+                         "and the database string"))
     elif dbmi.__name__ == "psycopg2":
     elif dbmi.__name__ == "psycopg2":
         if "database" in kv:
         if "database" in kv:
             string = kv["database"]
             string = kv["database"]
             return string
             return string
         else:
         else:
-            core.fatal(_("Unable to initialize the temporal GIS DBMI interface. Use t.connect to specify the driver and the database string"))
+            core.fatal(_("Unable to initialize the temporal GIS DBMI "
+                         "interface. Use t.connect to specify the driver "
+                         "and the database string"))
             return "dbname=grass_test user=soeren password=abcdefgh"
             return "dbname=grass_test user=soeren password=abcdefgh"
 
 
 ###############################################################################
 ###############################################################################
@@ -88,10 +98,12 @@ def get_sql_template_path():
 
 
 
 
 def create_temporal_database():
 def create_temporal_database():
-    """!This function creates the grass location database structure for raster, vector and raster3d maps
-       as well as for the space-time datasets strds, str3ds and stvds
+    """!This function creates the grass location database structure for raster, 
+       vector and raster3d maps as well as for the space-time datasets strds, 
+       str3ds and stvds
 
 
-       This functions must be called before any spatio-temporal processing can be started
+       This function must be called before any spatio-temporal processing
+       can be started
     """
     """
 
 
     database = get_temporal_dbmi_init_string()
     database = get_temporal_dbmi_init_string()
@@ -108,7 +120,8 @@ def create_temporal_database():
         connection = dbmi.connect(database)
         connection = dbmi.connect(database)
         cursor = connection.cursor()
         cursor = connection.cursor()
         # Check for raster_base table
         # Check for raster_base table
-        cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_name=%s)", ('raster_base',))
+        cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables "
+                       "WHERE table_name=%s)", ('raster_base',))
         db_exists = cursor.fetchone()[0]
         db_exists = cursor.fetchone()[0]
         connection.commit()
         connection.commit()
         cursor.close()
         cursor.close()
@@ -214,7 +227,8 @@ class SQLDatabaseInterfaceConnection():
         init = get_temporal_dbmi_init_string()
         init = get_temporal_dbmi_init_string()
         #print "Connect to",  self.database
         #print "Connect to",  self.database
         if dbmi.__name__ == "sqlite3":
         if dbmi.__name__ == "sqlite3":
-            self.connection = dbmi.connect(init, detect_types=dbmi.PARSE_DECLTYPES | dbmi.PARSE_COLNAMES)
+            self.connection = dbmi.connect(init, 
+                    detect_types=dbmi.PARSE_DECLTYPES | dbmi.PARSE_COLNAMES)
             self.connection.row_factory = dbmi.Row
             self.connection.row_factory = dbmi.Row
             self.connection.isolation_level = None
             self.connection.isolation_level = None
             self.cursor = self.connection.cursor()
             self.cursor = self.connection.cursor()
@@ -277,8 +291,8 @@ class SQLDatabaseInterfaceConnection():
                         break
                         break
 
 
                     if args[count] is None:
                     if args[count] is None:
-                        statement = "%sNULL%s" % (statement[0:
-                                                            pos], statement[pos + 1:])
+                        statement = "%sNULL%s" % (statement[0:pos], 
+                                                  statement[pos + 1:])
                     elif isinstance(args[count], (int, long)):
                     elif isinstance(args[count], (int, long)):
                         statement = "%s%d%s" % (statement[0:pos], args[count],
                         statement = "%s%d%s" % (statement[0:pos], args[count],
                                                 statement[pos + 1:])
                                                 statement[pos + 1:])
@@ -286,8 +300,11 @@ class SQLDatabaseInterfaceConnection():
                         statement = "%s%f%s" % (statement[0:pos], args[count],
                         statement = "%s%f%s" % (statement[0:pos], args[count],
                                                 statement[pos + 1:])
                                                 statement[pos + 1:])
                     else:
                     else:
-                        # Default is a string, this works for datetime objects too
-                        statement = "%s\'%s\'%s" % (statement[0:pos], str(args[count]), statement[pos + 1:])
+                        # Default is a string, this works for datetime 
+                        # objects too
+                        statement = "%s\'%s\'%s" % (statement[0:pos], 
+                                                    str(args[count]), 
+                                                    statement[pos + 1:])
                     count += 1
                     count += 1
 
 
                 return statement
                 return statement
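The placeholder handling above can be reproduced in a small standalone sketch; the function name and the sample statement are assumptions and only mirror the sqlite3-style "?" substitution shown in this hunk:

@code
def mogrify_sketch(statement, args):
    # None -> NULL, integers -> %d, floats -> %f, everything else -> quoted string
    for arg in args:
        pos = statement.find("?")
        if pos < 0:
            break
        if arg is None:
            statement = "%sNULL%s" % (statement[0:pos], statement[pos + 1:])
        elif isinstance(arg, int):
            statement = "%s%d%s" % (statement[0:pos], arg, statement[pos + 1:])
        elif isinstance(arg, float):
            statement = "%s%f%s" % (statement[0:pos], arg, statement[pos + 1:])
        else:
            statement = "%s'%s'%s" % (statement[0:pos], str(arg),
                                      statement[pos + 1:])
    return statement

print(mogrify_sketch("SELECT name from raster_base where name = ?",
                     ("precipitation",)))
# SELECT name from raster_base where name = 'precipitation'
@endcode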
@@ -301,7 +318,7 @@ class SQLDatabaseInterfaceConnection():
             @param statement The executable SQL statement or SQL script
             @param statement The executable SQL statement or SQL script
         """
         """
         connect = False
         connect = False
-        if self.connected == False:
+        if not self.connected:
             self.connect()
             self.connect()
             connect = True
             connect = True
 
 
@@ -309,6 +326,7 @@ class SQLDatabaseInterfaceConnection():
         sql_script += "BEGIN TRANSACTION;\n"
         sql_script += "BEGIN TRANSACTION;\n"
         sql_script += statement
         sql_script += statement
         sql_script += "END TRANSACTION;"
         sql_script += "END TRANSACTION;"
+        
         try:
         try:
             if dbmi.__name__ == "sqlite3":
             if dbmi.__name__ == "sqlite3":
                 self.cursor.executescript(statement)
                 self.cursor.executescript(statement)
@@ -316,9 +334,10 @@ class SQLDatabaseInterfaceConnection():
                 self.cursor.execute(statement)
                 self.cursor.execute(statement)
             self.connection.commit()
             self.connection.commit()
         except:
         except:
-            if connect == True:
+            if connect:
                 self.close()
                 self.close()
-            core.error(_("Unable to execute transaction:\n %s") % (statement))
+            core.error(_("Unable to execute transaction:\n %(sql)s" % \
+                         {"sql":statement}))
             raise
             raise
 
 
         if connect:
         if connect:
@@ -327,13 +346,17 @@ class SQLDatabaseInterfaceConnection():
 ###############################################################################
 ###############################################################################
 
 
 def init_dbif(dbif):
 def init_dbif(dbif):
-    """!This method checks if the database interface connection exists, if not a new one
-        will be created, connected and True will be returned
+    """!This method checks if the database interface connection exists, 
+        if not a new one will be created, connected and True will be returned
 
 
         Usage code sample:
         Usage code sample:
+        @code
+        
         dbif, connect = tgis.init_dbif(dbif)
         dbif, connect = tgis.init_dbif(dbif)
         if connect:
         if connect:
             dbif.close()
             dbif.close()
+        
+        @endcode
     """
     """
     if dbif is None:
     if dbif is None:
         dbif = SQLDatabaseInterfaceConnection()
         dbif = SQLDatabaseInterfaceConnection()
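As a usage sketch of the pattern documented above (the surrounding function is hypothetical; init_dbif() and get_registered_maps() are the calls used elsewhere in this library):

@code
import grass.temporal as tgis

def count_registered_maps_sketch(stds, dbif=None):
    # Reuse an open connection if one was passed in, otherwise open a
    # temporary one and close it again when done.
    dbif, connect = tgis.init_dbif(dbif)
    try:
        rows = stds.get_registered_maps("id", None, "start_time", dbif)
        return len(rows) if rows else 0
    finally:
        if connect:
            dbif.close()
@endcode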

+ 290 - 57
lib/python/temporal/datetime_math.py

@@ -2,16 +2,7 @@
 
 
 @brief GRASS Python scripting module (temporal GIS functions)
 @brief GRASS Python scripting module (temporal GIS functions)
 
 
-Temporal GIS datetime math functions to be used in Python scripts.
-
-Usage:
-
-@code
-import grass.temporal as tgis
-
-tgis.increment_datetime_by_string(mydate, "3 month, 2 hours")
-...
-@endcode
+Temporal GIS datetime math functions to be used in library functions and modules.
 
 
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
@@ -26,10 +17,11 @@ import copy
 from dateutil import parser
 from dateutil import parser
 
 
 DAY_IN_SECONDS = 86400
 DAY_IN_SECONDS = 86400
-SECOND_AS_DAY  = 1.1574074074074073e-05
+SECOND_AS_DAY = 1.1574074074074073e-05
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def relative_time_to_time_delta(value):
 def relative_time_to_time_delta(value):
     """!Convert the double value representing days
     """!Convert the double value representing days
        into a timedelta object.
        into a timedelta object.
@@ -43,6 +35,7 @@ def relative_time_to_time_delta(value):
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def time_delta_to_relative_time(delta):
 def time_delta_to_relative_time(delta):
     """!Convert the time delta into a
     """!Convert the time delta into a
        double value, representing days.
        double value, representing days.
@@ -52,17 +45,69 @@ def time_delta_to_relative_time(delta):
 
 
 ###############################################################################
 ###############################################################################
 
 
-def increment_datetime_by_string(mydate, increment, mult = 1):
-    """!Return a new datetime object incremented with the provided 
+
+def increment_datetime_by_string(mydate, increment, mult=1):
+    """!Return a new datetime object incremented with the provided
        relative dates specified as string.
        relative dates specified as string.
-       Additional a multiplier can be specified to multiply the increment 
+       Additionally, a multiplier can be specified to multiply the increment
        before adding to the provided datetime object.
        before adding to the provided datetime object.
-
+       
+       Usage:
+       
+       @code
+       
+        >>> dt = datetime(2001, 9, 1, 0, 0, 0)
+        >>> string = "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2003, 2, 18, 12, 5)
+
+        >>> dt = datetime(2001, 11, 1, 0, 0, 0)
+        >>> string = "1 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 12, 1, 0, 0)
+
+        >>> dt = datetime(2001, 11, 1, 0, 0, 0)
+        >>> string = "13 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2002, 12, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "72 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2007, 1, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "72 months"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2007, 1, 1, 0, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "5 minutes"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 1, 0, 5)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "49 hours"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 3, 1, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "3600 seconds"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 1, 1, 0)
+         
+        >>> dt = datetime(2001, 1, 1, 0, 0, 0)
+        >>> string = "30 days"
+        >>> increment_datetime_by_string(dt, string)
+        datetime.datetime(2001, 1, 31, 0, 0)
+        
+        @endcode
+       
        @param mydate A datetime object to incremented
        @param mydate A datetime object to incremented
        @param increment A string providing increment information:
        @param increment A string providing increment information:
-                  The string may include comma separated values of type seconds, 
+                  The string may include comma separated values of type seconds,
                   minutes, hours, days, weeks, months and years
                   minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00 
+                  Example: Increment the datetime 2001-01-01 00:00:00
                   with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
                   with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
                   will result in the datetime 2003-02-18 12:05:00
                   will result in the datetime 2003-02-18 12:05:00
        @param mult A multiplier, default is 1
        @param mult A multiplier, default is 1
@@ -83,12 +128,11 @@ def increment_datetime_by_string(mydate, increment, mult = 1):
         incparts = increment.split(",")
         incparts = increment.split(",")
         for incpart in incparts:
         for incpart in incparts:
             inclist.append(incpart.strip().split(" "))
             inclist.append(incpart.strip().split(" "))
-            
 
 
         for inc in inclist:
         for inc in inclist:
-	    if len(inc) < 2:
-		core.error(_("Wrong increment format: %s") % (increment))
-		return None
+            if len(inc) < 2:
+                core.error(_("Wrong increment format: %s") % (increment))
+                return None
             if inc[1].find("seconds") >= 0:
             if inc[1].find("seconds") >= 0:
                 seconds = mult * int(inc[0])
                 seconds = mult * int(inc[0])
             elif inc[1].find("minutes") >= 0:
             elif inc[1].find("minutes") >= 0:
@@ -108,13 +152,16 @@ def increment_datetime_by_string(mydate, increment, mult = 1):
                 return None
                 return None
 
 
         return increment_datetime(mydate, years, months, weeks, days, hours, minutes, seconds)
         return increment_datetime(mydate, years, months, weeks, days, hours, minutes, seconds)
-    
+
     return mydate
     return mydate
 
 
 ###############################################################################
 ###############################################################################
 
 
-def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0):
-    """!Return a new datetime object incremented with the provided relative dates and times"""
+
+def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, 
+                       minutes=0, seconds=0):
+    """!Return a new datetime object incremented with the provided 
+       relative dates and times"""
 
 
     tdelta_seconds = timedelta(seconds=seconds)
     tdelta_seconds = timedelta(seconds=seconds)
     tdelta_minutes = timedelta(minutes=minutes)
     tdelta_minutes = timedelta(minutes=minutes)
@@ -124,14 +171,13 @@ def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minu
     tdelta_months = timedelta(0)
     tdelta_months = timedelta(0)
     tdelta_years = timedelta(0)
     tdelta_years = timedelta(0)
 
 
-
     if months > 0:
     if months > 0:
         # Compute the actual number of days in the month to add as timedelta
         # Compute the actual number of days in the month to add as timedelta
         year = mydate.year
         year = mydate.year
         month = mydate.month
         month = mydate.month
 
 
         all_months = int(months) + int(month)
         all_months = int(months) + int(month)
-        years_to_add = int(all_months/12.001)
+        years_to_add = int(all_months / 12.001)
         residual_months = all_months - (years_to_add * 12)
         residual_months = all_months - (years_to_add * 12)
 
 
         # Make a deep copy of the datetime object
         # Make a deep copy of the datetime object
@@ -141,7 +187,7 @@ def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minu
         if residual_months == 0:
         if residual_months == 0:
             residual_months = 1
             residual_months = 1
 
 
-        dt1 = dt1.replace(year = year + years_to_add, month = residual_months)
+        dt1 = dt1.replace(year=year + years_to_add, month=residual_months)
         tdelta_months = dt1 - mydate
         tdelta_months = dt1 - mydate
 
 
     if years > 0:
     if years > 0:
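A quick worked example of the month arithmetic above, using assumed sample values (November plus an increment of three months):

@code
month = 11    # start month of the datetime object
months = 3    # increment in months

all_months = int(months) + int(month)                # 14
years_to_add = int(all_months / 12.001)              # 1
residual_months = all_months - (years_to_add * 12)   # 2

# So 2001-11-01 incremented by "3 months" ends up in February 2002.
@endcode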
@@ -152,12 +198,60 @@ def increment_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0, minu
         tdelta_years = dt1 - mydate
         tdelta_years = dt1 - mydate
 
 
     return mydate + tdelta_seconds + tdelta_minutes + tdelta_hours + \
     return mydate + tdelta_seconds + tdelta_minutes + tdelta_hours + \
-                    tdelta_days + tdelta_weeks + tdelta_months + tdelta_years
+        tdelta_days + tdelta_weeks + tdelta_months + tdelta_years
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def adjust_datetime_to_granularity(mydate, granularity):
 def adjust_datetime_to_granularity(mydate, granularity):
-    """!Mofiy the datetime object to fit the given granularity    """
+    """!Modify the datetime object to fit the given granularity    
+    
+        * Years will start at the first of January
+        * Months will start at the first day of the month
+        * Days will start at the first hour of the day
+        * Hours will start at the first minute of an hour
+        * Minutes will start at the first second of a minute
+        
+        Usage:
+        
+        @code
+        
+        >>> dt = datetime(2001, 8, 8, 12,30,30)
+        >>> adjust_datetime_to_granularity(dt, "5 seconds")
+        datetime.datetime(2001, 8, 8, 12, 30, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "20 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "20 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 hours")
+        datetime.datetime(2001, 8, 8, 12, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "5 days")
+        datetime.datetime(2001, 8, 8, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 weeks")
+        datetime.datetime(2001, 8, 6, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "6 months")
+        datetime.datetime(2001, 8, 1, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 years")
+        datetime.datetime(2001, 1, 1, 0, 0)
+        
+        >>> adjust_datetime_to_granularity(dt, "2 years, 3 months, 5 days, 3 hours, 3 minutes, 2 seconds")
+        datetime.datetime(2001, 8, 8, 12, 30, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 months, 5 days, 3 minutes")
+        datetime.datetime(2001, 8, 8, 12, 30)
+        
+        >>> adjust_datetime_to_granularity(dt, "3 weeks, 5 days")
+        datetime.datetime(2001, 8, 8, 0, 0)
+        
+        @endcode
+    """
 
 
     if granularity:
     if granularity:
 
 
@@ -203,30 +297,30 @@ def adjust_datetime_to_granularity(mydate, granularity):
                 return None
                 return None
 
 
         if has_seconds:
         if has_seconds:
-            pass          
-        elif has_minutes: # Start at 0 seconds
+            pass
+        elif has_minutes:  # Start at 0 seconds
             seconds = 0
             seconds = 0
-        elif has_hours: # Start at 0 minutes and seconds
+        elif has_hours:  # Start at 0 minutes and seconds
             seconds = 0
             seconds = 0
             minutes = 0
             minutes = 0
-        elif has_days: # Start at 0 hours, minutes and seconds
+        elif has_days:  # Start at 0 hours, minutes and seconds
             seconds = 0
             seconds = 0
             minutes = 0
             minutes = 0
             hours = 0
             hours = 0
-        elif has_weeks: # Start at the first day of the week (Monday) at 00:00:00
+        elif has_weeks:  # Start at the first day of the week (Monday) at 00:00:00
             seconds = 0
             seconds = 0
             minutes = 0
             minutes = 0
             hours = 0
             hours = 0
             if days > weekday:
             if days > weekday:
-                days = days - weekday # this needs to be fixed
+                days = days - weekday  # this needs to be fixed
             else:
             else:
-                days = days + weekday # this needs to be fixed
-        elif has_months: # Start at the first day of the month at 00:00:00
+                days = days + weekday  # this needs to be fixed
+        elif has_months:  # Start at the first day of the month at 00:00:00
             seconds = 0
             seconds = 0
             minutes = 0
             minutes = 0
             hours = 0
             hours = 0
             days = 1
             days = 1
-        elif has_years: # Start at the first day of the first month at 00:00:00
+        elif has_years:  # Start at the first day of the first month at 00:00:00
             seconds = 0
             seconds = 0
             minutes = 0
             minutes = 0
             hours = 0
             hours = 0
@@ -234,16 +328,142 @@ def adjust_datetime_to_granularity(mydate, granularity):
             months = 1
             months = 1
 
 
         dt = copy.copy(mydate)
         dt = copy.copy(mydate)
-        result = dt.replace(year=years, month=months, day=days, hour=hours, minute=minutes, second=seconds)
-        core.verbose(_("Adjust datetime from %s to %s with granularity %s") % (dt, result, granularity))
-
-        return result
+        return dt.replace(year=years, month=months, day=days,
+                            hour=hours, minute=minutes, second=seconds)
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def compute_datetime_delta(start, end):
 def compute_datetime_delta(start, end):
-    """!Return a dictionary with the accumulated delta in year, month, day, hour, minute and second
-    
+    """!Return a dictionary with the accumulated delta in year, month, day, 
+       hour, minute and second
+       
+        Usage:
+        
+        @code
+        
+        >>> start = datetime(2001, 1, 1, 00,00,00)
+        >>> end = datetime(2001, 1, 1, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,14)
+        >>> end = datetime(2001, 1, 1, 00,00,44)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 30, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,44)
+        >>> end = datetime(2001, 1, 1, 00,01,14)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 30, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 1}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,30)
+        >>> end = datetime(2001, 1, 1, 00,05,30)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 300, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 5}
+        
+        >>> start = datetime(2001, 1, 1, 00,00,00)
+        >>> end = datetime(2001, 1, 1, 00,01,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 0, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 1}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 01,45,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 60}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 01,15,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 30}
+        
+        >>> start = datetime(2011,10,31, 00,45,00)
+        >>> end = datetime(2011,10,31, 12,15,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 12, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 690}
+        
+        >>> start = datetime(2011,10,31, 00,00,00)
+        >>> end = datetime(2011,10,31, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 1, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,10,31, 00,00,00)
+        >>> end = datetime(2011,11,01, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 25, 'second': 0, 'max_days': 1, 'year': 0, 'day': 1, 'minute': 0}
+        
+        >>> start = datetime(2011,10,31, 12,00,00)
+        >>> end = datetime(2011,11,01, 06,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 18, 'second': 0, 'max_days': 0, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,11,01, 00,00,00)
+        >>> end = datetime(2011,12,01, 01,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 721, 'month': 1, 'second': 0, 'max_days': 30, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,11,01, 00,00,00)
+        >>> end = datetime(2011,11,05, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 4, 'year': 0, 'day': 4, 'minute': 0}
+        
+        >>> start = datetime(2011,10,06, 00,00,00)
+        >>> end = datetime(2011,11,05, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 30, 'year': 0, 'day': 30, 'minute': 0}
+        
+        >>> start = datetime(2011,12,02, 00,00,00)
+        >>> end = datetime(2012,01,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'second': 0, 'max_days': 30, 'year': 1, 'day': 30, 'minute': 0}
+        
+        >>> start = datetime(2011,01,01, 00,00,00)
+        >>> end = datetime(2011,02,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 1, 'second': 0, 'max_days': 31, 'year': 0, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,12,01, 00,00,00)
+        >>> end = datetime(2012,01,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 1, 'second': 0, 'max_days': 31, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,12,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 6, 'second': 0, 'max_days': 183, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2021,06,01, 00,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 120, 'second': 0, 'max_days': 3653, 'year': 10, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,00,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,30,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 527790}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 12,00,05)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 8796, 'month': 12, 'second': 31665605, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,30,00)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 12, 'second': 0, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 527070}
+        
+        >>> start = datetime(2011,06,01, 00,00,00)
+        >>> end = datetime(2012,06,01, 00,00,05)
+        >>> compute_datetime_delta(start, end)
+        {'hour': 0, 'month': 12, 'second': 31622405, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
+        
+        @endcode
+        
        @return A dictionary with year, month, day, hour, minute and second as keys()
        @return A dictionary with year, month, day, hour, minute and second as keys()
     """
     """
     comp = {}
     comp = {}
@@ -281,11 +501,11 @@ def compute_datetime_delta(start, end):
     else:
     else:
         d = end.hour - start.hour
         d = end.hour - start.hour
         if d < 0:
         if d < 0:
-            d = d + 24  + 24 * day_diff
+            d = d + 24 + 24 * day_diff
         else:
         else:
             d = d + 24 * day_diff
             d = d + 24 * day_diff
         comp["hour"] = d
         comp["hour"] = d
-    
+
     # Minutes
     # Minutes
     if start.minute == 0 and end.minute == 0:
     if start.minute == 0 and end.minute == 0:
         comp["minute"] = 0
         comp["minute"] = 0
@@ -298,7 +518,7 @@ def compute_datetime_delta(start, end):
                 d = d + 24 * 60 * day_diff
                 d = d + 24 * 60 * day_diff
         elif d == 0:
         elif d == 0:
             if comp["hour"]:
             if comp["hour"]:
-                d = 60* comp["hour"]
+                d = 60 * comp["hour"]
             else:
             else:
                 d = 24 * 60 * day_diff
                 d = 24 * 60 * day_diff
 
 
@@ -311,14 +531,14 @@ def compute_datetime_delta(start, end):
         d = end.second - start.second
         d = end.second - start.second
         if d != 0:
         if d != 0:
             if comp["minute"]:
             if comp["minute"]:
-                d = d + 60* comp["minute"]
+                d = d + 60 * comp["minute"]
             elif comp["hour"]:
             elif comp["hour"]:
-                d = d + 3600* comp["hour"]
+                d = d + 3600 * comp["hour"]
             else:
             else:
                 d = d + 24 * 60 * 60 * day_diff
                 d = d + 24 * 60 * 60 * day_diff
         elif d == 0:
         elif d == 0:
             if comp["minute"]:
             if comp["minute"]:
-                d = 60* comp["minute"]
+                d = 60 * comp["minute"]
             elif comp["hour"]:
             elif comp["hour"]:
                 d = 3600 * comp["hour"]
                 d = 3600 * comp["hour"]
             else:
             else:
@@ -329,12 +549,15 @@ def compute_datetime_delta(start, end):
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def string_to_datetime(time_string):
 def string_to_datetime(time_string):
-    """!Convert a string into a datetime object using the dateutil parser. Return None in case of failure"""
+    """!Convert a string into a datetime object using the dateutil parser. 
+       Return None in case of failure"""
 
 
     # BC is not supported
     # BC is not supported
     if time_string.find("bc") > 0:
     if time_string.find("bc") > 0:
-        core.error("Dates Before Christ are not supported in the temporal database")
+        core.error("Dates Before Christ are not supported "
+                   "in the temporal database")
         return None
         return None
 
 
     try:
     try:
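A small sketch of what the dateutil parser used above returns for a typical timestamp string (the input is an assumed example):

@code
from dateutil import parser

# string_to_datetime() delegates the actual parsing to dateutil:
print(parser.parse("2001-01-01 10:30:00"))
# 2001-01-01 10:30:00
@endcode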
@@ -345,17 +568,27 @@ def string_to_datetime(time_string):
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def datetime_to_grass_datetime_string(dt):
 def datetime_to_grass_datetime_string(dt):
     """!Convert a python datetime object into a GRASS datetime string"""
     """!Convert a python datetime object into a GRASS datetime string"""
 
 
     # GRASS datetime month names
     # GRASS datetime month names
-    month_names  = ["", "jan","feb","mar","apr","may","jun","jul","aug","sep","oct","nov","dec"]
+    month_names = ["", "jan", "feb", "mar", "apr", "may", "jun",
+                   "jul", "aug", "sep", "oct", "nov", "dec"]
 
 
     # Check for time zone info in the datetime object
     # Check for time zone info in the datetime object
-    if dt.tzinfo != None:
-        string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i"%(dt.day, month_names[dt.month], dt.year, \
-                 dt.hour, dt.minute, dt.second, dt.tzinfo._offset.seconds/60)
+    if dt.tzinfo is not None:
+        string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i" % (dt.day, 
+                 month_names[dt.month], dt.year,
+                 dt.hour, dt.minute, dt.second, dt.tzinfo._offset.seconds / 60)
     else:
     else:
-        string = "%.2i %s %.4i %.2i:%.2i:%.2i"%(dt.day, month_names[dt.month], dt.year, dt.hour, dt.minute, dt.second)
+        string = "%.2i %s %.4i %.2i:%.2i:%.2i" % (dt.day, month_names[
+            dt.month], dt.year, dt.hour, dt.minute, dt.second)
 
 
     return string
     return string
+
+###############################################################################
+
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
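For reference, a standalone sketch of the formatting performed in the branch without time zone information above; the sample datetime is an assumption:

@code
from datetime import datetime

month_names = ["", "jan", "feb", "mar", "apr", "may", "jun",
               "jul", "aug", "sep", "oct", "nov", "dec"]
dt = datetime(2001, 1, 1, 10, 30, 0)
print("%.2i %s %.4i %.2i:%.2i:%.2i" % (dt.day, month_names[dt.month],
                                       dt.year, dt.hour, dt.minute, dt.second))
# 01 jan 2001 10:30:00
@endcode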

+ 208 - 185
lib/python/temporal/extract.py

@@ -4,12 +4,6 @@
 
 
 Temporal GIS related functions to be used in Python scripts.
 Temporal GIS related functions to be used in Python scripts.
 
 
-Usage:
-
-@code
-import grass.temporal as tgis
-@endcode
-
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -23,30 +17,37 @@ from multiprocessing import Process
 
 
 ############################################################################
 ############################################################################
 
 
-def extract_dataset(input, output, type, where, expression, base, nprocs=1, register_null=False, layer=1, vtype="point,line,boundary,centroid,area,face"):
+
+def extract_dataset(input, output, type, where, expression, base, nprocs=1,
+                    register_null=False, layer=1,
+                    vtype="point,line,boundary,centroid,area,face"):
     """!Extract a subset of a space time raster, raster3d or vector dataset
     """!Extract a subset of a space time raster, raster3d or vector dataset
-    
+
        A mapcalc expression can be provided to process the temporal extracted maps.
        A mapcalc expression can be provided to process the temporal extracted maps.
        Mapcalc expressions are supported for raster and raster3d maps.
        Mapcalc expressions are supported for raster and raster3d maps.
-       
-       @param input The name of the input space time raster/raster3d dataset 
+
+       @param input The name of the input space time raster/raster3d dataset
        @param output The name of the extracted new space time raster/raster3d dataset
        @param output The name of the extracted new space time raster/raster3d dataset
        @param type The type of the dataset: "raster", "raster3d" or vector
        @param type The type of the dataset: "raster", "raster3d" or vector
        @param where The temporal SQL WHERE statement for subset extraction
        @param where The temporal SQL WHERE statement for subset extraction
        @param expression The r(3).mapcalc expression or the v.extract where statement
        @param expression The r(3).mapcalc expression or the v.extract where statement
-       @param base The base name of the new created maps in case a mapclac expression is provided 
+       @param base The base name of the newly created maps in case a mapcalc
+              expression is provided
        @param nprocs The number of parallel processes to be used for mapcalc processing
        @param nprocs The number of parallel processes to be used for mapcalc processing
-       @param register_null Set this number True to register empty maps (only raster and raster3d maps)
-       @param layer The vector layer number to be used when no timestamped layer is present, default is 1
-       @param vtype The feature type to be extracted for vector maps, default is point,line,boundary,centroid,area and face
+       @param register_null Set this flag to True to register empty maps
+             (only raster and raster3d maps)
+       @param layer The vector layer number to be used when no timestamped
+              layer is present, default is 1
+       @param vtype The feature type to be extracted for vector maps, default
+              is point,line,boundary,centroid,area and face
     """
     """
 
 
     # Check the parameters
     # Check the parameters
 
 
     if expression and not base:
     if expression and not base:
         core.fatal(_("You need to specify the base name of new created maps"))
         core.fatal(_("You need to specify the base name of new created maps"))
-    
-    mapset =  core.gisenv()["MAPSET"]
+
+    mapset = core.gisenv()["MAPSET"]
 
 
     if input.find("@") >= 0:
     if input.find("@") >= 0:
         id = input
         id = input
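A hedged usage sketch for extract_dataset(); the dataset names, WHERE clause and expression are purely illustrative and assume that a space time raster dataset of that name exists in the current mapset and that the function is exported by grass.temporal like the other library functions:

@code
import grass.temporal as tgis

# Extract a temporal subset and scale the selected maps with r.mapcalc;
# all names below are assumed example values.
tgis.extract_dataset(input="precip@PERMANENT", output="precip_2001",
                     type="raster", where="start_time >= '2001-01-01'",
                     expression="precip@PERMANENT / 100.0",
                     base="precip_2001_map", nprocs=2)
@endcode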
@@ -54,23 +55,23 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1, regi
         id = input + "@" + mapset
         id = input + "@" + mapset
 
 
     if type == "raster":
     if type == "raster":
-	sp = space_time_raster_dataset(id)
+        sp = space_time_raster_dataset(id)
     elif type == "raster3d":
     elif type == "raster3d":
-	sp = space_time_raster3d_dataset(id)
+        sp = space_time_raster3d_dataset(id)
     elif type == "vector":
     elif type == "vector":
-	sp = space_time_vector_dataset(id)
-	
+        sp = space_time_vector_dataset(id)
+
     dummy = sp.get_new_map_instance(None)
     dummy = sp.get_new_map_instance(None)
-	
+
     dbif = ()
     dbif = ()
     dbif.connect()
     dbif.connect()
-    
-    if sp.is_in_db(dbif) == False:
-	dbif.close()
+
+    if not sp.is_in_db(dbif):
+        dbif.close()
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
 
 
     if expression and not base:
     if expression and not base:
-	dbif.close()
+        dbif.close()
         core.fatal(_("Please specify base="))
         core.fatal(_("Please specify base="))
 
 
     sp.select(dbif)
     sp.select(dbif)
@@ -82,192 +83,214 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1, regi
 
 
     # The new space time dataset
     # The new space time dataset
     new_sp = sp.get_new_instance(out_id)
     new_sp = sp.get_new_instance(out_id)
-	
+
     if new_sp.is_in_db():
     if new_sp.is_in_db():
-        if core.overwrite() == False:
-	    dbif.close()
-            core.fatal(_("Space time %s dataset <%s> is already in database, use overwrite flag to overwrite") % (type, out_id))
+        if not core.overwrite():
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> is already in database,"
+                         " use overwrite flag to overwrite") % (type, out_id))
     if type == "vector":
     if type == "vector":
-	rows = sp.get_registered_maps("id,name,mapset,layer", where, "start_time", dbif)
+        rows = sp.get_registered_maps(
+            "id,name,mapset,layer", where, "start_time", dbif)
     else:
     else:
-	rows = sp.get_registered_maps("id", where, "start_time", dbif)
+        rows = sp.get_registered_maps("id", where, "start_time", dbif)
 
 
     new_maps = {}
     new_maps = {}
     if rows:
     if rows:
-	num_rows = len(rows)
-	
-	core.percent(0, num_rows, 1)
-	
-	# Run the mapcalc expression
+        num_rows = len(rows)
+
+        core.percent(0, num_rows, 1)
+
+        # Run the mapcalc expression
         if expression:
         if expression:
-	    count = 0
-	    proc_count = 0
-	    proc_list = []
-	    
-	    for row in rows:
-		count += 1
-		
-		core.percent(count, num_rows, 1)
-		
-		map_name = "%s_%i" % (base, count)
-		
-		# We need to modify the r(3).mapcalc expression
-		if type != "vector":
-		    expr = "%s = %s" % (map_name, expression)
-		    
-		    expr = expr.replace(sp.base.get_map_id(), row["id"])
-		    expr = expr.replace(sp.base.get_name(), row["id"])
-		    
-		    # We need to build the id
-		    map_id = dummy.build_id(map_name, mapset)
-		else:
-		    map_id = dummy.build_id(map_name, mapset, row["layer"])
-
-		new_map = sp.get_new_map_instance(map_id)
-
-		# Check if new map is in the temporal database
-		if new_map.is_in_db(dbif):
-		    if core.overwrite() == True:
-			# Remove the existing temporal database entry
-			new_map.delete(dbif)
-			new_map = sp.get_new_map_instance(map_id)
-		    else:
-			core.error(_("Map <%s> is already in temporal database, use overwrite flag to overwrite")%(new_map.get_map_id()))
-			continue
-		
-		# Add process to the process list
-		if type == "raster":
-		    core.verbose(_("Apply r.mapcalc expression: \"%s\"") % expr)
-		    proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
-		elif type == "raster3d":
-		    core.verbose(_("Apply r3.mapcalc expression: \"%s\"") % expr)
-		    proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
-		elif type == "vector":
-		    core.verbose(_("Apply v.extract where statement: \"%s\"") % expression)
-		    if row["layer"]:
-			proc_list.append(Process(target=run_vector_extraction, args=(row["name"] + "@" + row["mapset"], map_name, row["layer"], vtype, expression)))
-		    else:
-			proc_list.append(Process(target=run_vector_extraction, args=(row["name"] + "@" + row["mapset"], map_name, layer, vtype, expression)))
-		
-		proc_list[proc_count].start()
-		proc_count += 1
-		
-		# Join processes if the maximum number of processes are reached or the end of the
-		# loop is reached
-		if proc_count == nprocs or proc_count == num_rows:
-		    proc_count = 0
-		    exitcodes = 0
-		    for proc in proc_list:
-			proc.join()
-			exitcodes += proc.exitcode
-			
-		    if exitcodes != 0:
-			dbif.close()
-			core.fatal(_("Error while computation"))
-			
-		    # Empty process list
-		    proc_list = []
-		    
-		# Store the new maps
-		new_maps[row["id"]] = new_map
-	
-	core.percent(0, num_rows, 1)
-	
-	# Insert the new space time dataset
-	if new_sp.is_in_db(dbif):
-	    if core.overwrite() == True:
-		new_sp.delete(dbif)
-		new_sp = sp.get_new_instance(out_id)
-
-	temporal_type, semantic_type, title, description = sp.get_initial_values()
-	new_sp.set_initial_values(temporal_type, semantic_type, title, description)
-	new_sp.insert(dbif)
-	
-	# collect empty maps to remove them
-	empty_maps = []
-	
-	# Register the maps in the database
+            count = 0
+            proc_count = 0
+            proc_list = []
+
+            for row in rows:
+                count += 1
+
+                core.percent(count, num_rows, 1)
+
+                map_name = "%s_%i" % (base, count)
+
+                # We need to modify the r(3).mapcalc expression
+                if type != "vector":
+                    expr = "%s = %s" % (map_name, expression)
+
+                    expr = expr.replace(sp.base.get_map_id(), row["id"])
+                    expr = expr.replace(sp.base.get_name(), row["id"])
+
+                    # We need to build the id
+                    map_id = dummy.build_id(map_name, mapset)
+                else:
+                    map_id = dummy.build_id(map_name, mapset, row["layer"])
+
+                new_map = sp.get_new_map_instance(map_id)
+
+                # Check if new map is in the temporal database
+                if new_map.is_in_db(dbif):
+                    if core.overwrite():
+                        # Remove the existing temporal database entry
+                        new_map.delete(dbif)
+                        new_map = sp.get_new_map_instance(map_id)
+                    else:
+                        core.error(_("Map <%s> is already in temporal database,"
+                                     " use overwrite flag to overwrite") %
+                                    (new_map.get_map_id()))
+                        continue
+
+                # Add process to the process list
+                if type == "raster":
+                    core.verbose(_("Apply r.mapcalc expression: \"%s\"")
+                                 % expr)
+                    proc_list.append(Process(target=run_mapcalc2d,
+                                             args=(expr,)))
+                elif type == "raster3d":
+                    core.verbose(_("Apply r3.mapcalc expression: \"%s\"")
+                                 % expr)
+                    proc_list.append(Process(target=run_mapcalc3d,
+                                             args=(expr,)))
+                elif type == "vector":
+                    core.verbose(_("Apply v.extract where statement: \"%s\"")
+                                 % expression)
+                    if row["layer"]:
+                        proc_list.append(Process(target=run_vector_extraction,
+                                                 args=(row["name"] + "@" + row["mapset"],
+                                                 map_name, row["layer"], 
+                                                 vtype, expression)))
+                    else:
+                        proc_list.append(Process(target=run_vector_extraction,
+                                                 args=(row["name"] + "@" + row["mapset"],
+                                                 map_name, layer, vtype, 
+                                                 expression)))
+
+                proc_list[proc_count].start()
+                proc_count += 1
+
+                # Join the processes if the maximum number of processes
+                # is reached or the end of the loop is reached
+                if proc_count == nprocs or proc_count == num_rows:
+                    proc_count = 0
+                    exitcodes = 0
+                    for proc in proc_list:
+                        proc.join()
+                        exitcodes += proc.exitcode
+
+                    if exitcodes != 0:
+                        dbif.close()
+                        core.fatal(_("Error while computation"))
+
+                    # Empty process list
+                    proc_list = []
+
+                # Store the new maps
+                new_maps[row["id"]] = new_map
+
+        core.percent(0, num_rows, 1)
+
+        # Insert the new space time dataset
+        if new_sp.is_in_db(dbif):
+            if core.overwrite():
+                new_sp.delete(dbif)
+                new_sp = sp.get_new_instance(out_id)
+
+        temporal_type, semantic_type, title, description = \
+            sp.get_initial_values()
+        new_sp.set_initial_values(
+            temporal_type, semantic_type, title, description)
+        new_sp.insert(dbif)
+
+        # collect empty maps to remove them
+        empty_maps = []
+
+        # Register the maps in the database
         count = 0
         count = 0
         for row in rows:
         for row in rows:
             count += 1
             count += 1
-	    
-	    core.percent(count, num_rows, 1)
+
+            core.percent(count, num_rows, 1)
 
 
             old_map = sp.get_new_map_instance(row["id"])
             old_map = sp.get_new_map_instance(row["id"])
             old_map.select(dbif)
             old_map.select(dbif)
-            
+
             if expression:
             if expression:
-		# Register the new maps
-		if new_maps.has_key(row["id"]):
-		    new_map = new_maps[row["id"]]
-
-		    # Read the raster map data
-		    new_map.load()
-		    
-		    # In case of a empty map continue, do not register empty maps
-		    if type == "raster" or type == "raster3d":
-			if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
-			    if not register_null:
-				empty_maps.append(new_map)
-				continue
-		    elif type == "vector":
-			if new_map.metadata.get_primitives() == 0 or new_map.metadata.get_primitives() == None:
-			    if not register_null:
-				empty_maps.append(new_map)
-				continue
-
-		    # Set the time stamp
-		    if old_map.is_time_absolute():
-			start, end, tz = old_map.get_absolute_time()
-			new_map.set_absolute_time(start, end, tz)
-		    else:
-			start, end = old_map.get_relative_time()
-			new_map.set_relative_time(start, end)
-
-		    # Insert map in temporal database
-		    new_map.insert(dbif)
-
-		    new_sp.register_map(new_map, dbif)
-	    else:
-		new_sp.register_map(old_map, dbif)          
-                
+                # Register the new maps
+                if row["id"] in new_maps:
+                    new_map = new_maps[row["id"]]
+
+                    # Read the raster map data
+                    new_map.load()
+
+                    # In case of an empty map continue, do not register empty maps
+                    if type == "raster" or type == "raster3d":
+                        if new_map.metadata.get_min() is None and \
+                            new_map.metadata.get_max() is None:
+                            if not register_null:
+                                empty_maps.append(new_map)
+                                continue
+                    elif type == "vector":
+                        if new_map.metadata.get_primitives() == 0 or \
+                           new_map.metadata.get_primitives() is None:
+                            if not register_null:
+                                empty_maps.append(new_map)
+                                continue
+
+                    # Set the time stamp
+                    if old_map.is_time_absolute():
+                        start, end, tz = old_map.get_absolute_time()
+                        new_map.set_absolute_time(start, end, tz)
+                    else:
+                        start, end = old_map.get_relative_time()
+                        new_map.set_relative_time(start, end)
+
+                    # Insert map in temporal database
+                    new_map.insert(dbif)
+
+                    new_sp.register_map(new_map, dbif)
+            else:
+                new_sp.register_map(old_map, dbif)
+
         # Update the spatio-temporal extent and the metadata table entries
         # Update the spatio-temporal extent and the metadata table entries
         new_sp.update_from_registered_maps(dbif)
         new_sp.update_from_registered_maps(dbif)
-	
-	core.percent(num_rows, num_rows, 1)
-	
-	# Remove empty maps
-	if len(empty_maps) > 0:
-	    names = ""
-	    count = 0
-	    for map in empty_maps:
-		if count == 0:
-		    names += "%s"%(map.get_name())
-		else:
-		    names += ",%s"%(map.get_name())
-		count += 1
-	    if type == "raster":
-		core.run_command("g.remove", rast=names, quiet=True)
-	    elif type == "raster3d":
-		core.run_command("g.remove", rast3d=names, quiet=True)
-	    elif type == "vector":
-		core.run_command("g.remove", vect=names, quiet=True)
-        
+
+        core.percent(num_rows, num_rows, 1)
+
+        # Remove empty maps
+        if len(empty_maps) > 0:
+            names = ""
+            count = 0
+            for map in empty_maps:
+                if count == 0:
+                    names += "%s" % (map.get_name())
+                else:
+                    names += ",%s" % (map.get_name())
+                count += 1
+            if type == "raster":
+                core.run_command("g.remove", rast=names, quiet=True)
+            elif type == "raster3d":
+                core.run_command("g.remove", rast3d=names, quiet=True)
+            elif type == "vector":
+                core.run_command("g.remove", vect=names, quiet=True)
+
     dbif.close()
     dbif.close()
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def run_mapcalc2d(expr):
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("r.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r.mapcalc", expression=expr,
+                            overwrite=core.overwrite(), quiet=True)
 
 
 
 
 def run_mapcalc3d(expr):
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
     """Helper function to run r3.mapcalc in parallel"""
-    return core.run_command("r3.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
-    
+    return core.run_command("r3.mapcalc", expression=expr,
+                            overwrite=core.overwrite(), quiet=True)
+
 
 
 def run_vector_extraction(input, output, layer, type, where):
 def run_vector_extraction(input, output, layer, type, where):
     """Helper function to run r.mapcalc in parallel"""
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("v.extract", input=input, output=output, layer=layer, type=type, where=where, overwrite=core.overwrite(), quiet=True)
-
+    return core.run_command("v.extract", input=input, output=output,
+                            layer=layer, type=type, where=where,
+                            overwrite=core.overwrite(), quiet=True)
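
The helper functions above are driven from the extraction loop through multiprocessing.Process objects that are started one per map and joined in batches of nprocs. A minimal, GRASS-independent sketch of that batching pattern; fake_work() is an invented stand-in for run_mapcalc2d()/run_vector_extraction() and not part of the GRASS API:

@code

from multiprocessing import Process

def fake_work(value):
    """Invented placeholder for a single r.mapcalc/v.extract call."""
    print(value * value)

def run_batched(items, nprocs=2):
    proc_list = []
    proc_count = 0
    num = len(items)
    for count, item in enumerate(items, 1):
        proc_list.append(Process(target=fake_work, args=(item,)))
        proc_list[proc_count].start()
        proc_count += 1
        # Join the running workers once nprocs processes have been
        # started or the end of the loop is reached
        if proc_count == nprocs or count == num:
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode
            if exitcodes != 0:
                raise RuntimeError("Error during computation")
            proc_count = 0
            proc_list = []

if __name__ == "__main__":
    run_batched([1, 2, 3, 4, 5], nprocs=2)

@endcode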

+ 259 - 244
lib/python/temporal/mapcalc.py

@@ -4,12 +4,6 @@
 
 
 Temporal GIS related functions to be used in Python scripts.
 Temporal GIS related functions to be used in Python scripts.
 
 
-Usage:
-
-@code
-import grass.temporal as tgis
-@endcode
-
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -23,87 +17,96 @@ from multiprocessing import Process
 
 
 ############################################################################
 ############################################################################
 
 
-def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs=1, register_null=False, spatial=False):
-    """!Perform map-calculations of maps from different space time raster/raster3d datasets, using
-       a specific sampling method to select temporal related maps.
-    
-       A mapcalc expression can be provided to process the temporal extracted maps.
+
+def dataset_mapcalculator(inputs, output, type, expression, base, method, 
+                          nprocs=1, register_null=False, spatial=False):
+    """!Perform map-calculations of maps from different space time 
+       raster/raster3d datasets, using a specific sampling method 
+       to select temporal related maps.
+
+       A mapcalc expression can be provided to process the temporal 
+       extracted maps.
        Mapcalc expressions are supported for raster and raster3d maps.
        Mapcalc expressions are supported for raster and raster3d maps.
-       
-       @param input The name of the input space time raster/raster3d dataset 
-       @param output The name of the extracted new space time raster/raster3d dataset
+
+       @param inputs The names of the input space time raster/raster3d datasets
+       @param output The name of the extracted new space time raster(3d) dataset
        @param type The type of the dataset: "raster" or "raster3d"
        @param type The type of the dataset: "raster" or "raster3d"
        @param method The method to be used for temporal sampling
        @param method The method to be used for temporal sampling
        @param expression The r(3).mapcalc expression
        @param expression The r(3).mapcalc expression
-       @param base The base name of the new created maps in case a mapclac expression is provided 
-       @param nprocs The number of parallel processes to be used for mapcalc processing
+       @param base The base name of the newly created maps in case a
+              mapcalc expression is provided
+       @param nprocs The number of parallel processes to be used for 
+              mapcalc processing
        @param register_null Set this number True to register empty maps
        @param register_null Set this number True to register empty maps
        @param spatial Check spatial overlap
        @param spatial Check spatial overlap
     """
     """
-    
+
     # We need a database interface for fast computation
     # We need a database interface for fast computation
     dbif = ()
     dbif = ()
     dbif.connect()
     dbif.connect()
 
 
-    mapset =  core.gisenv()["MAPSET"]
-    
+    mapset = core.gisenv()["MAPSET"]
+
     input_name_list = inputs.split(",")
     input_name_list = inputs.split(",")
-    
+
     # Process the first input
     # Process the first input
     if input_name_list[0].find("@") >= 0:
     if input_name_list[0].find("@") >= 0:
-	id = input_name_list[0]
+        id = input_name_list[0]
     else:
     else:
-	id = input_name_list[0] + "@" + mapset
-	
+        id = input_name_list[0] + "@" + mapset
+
     if type == "raster":
     if type == "raster":
-	first_input = space_time_raster_dataset(id)
+        first_input = space_time_raster_dataset(id)
     else:
     else:
-	first_input = space_time_raster3d_dataset(id)
-    
-    if first_input.is_in_db(dbif) == False:
-	dbif.close()
+        first_input = space_time_raster3d_dataset(id)
+
+    if not first_input.is_in_db(dbif):
+        dbif.close()
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
         core.fatal(_("Space time %s dataset <%s> not found") % (type, id))
 
 
     # Fill the object with data from the temporal database
     # Fill the object with data from the temporal database
     first_input.select(dbif)
     first_input.select(dbif)
-    
-    # All additional inputs in reverse sorted order to avoid wrong name substitution
+
+    # All additional inputs in reverse sorted order to avoid 
+    # wrong name substitution
     input_name_list = input_name_list[1:]
     input_name_list = input_name_list[1:]
     input_name_list.sort()
     input_name_list.sort()
     input_name_list.reverse()
     input_name_list.reverse()
     input_list = []
     input_list = []
-        
+
     for input in input_name_list:
     for input in input_name_list:
 
 
-	if input.find("@") >= 0:
-	    id = input
-	else:
-	    id = input + "@" + mapset
-	    
-	sp = first_input.get_new_instance(id)
-	
-	if sp.is_in_db(dbif) == False:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> not found in temporal database") % (type, id))
-
-	sp.select(dbif)
-	
-	input_list.append(copy.copy(sp))
+        if input.find("@") >= 0:
+            id = input
+        else:
+            id = input + "@" + mapset
+
+        sp = first_input.get_new_instance(id)
+
+        if not sp.is_in_db(dbif):
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> not "
+                         "found in temporal database") % (type, id))
+
+        sp.select(dbif)
+
+        input_list.append(copy.copy(sp))
 
 
     # Create the new space time dataset
     # Create the new space time dataset
     if output.find("@") >= 0:
     if output.find("@") >= 0:
         out_id = output
         out_id = output
     else:
     else:
         out_id = output + "@" + mapset
         out_id = output + "@" + mapset
-        
+
     new_sp = first_input.get_new_instance(out_id)
     new_sp = first_input.get_new_instance(out_id)
-    
+
     # Check if in database
     # Check if in database
     if new_sp.is_in_db(dbif):
     if new_sp.is_in_db(dbif):
-        if core.overwrite() == False:
-	    dbif.close()
-            core.fatal(_("Space time %s dataset <%s> is already in database, use overwrite flag to overwrite") % (type, out_id))
- 
+        if not core.overwrite():
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> is already in database, "
+                         "use overwrite flag to overwrite") % (type, out_id))
+
     # Sample all inputs by the first input and create a sample matrix
     # Sample all inputs by the first input and create a sample matrix
     if spatial:
     if spatial:
         core.message(_("Start spatio-temporal sampling"))
         core.message(_("Start spatio-temporal sampling"))
@@ -114,217 +117,226 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs
     sample_map_list = []
     sample_map_list = []
     # First entry is the first dataset id
     # First entry is the first dataset id
     id_list.append(first_input.get_name())
     id_list.append(first_input.get_name())
-    
+
     if len(input_list) > 0:
     if len(input_list) > 0:
-	has_samples = False
-	for dataset in input_list:
-	    list = dataset.sample_by_dataset(stds=first_input, method=method, spatial=spatial, dbif=dbif)
-	    
-	    # In case samples are not found
-	    if not list and len(list) == 0:
-		dbif.close()
-		core.message(_("No samples found for map calculation"))
-		return 0
-	    
-	    # The fist entries are the samples
-	    map_name_list = []
-	    if has_samples == False:
-		for entry in list:
-		    granule = entry["granule"]
-		    # Do not consider gaps
-		    if granule.get_id() == None:
-			continue
-		    sample_map_list.append(granule)
-		    map_name_list.append(granule.get_name())
-		# Attach the map names
-		map_matrix.append(copy.copy(map_name_list))
-		has_samples = True
-		
-	    map_name_list = []
-	    for entry in list:
-		maplist = entry["samples"]
-		granule = entry["granule"]
-		
-		# Do not consider gaps in the sampler
-		if granule.get_id() == None:
-		    continue
-		
-		if len(maplist) > 1:
-		    core.warning(_("Found more than a single map in a sample granule. "\
-		    "Only the first map is used for computation. "\
-		    "Use t.rast.aggregate.ds to create synchronous raster datasets."))
-		
-		# Store all maps! This includes non existent maps, identified by id == None 
-		map_name_list.append(maplist[0].get_name())
-	    
-	    # Attach the map names
-	    map_matrix.append(copy.copy(map_name_list))
-
-	    id_list.append(dataset.get_name())
+        has_samples = False
+        for dataset in input_list:
+            list = dataset.sample_by_dataset(stds=first_input,
+                                             method=method, spatial=spatial, 
+                                             dbif=dbif)
+
+            # In case samples are not found
+            if not list or len(list) == 0:
+                dbif.close()
+                core.message(_("No samples found for map calculation"))
+                return 0
+
+            # The first entries are the samples
+            map_name_list = []
+            if not has_samples:
+                for entry in list:
+                    granule = entry["granule"]
+                    # Do not consider gaps
+                    if granule.get_id() is None:
+                        continue
+                    sample_map_list.append(granule)
+                    map_name_list.append(granule.get_name())
+                # Attach the map names
+                map_matrix.append(copy.copy(map_name_list))
+                has_samples = True
+
+            map_name_list = []
+            for entry in list:
+                maplist = entry["samples"]
+                granule = entry["granule"]
+
+                # Do not consider gaps in the sampler
+                if granule.get_id() is None:
+                    continue
+
+                if len(maplist) > 1:
+                    core.warning(_("Found more than a single map in a sample "
+                                   "granule. Only the first map is used for "
+                                   "computation. Use t.rast.aggregate.ds to "
+                                   "create synchronous raster datasets."))
+
+                # Store all maps! This includes non-existent maps,
+                # identified by id == None
+                map_name_list.append(maplist[0].get_name())
+
+            # Attach the map names
+            map_matrix.append(copy.copy(map_name_list))
+
+            id_list.append(dataset.get_name())
     else:
     else:
-	list = first_input.get_registered_maps_as_objects(dbif=dbif)
-	
-	if list == None:
-	    dbif.close()
+        list = first_input.get_registered_maps_as_objects(dbif=dbif)
+
+        if list is None:
+            dbif.close()
             core.message(_("No maps in input dataset"))
             core.message(_("No maps in input dataset"))
             return 0
             return 0
-	
-	map_name_list = []
-	for map in list:
-	    map_name_list.append(map.get_name())
-	    sample_map_list.append(map)
-	
-	# Attach the map names
-	map_matrix.append(copy.copy(map_name_list))
-   
+
+        map_name_list = []
+        for map in list:
+            map_name_list.append(map.get_name())
+            sample_map_list.append(map)
+
+        # Attach the map names
+        map_matrix.append(copy.copy(map_name_list))
+
     # Needed for map registration
     # Needed for map registration
     map_list = []
     map_list = []
-	
+
     if len(map_matrix) > 0:
     if len(map_matrix) > 0:
-	
-	core.message(_("Start mapcalc computation"))
-	    
-	count = 0
-	# Get the number of samples
-	num = len(map_matrix[0])
-	
-	# Parallel processing
+
+        core.message(_("Start mapcalc computation"))
+
+        count = 0
+        # Get the number of samples
+        num = len(map_matrix[0])
+
+        # Parallel processing
         proc_list = []
         proc_list = []
         proc_count = 0
         proc_count = 0
-	
-	# For all samples
+
+        # For all samples
         for i in range(num):
         for i in range(num):
-            
+
             count += 1
             count += 1
-	    core.percent(count, num, 1)
+            core.percent(count, num, 1)
+
+            # Create the r.mapcalc statement for the current time step
+            map_name = "%s_%i" % (base, count)
+            expr = "%s = %s" % (map_name, expression)
 
 
-	    # Create the r.mapcalc statement for the current time step
-	    map_name = "%s_%i" % (base, count)   
-	    expr = "%s = %s" % (map_name, expression)
-            
             # Check that all maps are in the sample
             # Check that all maps are in the sample
             valid_maps = True
             valid_maps = True
-            # Replace all dataset names with their map names of the current time step
+            # Replace all dataset names with their map names of the 
+            # current time step
             for j in range(len(map_matrix)):
             for j in range(len(map_matrix)):
-		if map_matrix[j][i] == None:
-		    valid_maps = False
-		    break
-		# Substitute the dataset name with the map name
-		expr = expr.replace(id_list[j], map_matrix[j][i])
-
-	    # Proceed with the next sample
-	    if valid_maps == False:
-		continue
-		
-	    # Create the new map id and check if the map is already in the database
-	    map_id = map_name + "@" + mapset
-
-	    new_map = first_input.get_new_map_instance(map_id)
-
-	    # Check if new map is in the temporal database
-	    if new_map.is_in_db(dbif):
-		if core.overwrite() == True:
-		    # Remove the existing temporal database entry
-		    new_map.delete(dbif)
-		    new_map = first_input.get_new_map_instance(map_id)
-		else:
-		    core.error(_("Map <%s> is already in temporal database, use overwrite flag to overwrite"))
-		    continue
-
-	    # Set the time stamp
-	    if sample_map_list[i].is_time_absolute():
-		start, end, tz = sample_map_list[i].get_absolute_time()
-		new_map.set_absolute_time(start, end, tz)
-	    else:
-		start, end = sample_map_list[i].get_relative_time()
-		new_map.set_relative_time(start, end)
-	    
-	    map_list.append(new_map)
-	    
-	    # Start the parallel r.mapcalc computation
-	    core.verbose(_("Apply mapcalc expression: \"%s\"") % expr)
-
-	    if type == "raster":
-		proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
-	    else:
-		proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
-	    proc_list[proc_count].start()
-	    proc_count += 1
-	    
-	    if proc_count == nprocs or proc_count == num:
-		proc_count = 0
-		exitcodes = 0
-		for proc in proc_list:
-		    proc.join()
-		    exitcodes += proc.exitcode
-		    
-		if exitcodes != 0:
-		    dbif.close()
-		    core.fatal(_("Error while mapcalc computation"))
-		    
-		# Empty process list
-		proc_list = []
-		
-	# Register the new maps in the output space time dataset
-	core.message(_("Start map registration in temporal database"))
-	    
-	# Overwrite an existing dataset if requested
-	if new_sp.is_in_db(dbif):
-	    if core.overwrite() == True:
-		new_sp.delete(dbif)
-		new_sp = first_input.get_new_instance(out_id)
-		
-	# Copy the ids from the first input
-	temporal_type, semantic_type, title, description = first_input.get_initial_values()
-	new_sp.set_initial_values(temporal_type, semantic_type, title, description)
-	# Insert the dataset in the temporal database
-	new_sp.insert(dbif)
-    
-	count = 0
-	
-	# collect empty maps to remove them
-	empty_maps = []
-	
-	# Insert maps in the temporal database and in the new space time dataset
-	for new_map in map_list:
+                if map_matrix[j][i] is None:
+                    valid_maps = False
+                    break
+                # Substitute the dataset name with the map name
+                expr = expr.replace(id_list[j], map_matrix[j][i])
+
+            # Proceed with the next sample
+            if not valid_maps:
+                continue
+
+            # Create the new map id and check if the map is already 
+            # in the database
+            map_id = map_name + "@" + mapset
+
+            new_map = first_input.get_new_map_instance(map_id)
+
+            # Check if new map is in the temporal database
+            if new_map.is_in_db(dbif):
+                if core.overwrite():
+                    # Remove the existing temporal database entry
+                    new_map.delete(dbif)
+                    new_map = first_input.get_new_map_instance(map_id)
+                else:
+                    core.error(_("Map <%s> is already in temporal database, "
+                                 "use overwrite flag to overwrite"))
+                    continue
+
+            # Set the time stamp
+            if sample_map_list[i].is_time_absolute():
+                start, end, tz = sample_map_list[i].get_absolute_time()
+                new_map.set_absolute_time(start, end, tz)
+            else:
+                start, end = sample_map_list[i].get_relative_time()
+                new_map.set_relative_time(start, end)
+
+            map_list.append(new_map)
+
+            # Start the parallel r.mapcalc computation
+            core.verbose(_("Apply mapcalc expression: \"%s\"") % expr)
+
+            if type == "raster":
+                proc_list.append(Process(target=run_mapcalc2d, args=(expr,)))
+            else:
+                proc_list.append(Process(target=run_mapcalc3d, args=(expr,)))
+            proc_list[proc_count].start()
+            proc_count += 1
+
+            if proc_count == nprocs or proc_count == num:
+                proc_count = 0
+                exitcodes = 0
+                for proc in proc_list:
+                    proc.join()
+                    exitcodes += proc.exitcode
+
+                if exitcodes != 0:
+                    dbif.close()
+                    core.fatal(_("Error while mapcalc computation"))
+
+                # Empty process list
+                proc_list = []
+
+        # Register the new maps in the output space time dataset
+        core.message(_("Start map registration in temporal database"))
+
+        # Overwrite an existing dataset if requested
+        if new_sp.is_in_db(dbif):
+            if core.overwrite():
+                new_sp.delete(dbif)
+                new_sp = first_input.get_new_instance(out_id)
+
+        # Copy the ids from the first input
+        temporal_type, semantic_type, title, description = \
+            first_input.get_initial_values()
+        new_sp.set_initial_values(
+            temporal_type, semantic_type, title, description)
+        # Insert the dataset in the temporal database
+        new_sp.insert(dbif)
+
+        count = 0
+
+        # collect empty maps to remove them
+        empty_maps = []
+
+        # Insert maps in the temporal database and in the new space time dataset
+        for new_map in map_list:
 
 
             count += 1
             count += 1
-	    core.percent(count, num, 1)
-	    
-	    # Read the map data
-	    new_map.load()
-	    
-	    # In case of a null map continue, do not register null maps
-	    if new_map.metadata.get_min() == None and new_map.metadata.get_max() == None:
-		if not register_null:
-		    empty_maps.append(new_map)
-		    continue
-
-	    # Insert map in temporal database
-	    new_map.insert(dbif)
-
-	    new_sp.register_map(new_map, dbif)
+            core.percent(count, num, 1)
+
+            # Read the map data
+            new_map.load()
+
+            # In case of a null map continue, do not register null maps
+            if new_map.metadata.get_min() is None and \
+               new_map.metadata.get_max() is None:
+                if not register_null:
+                    empty_maps.append(new_map)
+                    continue
+
+            # Insert map in temporal database
+            new_map.insert(dbif)
+
+            new_sp.register_map(new_map, dbif)
 
 
         # Update the spatio-temporal extent and the metadata table entries
         # Update the spatio-temporal extent and the metadata table entries
         new_sp.update_from_registered_maps(dbif)
         new_sp.update_from_registered_maps(dbif)
-		
-	core.percent(1, 1, 1)
-
-	# Remove empty maps
-	if len(empty_maps) > 0:
-	    names = ""
-	    count = 0
-	    for map in empty_maps:
-		if count == 0:
-		    names += "%s"%(map.get_name())
-		else:
-		    names += ",%s"%(map.get_name())
-		count += 1
-	    if type == "raster":
-		core.run_command("g.remove", rast=names, quiet=True)
-	    elif type == "raster3d":
-		core.run_command("g.remove", rast3d=names, quiet=True)
-        
+
+        core.percent(1, 1, 1)
+
+        # Remove empty maps
+        if len(empty_maps) > 0:
+            names = ""
+            count = 0
+            for map in empty_maps:
+                if count == 0:
+                    names += "%s" % (map.get_name())
+                else:
+                    names += ",%s" % (map.get_name())
+                count += 1
+            if type == "raster":
+                core.run_command("g.remove", rast=names, quiet=True)
+            elif type == "raster3d":
+                core.run_command("g.remove", rast3d=names, quiet=True)
+
     dbif.close()
     dbif.close()
 
 
 
 
@@ -332,10 +344,13 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method, nprocs
 
 
 def run_mapcalc2d(expr):
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
     """Helper function to run r.mapcalc in parallel"""
-    return core.run_command("r.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r.mapcalc", expression=expr, 
+                            overwrite=core.overwrite(), quiet=True)
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def run_mapcalc3d(expr):
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
     """Helper function to run r3.mapcalc in parallel"""
-    return core.run_command("r3.mapcalc", expression=expr, overwrite=core.overwrite(), quiet=True)
+    return core.run_command("r3.mapcalc", expression=expr, 
+                            overwrite=core.overwrite(), quiet=True)
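
dataset_mapcalculator() builds one r(3).mapcalc statement per sampled time step by substituting every input dataset name in the user expression with the map sampled for that step, skipping steps that contain a gap. A rough, hedged sketch of that substitution step with invented dataset and map names (they do not refer to any real location):

@code

def build_step_expression(base, count, expression, id_list, map_matrix, i):
    """Return "<base>_<count> = <expression>" with all dataset names
       replaced by the maps of time step i, or None on a gap."""
    map_name = "%s_%i" % (base, count)
    expr = "%s = %s" % (map_name, expression)
    for j in range(len(map_matrix)):
        if map_matrix[j][i] is None:
            return None  # gap in the sample, skip this time step
        expr = expr.replace(id_list[j], map_matrix[j][i])
    return expr

# Two invented datasets sampled over two time steps
id_list = ["temp", "prec"]
map_matrix = [["temp_2001", "temp_2002"],
              ["prec_2001", "prec_2002"]]

print(build_step_expression("result", 1, "temp + prec",
                            id_list, map_matrix, 0))
# result_1 = temp_2001 + prec_2001

@endcode

As in the library code, the replacement is purely textual, which is why the additional inputs are processed in reverse sorted order to avoid wrong name substitution.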

+ 69 - 35
lib/python/temporal/metadata.py

@@ -6,6 +6,8 @@ Temporal GIS related metadata functions to be used in Python scripts and tgis pa
 
 
 Usage:
 Usage:
 
 
+@code
+
 >>> import grass.temporal as tgis
 >>> import grass.temporal as tgis
 >>> meta = tgis.RasterMetadata()
 >>> meta = tgis.RasterMetadata()
 >>> meta = tgis.Raster3DMetadata()
 >>> meta = tgis.Raster3DMetadata()
@@ -14,6 +16,8 @@ Usage:
 >>> meta = tgis.STR3DSMetadata()
 >>> meta = tgis.STR3DSMetadata()
 >>> meta = tgis.STVDSMetadata()
 >>> meta = tgis.STVDSMetadata()
 
 
+@endcode
+
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -31,8 +35,9 @@ class RasterMetadataBase(SQLDatabaseInterface):
     
     
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.RasterMetadataBase(table="metadata", ident="soil@PERMANENT",
+        @code
+        
+        >>> meta = RasterMetadataBase(table="metadata", ident="soil@PERMANENT",
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... ewres=0.1, min=0, max=100)
         ... ewres=0.1, min=0, max=100)
         >>> meta.datatype
         >>> meta.datatype
@@ -69,7 +74,8 @@ class RasterMetadataBase(SQLDatabaseInterface):
         ewres=0.1
         ewres=0.1
         min=0.0
         min=0.0
         max=100.0
         max=100.0
-    
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, datatype=None, cols=None, 
     def __init__(self, table=None, ident=None, datatype=None, cols=None, 
 		rows=None, number_of_cells=None, nsres=None, ewres=None, 
 		rows=None, number_of_cells=None, nsres=None, ewres=None, 
@@ -267,9 +273,10 @@ class RasterMetadata(RasterMetadataBase):
         register table is stored.
         register table is stored.
        
        
         Usage:
         Usage:
+        
+        @code
        
        
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.RasterMetadata(ident="soil@PERMANENT",
+        >>> meta = RasterMetadata(ident="soil@PERMANENT",
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... datatype="CELL", cols=100, rows=100, number_of_cells=10000, nsres=0.1,
         ... ewres=0.1, min=0, max=100)
         ... ewres=0.1, min=0, max=100)
         >>> meta.datatype
         >>> meta.datatype
@@ -310,6 +317,8 @@ class RasterMetadata(RasterMetadataBase):
         min=0.0
         min=0.0
         max=100.0
         max=100.0
         strds_register=None
         strds_register=None
+        
+        @endcode
     """
     """
     def __init__(self, ident=None, strds_register=None, datatype=None, 
     def __init__(self, ident=None, strds_register=None, datatype=None, 
 		 cols=None, rows=None, number_of_cells=None, nsres=None, 
 		 cols=None, rows=None, number_of_cells=None, nsres=None, 
@@ -363,9 +372,10 @@ class Raster3DMetadata(RasterMetadataBase):
         raster dataset register table is stored.
         raster dataset register table is stored.
        
        
         Usage:
         Usage:
+        
+        @code
        
        
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.Raster3DMetadata(ident="soil@PERMANENT",
+        >>> meta = Raster3DMetadata(ident="soil@PERMANENT",
         ... datatype="FCELL", cols=100, rows=100, depths=100,
         ... datatype="FCELL", cols=100, rows=100, depths=100,
         ... number_of_cells=1000000, nsres=0.1, ewres=0.1, tbres=0.1,
         ... number_of_cells=1000000, nsres=0.1, ewres=0.1, tbres=0.1,
         ... min=0, max=100)
         ... min=0, max=100)
@@ -415,6 +425,8 @@ class Raster3DMetadata(RasterMetadataBase):
         str3ds_register=None
         str3ds_register=None
         depths=100
         depths=100
         tbres=0.1
         tbres=0.1
+        
+        @endcode
     """
     """
     def __init__(self, ident=None, str3ds_register=None, datatype=None, 
     def __init__(self, ident=None, str3ds_register=None, datatype=None, 
 		 cols=None, rows=None, depths=None, number_of_cells=None, 
 		 cols=None, rows=None, depths=None, number_of_cells=None, 
@@ -507,8 +519,9 @@ class VectorMetadata(SQLDatabaseInterface):
         raster dataset register table is stored.
         raster dataset register table is stored.
        
        
         Usage:
         Usage:
+        
+        @code
        
        
-        >>> import grass.temporal as tgis
         >>> meta = VectorMetadata(ident="lidar@PERMANENT", is_3d=True, 
         >>> meta = VectorMetadata(ident="lidar@PERMANENT", is_3d=True, 
         ... number_of_points=1, number_of_lines=2, number_of_boundaries=3,
         ... number_of_points=1, number_of_lines=2, number_of_boundaries=3,
         ... number_of_centroids=4, number_of_faces=5, number_of_kernels=6, 
         ... number_of_centroids=4, number_of_faces=5, number_of_kernels=6, 
@@ -573,6 +586,8 @@ class VectorMetadata(SQLDatabaseInterface):
         islands=10
         islands=10
         holes=11
         holes=11
         volumes=12
         volumes=12
+        
+        @endcode
     """
     """
     def __init__(
     def __init__(
         self, ident=None, stvds_register=None, is_3d=False, 
         self, ident=None, stvds_register=None, is_3d=False, 
@@ -851,13 +866,15 @@ class VectorMetadata(SQLDatabaseInterface):
 
 
 
 
 class STDSMetadataBase(SQLDatabaseInterface):
 class STDSMetadataBase(SQLDatabaseInterface):
-    """!This is the space time dataset metadata base class for strds, stvds and str3ds datasets
+    """!This is the space time dataset metadata base class for 
+       strds, stvds and str3ds datasets
        setting/getting the id, the title and the description
        setting/getting the id, the title and the description
        
        
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STDSMetadataBase(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STDSMetadataBase(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         >>> meta.id
         'soils@PERMANENT'
         'soils@PERMANENT'
@@ -874,6 +891,8 @@ class STDSMetadataBase(SQLDatabaseInterface):
          | Soils 1950 - 2010
          | Soils 1950 - 2010
         >>> meta.print_shell_info()
         >>> meta.print_shell_info()
         number_of_maps=None
         number_of_maps=None
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, title=None, description=None):
     def __init__(self, table=None, ident=None, title=None, description=None):
 
 
@@ -956,16 +975,18 @@ class STDSMetadataBase(SQLDatabaseInterface):
 
 
 
 
 class STDSRasterMetadataBase(STDSMetadataBase):
 class STDSRasterMetadataBase(STDSMetadataBase):
-    """!This is the space time dataset metadata base class for strds and str3ds datasets
+    """!This is the space time dataset metadata base 
+       class for strds and str3ds datasets
 
 
        Most of the metadata values are set by triggers in the database when
        Most of the metadata values are set by triggers in the database when
-       new raster or voxel maps are added. Therefor only some set- an many get-functions
-       are available.
+       new raster or voxel maps are added. Therefore only some
+       set- and many get-functions are available.
        
        
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STDSRasterMetadataBase(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STDSRasterMetadataBase(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         >>> meta.id
         'soils@PERMANENT'
         'soils@PERMANENT'
@@ -1006,6 +1027,8 @@ class STDSRasterMetadataBase(STDSMetadataBase):
         min_max=None
         min_max=None
         max_min=None
         max_min=None
         max_max=None
         max_max=None
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, title=None, description=None):
     def __init__(self, table=None, ident=None, title=None, description=None):
 
 
@@ -1147,8 +1170,9 @@ class STRDSMetadata(STDSRasterMetadataBase):
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STRDSMetadata(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STRDSMetadata(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         >>> meta.id
         'soils@PERMANENT'
         'soils@PERMANENT'
@@ -1193,6 +1217,8 @@ class STRDSMetadata(STDSRasterMetadataBase):
         max_min=None
         max_min=None
         max_max=None
         max_max=None
         raster_register=None
         raster_register=None
+        
+        @endcode
     """
     """
     def __init__(self, ident=None, raster_register=None, title=None, description=None):
     def __init__(self, ident=None, raster_register=None, title=None, description=None):
 
 
@@ -1241,8 +1267,9 @@ class STR3DSMetadata(STDSRasterMetadataBase):
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STR3DSMetadata(ident="soils@PERMANENT",
+        @code
+        
+        >>> meta = STR3DSMetadata(ident="soils@PERMANENT",
         ... title="Soils", description="Soils 1950 - 2010")
         ... title="Soils", description="Soils 1950 - 2010")
         >>> meta.id
         >>> meta.id
         'soils@PERMANENT'
         'soils@PERMANENT'
@@ -1293,6 +1320,8 @@ class STR3DSMetadata(STDSRasterMetadataBase):
         tbres_min=None
         tbres_min=None
         tbres_max=None
         tbres_max=None
         raster3d_register=None
         raster3d_register=None
+        
+        @endcode
         """
         """
     def __init__(self, ident=None, raster3d_register=None, title=None, description=None):
     def __init__(self, ident=None, raster3d_register=None, title=None, description=None):
 
 
@@ -1366,9 +1395,12 @@ class STVDSMetadata(STDSMetadataBase):
        Most of the metadata values are set by triggers in the database when
        Most of the metadata values are set by triggers in the database when
        new vector maps are added. Therefor only some set- an many get-functions
       new vector maps are added. Therefore only some set- and many get-functions
       new vector maps are added. Therefore only some set- and many get-functions
        are available.
-       
-        >>> import grass.temporal as tgis
-        >>> meta = tgis.STVDSMetadata(ident="lidars@PERMANENT",
+        
+        Usage:
+        
+        @code
+        
+        >>> meta = STVDSMetadata(ident="lidars@PERMANENT",
         ... title="LIDARS", description="LIDARS 2008 - 2010")
         ... title="LIDARS", description="LIDARS 2008 - 2010")
         >>> meta.id
         >>> meta.id
         'lidars@PERMANENT'
         'lidars@PERMANENT'
@@ -1424,6 +1456,8 @@ class STVDSMetadata(STDSMetadataBase):
         islands=None
         islands=None
         holes=None
         holes=None
         volumes=None
         volumes=None
+        
+        @endcode
     """
     """
     def __init__(
     def __init__(
         self, ident=None, vector_register=None, title=None, description=None):
         self, ident=None, vector_register=None, title=None, description=None):
@@ -1600,18 +1634,18 @@ class STVDSMetadata(STDSMetadataBase):
         STDSMetadataBase.print_info(self)
         STDSMetadataBase.print_info(self)
         print " | Vector register table:...... " + str(
         print " | Vector register table:...... " + str(
             self.get_vector_register())
             self.get_vector_register())
-        print " | Number of points ........... " + str(self.get_number_of_points())
-        print " | Number of lines ............ " + str(self.get_number_of_lines())
-        print " | Number of boundaries ....... " + str(self.get_number_of_boundaries())
-        print " | Number of centroids ........ " + str(self.get_number_of_centroids())
-        print " | Number of faces ............ " + str(self.get_number_of_faces())
-        print " | Number of kernels .......... " + str(self.get_number_of_kernels())
-        print " | Number of primitives ....... " + str(self.get_number_of_primitives())
-        print " | Number of nodes ............ " + str(self.get_number_of_nodes())
-        print " | Number of areas ............ " + str(self.get_number_of_areas())
-        print " | Number of islands .......... " + str(self.get_number_of_islands())
-        print " | Number of holes ............ " + str(self.get_number_of_holes())
-        print " | Number of volumes .......... " + str(self.get_number_of_volumes())
+        print " | Number of points ........... " + str(self.number_of_points)
+        print " | Number of lines ............ " + str(self.number_of_lines)
+        print " | Number of boundaries ....... " + str(self.number_of_boundaries)
+        print " | Number of centroids ........ " + str(self.number_of_centroids)
+        print " | Number of faces ............ " + str(self.number_of_faces)
+        print " | Number of kernels .......... " + str(self.number_of_kernels)
+        print " | Number of primitives ....... " + str(self.number_of_primitives)
+        print " | Number of nodes ............ " + str(self.number_of_nodes)
+        print " | Number of areas ............ " + str(self.number_of_areas)
+        print " | Number of islands .......... " + str(self.number_of_islands)
+        print " | Number of holes ............ " + str(self.number_of_holes)
+        print " | Number of volumes .......... " + str(self.number_of_volumes)
 
 
     def print_shell_info(self):
     def print_shell_info(self):
         """!Print information about this class in shell style"""
         """!Print information about this class in shell style"""

File diff too large to display
+ 543 - 419
lib/python/temporal/space_time_datasets.py


+ 226 - 166
lib/python/temporal/space_time_datasets_tools.py

@@ -23,13 +23,15 @@ for details.
 """
 """
 
 
 from space_time_datasets import *
 from space_time_datasets import *
- 
+
 ###############################################################################
 ###############################################################################
 
 
-def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=None, \
-                                        end=None, unit=None, increment=None, dbif = None, \
-                                        interval=False, fs="|"):
-    """!Use this method to register maps in space time datasets. This function is generic and
+
+def register_maps_in_space_time_dataset(
+    type, name, maps=None, file=None, start=None,
+    end=None, unit=None, increment=None, dbif=None,
+        interval=False, fs="|"):
+    """!Use this method to register maps in space time datasets. 
 
 
        Additionally a start time string and an increment string can be specified
        Additionally a start time string and an increment string can be specified
        to assign a time interval automatically to the maps.
        to assign a time interval automatically to the maps.
@@ -40,86 +42,100 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
        @param type: The type of the maps rast, rast3d or vect
        @param type: The type of the maps rast, rast3d or vect
        @param name: The name of the space time dataset
        @param name: The name of the space time dataset
        @param maps: A comma separated list of map names
        @param maps: A comma separated list of map names
-       @param file: Input file one map with start and optional end time, one per line
-       @param start: The start date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param end: The end date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param unit: The unit of the relative time: years, months, days, hours, minutes, seconds
-       @param increment: Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative: 1.0)
+       @param file: Input file one map with start and optional end time, 
+                    one per line
+       @param start: The start date and time of the first raster map
+                    (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                    format relative is integer 5)
+       @param end: The end date and time of the first raster map
+                   (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                   format relative is integer 5)
+       @param unit: The unit of the relative time: years, months, days,
+                    hours, minutes, seconds
+       @param increment: Time increment between maps for time stamp creation
+                         (format absolute: NNN seconds, minutes, hours, days,
+                         weeks, months, years; format relative: 1.0)
        @param dbif: The database interface to be used
        @param dbif: The database interface to be used
-       @param interval: If True, time intervals are created in case the start time and an increment is provided
+       @param interval: If True, time intervals are created in case the start
+                        time and an increment is provided
        @param fs: Field separator used in input file
        @param fs: Field separator used in input file
     """
     """
 
 
     start_time_in_file = False
     start_time_in_file = False
     end_time_in_file = False
     end_time_in_file = False
-    
+
     if maps and file:
     if maps and file:
-        core.fatal(_("%s= and %s= are mutually exclusive") % ("input","file"))
+        core.fatal(_("%s= and %s= are mutually exclusive") % ("input", "file"))
 
 
     if end and increment:
     if end and increment:
-        core.fatal(_("%s= and %s= are mutually exclusive") % ("end","increment"))
+        core.fatal(_("%s= and %s= are mutually exclusive") % (
+            "end", "increment"))
 
 
     if end and not start:
     if end and not start:
-        core.fatal(_("Please specify %s= and %s=") % ("start_time","end_time"))
+        core.fatal(_("Please specify %s= and %s=") % ("start_time",
+                                                      "end_time"))
 
 
     if not maps and not file:
     if not maps and not file:
-        core.fatal(_("Please specify %s= or %s=") % ("input","file"))
+        core.fatal(_("Please specify %s= or %s=") % ("input", "file"))
 
 
     # We may need the mapset
     # We may need the mapset
-    mapset =  core.gisenv()["MAPSET"]
-    
+    mapset = core.gisenv()["MAPSET"]
+
     # The name of the space time dataset is optional
     # The name of the space time dataset is optional
     if name:
     if name:
-	# Check if the dataset name contains the mapset as well
-	if name.find("@") < 0:
-	    id = name + "@" + mapset
-	else:
-	    id = name
-
-	if type == "rast" or type == "raster":
-	    sp = dataset_factory("strds", id)
-	elif type == "rast3d":
-	    sp = dataset_factory("str3ds", id)
-	elif type == "vect" or type == "vector":
-	    sp = dataset_factory("stvds", id)
-	else:
-	    core.fatal(_("Unkown map type: %s")%(type))
-
-        
+        # Check if the dataset name contains the mapset as well
+        if name.find("@") < 0:
+            id = name + "@" + mapset
+        else:
+            id = name
+
+        if type == "rast" or type == "raster":
+            sp = dataset_factory("strds", id)
+        elif type == "rast3d":
+            sp = dataset_factory("str3ds", id)
+        elif type == "vect" or type == "vector":
+            sp = dataset_factory("stvds", id)
+        else:
+            core.fatal(_("Unkown map type: %s") % (type))
+
     dbif, connect = init_dbif(None)
     dbif, connect = init_dbif(None)
 
 
     if name:
     if name:
-	# Read content from temporal database
-	sp.select(dbif)
+        # Read content from temporal database
+        sp.select(dbif)
 
 
-	if sp.is_in_db(dbif) == False:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> no found") % (sp.get_new_map_instance(None).get_type(), name))
+        if not sp.is_in_db(dbif):
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> no found") %
+                       (sp.get_new_map_instance(None).get_type(), name))
+
+        if sp.is_time_relative() and not unit:
+            dbif.close()
+            core.fatal(_("Space time %s dataset <%s> with relative time found, "
+                         "but no relative unit set for %s maps") %
+                       (sp.get_new_map_instance(None).get_type(),
+                        name, sp.get_new_map_instance(None).get_type()))
 
 
-	if sp.is_time_relative() and not unit:
-	    dbif.close()
-	    core.fatal(_("Space time %s dataset <%s> with relative time found, but no relative unit set for %s maps") % (sp.get_new_map_instance(None).get_type(), name, sp.get_new_map_instance(None).get_type()))
-    
     # We need a dummy map object to build the map ids
     # We need a dummy map object to build the map ids
     dummy = dataset_factory(type, None)
     dummy = dataset_factory(type, None)
-        
+
     maplist = []
     maplist = []
-    
+
     # Map names as comma separated string
     # Map names as comma separated string
     if maps:
     if maps:
         if maps.find(",") < 0:
         if maps.find(",") < 0:
-            maplist = [maps,]
+            maplist = [maps, ]
         else:
         else:
             maplist = maps.split(",")
             maplist = maps.split(",")
 
 
-	# Build the map list again with the ids
-	for count in range(len(maplist)):
-	    row = {}
-	    mapid = dummy.build_id(maplist[count], mapset, None)
-		
-	    row["id"] = mapid
+        # Build the map list again with the ids
+        for count in range(len(maplist)):
+            row = {}
+            mapid = dummy.build_id(maplist[count], mapset, None)
+
+            row["id"] = mapid
             maplist[count] = row
             maplist[count] = row
-            
+
     # Read the map list from file
     # Read the map list from file
     if file:
     if file:
         fd = open(file, "r")
         fd = open(file, "r")
@@ -145,69 +161,73 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
 
 
             mapname = line_list[0].strip()
             mapname = line_list[0].strip()
             row = {}
             row = {}
-            
-	    if start_time_in_file and  end_time_in_file:
-	        row["start"] = line_list[1].strip()
-	        row["end"] = line_list[2].strip()
 
 
-	    if start_time_in_file and  not end_time_in_file:
-	        row["start"] = line_list[1].strip()
-	    
-	    row["id"] = dummy.build_id(mapname, mapset)
+            if start_time_in_file and end_time_in_file:
+                row["start"] = line_list[1].strip()
+                row["end"] = line_list[2].strip()
+
+            if start_time_in_file and not end_time_in_file:
+                row["start"] = line_list[1].strip()
+
+            row["id"] = dummy.build_id(mapname, mapset)
 
 
             maplist.append(row)
             maplist.append(row)
-    
+
     num_maps = len(maplist)
     num_maps = len(maplist)
     map_object_list = []
     map_object_list = []
     statement = ""
     statement = ""
-    
+
     core.message(_("Gathering map informations"))
     core.message(_("Gathering map informations"))
-    
+
     for count in range(len(maplist)):
     for count in range(len(maplist)):
-	core.percent(count, num_maps, 1)
+        core.percent(count, num_maps, 1)
 
 
         # Get a new instance of the map type
         # Get a new instance of the map type
         map = dataset_factory(type, maplist[count]["id"])
         map = dataset_factory(type, maplist[count]["id"])
 
 
         # Use the time data from file
         # Use the time data from file
-        if maplist[count].has_key("start"):
+        if "start" in maplist[count]:
             start = maplist[count]["start"]
             start = maplist[count]["start"]
-        if maplist[count].has_key("end"):
+        if "end" in maplist[count]:
             end = maplist[count]["end"]
             end = maplist[count]["end"]
-            
+
         is_in_db = False
         is_in_db = False
 
 
         # Put the map into the database
         # Put the map into the database
-        if map.is_in_db(dbif) == False:
+        if not map.is_in_db(dbif):
             is_in_db = False
             is_in_db = False
             # Break in case no valid time is provided
             # Break in case no valid time is provided
-            if start == "" or start == None:
+            if start == "" or start is None:
                 dbif.close()
                 dbif.close()
                 if map.get_layer():
                 if map.get_layer():
-		    core.fatal(_("Unable to register %s map <%s> with layer %s. The map has no valid time and the start time is not set.") % \
-				(map.get_type(), map.get_map_id(), map.get_layer() ))
-		else:
-		    core.fatal(_("Unable to register %s map <%s>. The map has no valid time and the start time is not set.") % \
-				(map.get_type(), map.get_map_id() ))
-	    
-	    if unit:
+                    core.fatal(_("Unable to register %s map <%s> with layer %s. "
+                                 "The map has no valid time and the start time is not set.") %
+                               (map.get_type(), map.get_map_id(), map.get_layer()))
+                else:
+                    core.fatal(_("Unable to register %s map <%s>. The map has no valid"
+                                 " time and the start time is not set.") %
+                               (map.get_type(), map.get_map_id()))
+
+            if unit:
                 map.set_time_to_relative()
                 map.set_time_to_relative()
             else:
             else:
                 map.set_time_to_absolute()
                 map.set_time_to_absolute()
- 
+
         else:
         else:
             is_in_db = True
             is_in_db = True
-            if core.overwrite == False:
-		continue
+            if not core.overwrite:
+                continue
             map.select(dbif)
             map.select(dbif)
             if name and map.get_temporal_type() != sp.get_temporal_type():
             if name and map.get_temporal_type() != sp.get_temporal_type():
                 dbif.close()
                 dbif.close()
                 if map.get_layer():
                 if map.get_layer():
-		    core.fatal(_("Unable to register %s map <%s> with layer. The temporal types are different.") %  \
-		                 (map.get_type(), map.get_map_id(), map.get_layer()))
-		else:
-		    core.fatal(_("Unable to register %s map <%s>. The temporal types are different.") %  \
-		                 (map.get_type(), map.get_map_id()))
+                    core.fatal(_("Unable to register %s map <%s> with layer %s. "
+                                 "The temporal types are different.") %
+                               (map.get_type(), map.get_map_id(), map.get_layer()))
+                else:
+                    core.fatal(_("Unable to register %s map <%s>. "
+                                 "The temporal types are different.") %
+                               (map.get_type(), map.get_map_id()))
 
 
         # Load the data from the grass file database
         # Load the data from the grass file database
         map.load()
         map.load()
@@ -217,21 +237,25 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
             # In case the time is in the input file we ignore the increment counter
             # In case the time is in the input file we ignore the increment counter
             if start_time_in_file:
             if start_time_in_file:
                 count = 1
                 count = 1
-            assign_valid_time_to_map(ttype=map.get_temporal_type(), map=map, start=start, end=end, unit=unit, increment=increment, mult=count, interval=interval)
+            assign_valid_time_to_map(ttype=map.get_temporal_type(),
+                                     map=map, start=start, end=end, unit=unit,
+                                     increment=increment, mult=count,
+                                     interval=interval)
 
 
         if is_in_db:
         if is_in_db:
-           #  Gather the SQL update statement
-           statement += map.update_all(dbif=dbif, execute=False)
+            #  Gather the SQL update statement
+            statement += map.update_all(dbif=dbif, execute=False)
         else:
         else:
-           #  Gather the SQL insert statement
-           statement += map.insert(dbif=dbif, execute=False)
+            #  Gather the SQL insert statement
+            statement += map.insert(dbif=dbif, execute=False)
 
 
         # Sqlite3 performs better for huge datasets when committing in small chunks
         # Sqlite3 performs better for huge datasets when committing in small chunks
         if dbmi.__name__ == "sqlite3":
         if dbmi.__name__ == "sqlite3":
             if count % 100 == 0:
             if count % 100 == 0:
-                if statement != None and statement != "":
-                    core.message(_("Registering maps in the temporal database"))
-		    dbif.execute_transaction(statement)
+                if statement is not None and statement != "":
+                    core.message(_("Registering maps in the temporal database"))
+                    dbif.execute_transaction(statement)
                     statement = ""
                     statement = ""
 
 
         # Store the maps in a list to register in a space time dataset
         # Store the maps in a list to register in a space time dataset
@@ -240,7 +264,7 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
 
 
     core.percent(num_maps, num_maps, 1)
     core.percent(num_maps, num_maps, 1)
 
 
-    if statement != None and statement != "":
+    if statement is not None and statement != "":
         core.message(_("Register maps in the temporal database"))
         core.message(_("Register maps in the temporal database"))
         dbif.execute_transaction(statement)
         dbif.execute_transaction(statement)
 
 
@@ -251,63 +275,80 @@ def register_maps_in_space_time_dataset(type, name, maps=None, file=None, start=
         num_maps = len(map_object_list)
         num_maps = len(map_object_list)
         core.message(_("Register maps in the space time raster dataset"))
         core.message(_("Register maps in the space time raster dataset"))
         for map in map_object_list:
         for map in map_object_list:
-	    core.percent(count, num_maps, 1)
-	    sp.register_map(map=map, dbif=dbif)
+            core.percent(count, num_maps, 1)
+            sp.register_map(map=map, dbif=dbif)
             count += 1
             count += 1
-        
+
     # Update the space time tables
     # Update the space time tables
     if name:
     if name:
         core.message(_("Update space time raster dataset"))
         core.message(_("Update space time raster dataset"))
-	sp.update_from_registered_maps(dbif)
+        sp.update_from_registered_maps(dbif)
 
 
     if connect == True:
     if connect == True:
         dbif.close()
         dbif.close()
 
 
     core.percent(num_maps, num_maps, 1)
     core.percent(num_maps, num_maps, 1)
-        
+
 
 
 ###############################################################################
 ###############################################################################
 
 
 def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None, mult=1, interval=False):
 def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None, mult=1, interval=False):
     """!Assign the valid time to a map dataset
     """!Assign the valid time to a map dataset
 
 
-       @param ttype: The temporal type which should be assigned and which the time format is of
+       @param ttype: The temporal type to be assigned, which also
+                     defines the format of the start and end time
        @param map: A map dataset object derived from abstract_map_dataset
        @param map: A map dataset object derived from abstract_map_dataset
-       @param start: The start date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param end: The end date and time of the first raster map (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd", format relative is integer 5)
-       @param unit: The unit of the relative time: years, months, days, hours, minutes, seconds
-       @param increment: Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative is integer 1)
+       @param start: The start date and time of the first raster map
+                     (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                     format relative is integer 5)
+       @param end: The end date and time of the first raster map
+                   (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
+                   format relative is integer 5)
+       @param unit: The unit of the relative time: years, months,
+                    days, hours, minutes, seconds
+       @param increment: Time increment between maps for time stamp creation
+                        (format absolute: NNN seconds, minutes, hours, days,
+                        weeks, months, years; format relative is integer 1)
        @param mult: A multiplier for the increment
        @param mult: A multiplier for the increment
-       @param interval: If True, time intervals are created in case the start time and an increment is provided
+       @param interval: If True, time intervals are created in case the start
+                        time and an increment are provided
     """
     """
 
 
     if ttype == "absolute":
     if ttype == "absolute":
         start_time = string_to_datetime(start)
         start_time = string_to_datetime(start)
-        if start_time == None:
+        if start_time is None:
             dbif.close()
             dbif.close()
-            core.fatal(_("Unable to convert string \"%s\"into a datetime object")%(start))
+            core.fatal(_("Unable to convert string \"%s\" into a "
+                         "datetime object") % (start))
         end_time = None
         end_time = None
 
 
         if end:
         if end:
             end_time = string_to_datetime(end)
             end_time = string_to_datetime(end)
-            if end_time == None:
+            if end_time is None:
                 dbif.close()
                 dbif.close()
-                core.fatal(_("Unable to convert string \"%s\"into a datetime object")%(end))
+                core.fatal(_("Unable to convert string \"%s\" into a "
+                             "datetime object") % (end))
 
 
         # Add the increment
         # Add the increment
         if increment:
         if increment:
-            start_time = increment_datetime_by_string(start_time, increment, mult)
-            if start_time == None:
-		core.fatal(_("Error in increment computation"))
+            start_time = increment_datetime_by_string(
+                start_time, increment, mult)
+            if start_time is None:
+                core.fatal(_("Error in increment computation"))
             if interval:
             if interval:
-                end_time = increment_datetime_by_string(start_time, increment, 1)
-		if end_time == None:
-		    core.fatal(_("Error in increment computation"))
-	if map.get_layer():
-	    core.verbose(_("Set absolute valid time for map <%s> with layer %s to %s - %s") % (map.get_map_id(), map.get_layer(), str(start_time), str(end_time)))
+                end_time = increment_datetime_by_string(
+                    start_time, increment, 1)
+                if end_time is None:
+                    core.fatal(_("Error in increment computation"))
+        if map.get_layer():
+            core.verbose(_("Set absolute valid time for map <%(id)s> with "
+                           "layer %(layer)s to %(start)s - %(end)s") %
+                         {'id': map.get_map_id(), 'layer': map.get_layer(),
+                          'start': str(start_time), 'end': str(end_time)})
         else:
         else:
-	    core.verbose(_("Set absolute valid time for map <%s> to %s - %s") % (map.get_map_id(), str(start_time), str(end_time)))
-        
+            core.verbose(_("Set absolute valid time for map <%s> to %s - %s") %
+                         (map.get_map_id(), str(start_time), str(end_time)))
+
         map.set_absolute_time(start_time, end_time, None)
         map.set_absolute_time(start_time, end_time, None)
     else:
     else:
         start_time = int(start)
         start_time = int(start)
@@ -321,19 +362,26 @@ def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None, mult=
             if interval:
             if interval:
                 end_time = start_time + int(increment)
                 end_time = start_time + int(increment)
 
 
-	if map.get_layer():
-	    core.verbose(_("Set relative valid time for map <%s> with layer %s to %i - %s with unit %s") % (map.get_map_id(), map.get_layer(), start_time,  str(end_time), unit))
+        if map.get_layer():
+            core.verbose(_("Set relative valid time for map <%s> with layer %s "
+                           "to %i - %s with unit %s") %
+                         (map.get_map_id(), map.get_layer(), start_time,
+                          str(end_time), unit))
         else:
         else:
-	    core.verbose(_("Set relative valid time for map <%s> to %i - %s with unit %s") % (map.get_map_id(), start_time,  str(end_time), unit))
-	    
+            core.verbose(_("Set relative valid time for map <%s> to %i - %s "
+                           "with unit %s") % (map.get_map_id(), start_time,
+                                              str(end_time), unit))
+
         map.set_relative_time(start_time, end_time, unit)
         map.set_relative_time(start_time, end_time, unit)
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def dataset_factory(type, id):
 def dataset_factory(type, id):
     """!A factory functions to create space time or map datasets
     """!A factory functions to create space time or map datasets
-    
-       @param type: the dataset type: rast or raster, rast3d, vect or vector, strds, str3ds, stvds
+
+       @param type: the dataset type: rast or raster, rast3d,
+                    vect or vector, strds, str3ds, stvds
        @param id: The id of the dataset ("name@mapset")
        @param id: The id of the dataset ("name@mapset")
     """
     """
     if type == "strds":
     if type == "strds":
@@ -346,7 +394,7 @@ def dataset_factory(type, id):
         sp = raster_dataset(id)
         sp = raster_dataset(id)
     elif type == "rast3d":
     elif type == "rast3d":
         sp = raster3d_dataset(id)
         sp = raster3d_dataset(id)
-    elif type == "vect" or  type == "vector":
+    elif type == "vect" or type == "vector":
         sp = vector_dataset(id)
         sp = vector_dataset(id)
     else:
     else:
         core.error(_("Unknown dataset type: %s") % type)
         core.error(_("Unknown dataset type: %s") % type)
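
A short sketch of how the factory is used (illustrative only; the dataset and map names are hypothetical):

strds = dataset_factory("strds", "precipitation@PERMANENT")  # space time raster dataset
rmap = dataset_factory("rast", "prec_1@PERMANENT")           # single raster map
vmap = dataset_factory("vector", "roads@PERMANENT")          # single vector map
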
@@ -356,24 +404,32 @@ def dataset_factory(type, id):
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def list_maps_of_stds(type, input, columns, order, where, separator, method, header):
 def list_maps_of_stds(type, input, columns, order, where, separator, method, header):
     """! List the maps of a space time dataset using diffetent methods
     """! List the maps of a space time dataset using diffetent methods
 
 
         @param type: The type of the maps raster, raster3d or vector
         @param type: The type of the maps raster, raster3d or vector
         @param input: Name of a space time raster dataset
         @param input: Name of a space time raster dataset
-        @param columns: A comma separated list of columns to be printed to stdout 
-        @param order: A comma separated list of columns to order the space time dataset by category 
-        @param where: A where statement for selected listing without "WHERE" e.g: start_time < "2001-01-01" and end_time > "2001-01-01"
+        @param columns: A comma separated list of columns to be printed to stdout
+        @param order: A comma separated list of columns to order the
+                      space time dataset by category
+        @param where: A where statement for selected listing without "WHERE"
+                      e.g.: start_time < "2001-01-01" and end_time > "2001-01-01"
         @param separator: The field separator character between the columns
         @param separator: The field separator character between the columns
-        @param method: String identifier to select a method out of cols,comma,delta or deltagaps
+        @param method: String identifier to select a method out of cols,
+                       comma, delta, deltagaps or gran
             * "cols": Print preselected columns specified by columns
             * "cols": Print preselected columns specified by columns
             * "comma": Print the map ids (name@mapset) as comma separated string
             * "comma": Print the map ids (name@mapset) as comma separated string
-            * "delta": Print the map ids (name@mapset) with start time, end time, relative length of intervals and the relative distance to the begin
-            * "deltagaps": Same as "delta" with additional listing of gaps. Gaps can be simply identified as the id is "None"
-            * "gran": List map using the granularity of the space time dataset, columns are identical to deltagaps 
-        @param header: Set True to print column names 
+            * "delta": Print the map ids (name@mapset) with start time,
+                       end time, relative length of intervals and the relative
+                       distance to the begin
+            * "deltagaps": Same as "delta" with additional listing of gaps.
+                           Gaps can be simply identified as the id is "None"
+            * "gran": List maps using the granularity of the space time dataset,
+                      columns are identical to deltagaps
+        @param header: Set True to print column names
     """
     """
-    mapset =  core.gisenv()["MAPSET"]
+    mapset = core.gisenv()["MAPSET"]
 
 
     if input.find("@") >= 0:
     if input.find("@") >= 0:
         id = input
         id = input
@@ -381,21 +437,21 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
         id = input + "@" + mapset
         id = input + "@" + mapset
 
 
     sp = dataset_factory(type, id)
     sp = dataset_factory(type, id)
-    
-    if sp.is_in_db() == False:
+
+    if not sp.is_in_db():
         core.fatal(_("Dataset <%s> not found in temporal database") % (id))
         core.fatal(_("Dataset <%s> not found in temporal database") % (id))
 
 
     sp.select()
     sp.select()
 
 
-    if separator == None or separator == "":
+    if separator is None or separator == "":
         separator = "\t"
         separator = "\t"
-           
+
     # This method expects a list of objects for gap detection
     # This method expects a list of objects for gap detection
     if method == "delta" or method == "deltagaps" or method == "gran":
     if method == "delta" or method == "deltagaps" or method == "gran":
-	if type == "stvds":
-	    columns = "id,name,layer,mapset,start_time,end_time"
-	else:
-	    columns = "id,name,mapset,start_time,end_time"
+        if type == "stvds":
+            columns = "id,name,layer,mapset,start_time,end_time"
+        else:
+            columns = "id,name,mapset,start_time,end_time"
         if method == "deltagaps":
         if method == "deltagaps":
             maps = sp.get_registered_maps_as_objects_with_gaps(where, None)
             maps = sp.get_registered_maps_as_objects_with_gaps(where, None)
         elif method == "delta":
         elif method == "delta":
@@ -405,15 +461,15 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
 
 
         if header:
         if header:
             string = ""
             string = ""
-	    string += "%s%s" % ("id", separator)
-	    string += "%s%s" % ("name", separator)
+            string += "%s%s" % ("id", separator)
+            string += "%s%s" % ("name", separator)
             if type == "stvds":
             if type == "stvds":
-		string += "%s%s" % ("layer", separator)
-	    string += "%s%s" % ("mapset", separator)
+                string += "%s%s" % ("layer", separator)
+            string += "%s%s" % ("mapset", separator)
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("interval_length", separator)
             string += "%s%s" % ("interval_length", separator)
-            string += "%s"   % ("distance_from_begin")
+            string += "%s" % ("distance_from_begin")
 
 
         if maps and len(maps) > 0:
         if maps and len(maps) > 0:
 
 
@@ -431,7 +487,7 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
 
 
                 start, end = map.get_valid_time()
                 start, end = map.get_valid_time()
                 if end:
                 if end:
-                    delta = end -start
+                    delta = end - start
                 else:
                 else:
                     delta = None
                     delta = None
                 delta_first = start - first_time
                 delta_first = start - first_time
@@ -444,13 +500,13 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
                 string = ""
                 string = ""
                 string += "%s%s" % (map.get_id(), separator)
                 string += "%s%s" % (map.get_id(), separator)
                 string += "%s%s" % (map.get_name(), separator)
                 string += "%s%s" % (map.get_name(), separator)
-		if type == "stvds":
-		    string += "%s%s" % (map.get_layer(), separator)
+                if type == "stvds":
+                    string += "%s%s" % (map.get_layer(), separator)
                 string += "%s%s" % (map.get_mapset(), separator)
                 string += "%s%s" % (map.get_mapset(), separator)
                 string += "%s%s" % (start, separator)
                 string += "%s%s" % (start, separator)
                 string += "%s%s" % (end, separator)
                 string += "%s%s" % (end, separator)
                 string += "%s%s" % (delta, separator)
                 string += "%s%s" % (delta, separator)
-                string += "%s"   % (delta_first)
+                string += "%s" % (delta_first)
                 print string
                 print string
 
 
     else:
     else:
@@ -497,17 +553,21 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
                         else:
                         else:
                             output += str(col)
                             output += str(col)
                         count += 1
                         count += 1
-                        
+
                     print output
                     print output
 
 
 ###############################################################################
 ###############################################################################
 
 
+
 def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, separator, method, spatial=False):
 def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, separator, method, spatial=False):
-    """! Sample the input space time datasets with a sample space time dataset and print the result to stdout
+    """!Sample the input space time datasets with a sample 
+       space time dataset and print the result to stdout
+
+        In case multiple maps are located in the current granule, 
+        the map names are separated by comma.
 
 
-        In case multiple maps are located in the current granule, the map names are separated by comma.
-        
-        In case a layer is present, the names map ids are extended in this form: name:layer@mapset 
+        In case a layer is present, the map ids are extended
+        in this form: name:layer@mapset
 
 
         Attention: Do not use the comma as separator
         Attention: Do not use the comma as separator
 
 
@@ -515,12 +575,13 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
        @param sampletype: Type of the sample space time dataset (strds, stvds or str3ds)
        @param sampletype: Type of the sample space time dataset (strds, stvds or str3ds)
        @param inputs: Names of the input space time datasets as a comma separated string
        @param inputs: Names of the input space time datasets as a comma separated string
         @param sampler: Name of a space time dataset used for temporal sampling
         @param sampler: Name of a space time dataset used for temporal sampling
-        @param header: Set True to print column names 
+        @param header: Set True to print column names
         @param separator: The field separator character between the columns
         @param separator: The field separator character between the columns
-        @param method: The method to be used for temporal sampling (start,during,contain,overlap,equal)
+        @param method: The method to be used for temporal sampling 
+                       (start,during,contain,overlap,equal)
         @param spatial: Perform spatial overlapping check
         @param spatial: Perform spatial overlapping check
     """
     """
-    mapset =  core.gisenv()["MAPSET"]
+    mapset = core.gisenv()["MAPSET"]
 
 
     input_list = inputs.split(",")
     input_list = inputs.split(",")
     sts = []
     sts = []
@@ -554,9 +615,9 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
 
 
     sst.select(dbif)
     sst.select(dbif)
 
 
-    if separator == None or separator == "" or separator.find(",") >= 0:
+    if separator is None or separator == "" or separator.find(",") >= 0:
         separator = " | "
         separator = " | "
-       
+
     mapmatrizes = []
     mapmatrizes = []
     for st in sts:
     for st in sts:
         mapmatrix = st.sample_by_dataset(sst, method, spatial, dbif)
         mapmatrix = st.sample_by_dataset(sst, method, spatial, dbif)
@@ -573,7 +634,7 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("start_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("end_time", separator)
             string += "%s%s" % ("interval_length", separator)
             string += "%s%s" % ("interval_length", separator)
-            string += "%s"   % ("distance_from_begin")
+            string += "%s" % ("distance_from_begin")
 
 
         first_time, dummy = mapmatrizes[0][0]["granule"].get_valid_time()
         first_time, dummy = mapmatrizes[0][0]["granule"].get_valid_time()
 
 
@@ -590,7 +651,7 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
                         mapnames += ",%s" % str(sample.get_id())
                         mapnames += ",%s" % str(sample.get_id())
                     count += 1
                     count += 1
                 mapname_list.append(mapnames)
                 mapname_list.append(mapnames)
-                
+
             entry = mapmatrizes[0][i]
             entry = mapmatrizes[0][i]
             map = entry["granule"]
             map = entry["granule"]
 
 
@@ -613,8 +674,7 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header, se
             string += "%s%s" % (start, separator)
             string += "%s%s" % (start, separator)
             string += "%s%s" % (end, separator)
             string += "%s%s" % (end, separator)
             string += "%s%s" % (delta, separator)
             string += "%s%s" % (delta, separator)
-            string += "%s"   % (delta_first)
+            string += "%s" % (delta_first)
             print string
             print string
 
 
     dbif.close()
     dbif.close()
-
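
An illustrative call (not part of the commit; both dataset names are hypothetical) that samples one space time raster dataset with another using the "during" relation, assuming it runs in the context of this module:

sample_stds_by_stds_topology(intype="strds", sampletype="strds",
                             inputs="precipitation@PERMANENT",
                             sampler="seasons@PERMANENT",
                             header=True, separator=" | ",
                             method="during", spatial=False)
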

+ 185 - 141
lib/python/temporal/spatial_extent.py

@@ -6,6 +6,8 @@ Temporal GIS related spatial extent functions to be used in Python scripts and t
 
 
 Usage:
 Usage:
 
 
+@code
+
 >>> import grass.temporal as tgis
 >>> import grass.temporal as tgis
 >>> extent = tgis.RasterSpatialExtent( 
 >>> extent = tgis.RasterSpatialExtent( 
 ... ident="raster@PERMANENT", north=90, south=90, east=180, west=180,
 ... ident="raster@PERMANENT", north=90, south=90, east=180, west=180,
@@ -26,6 +28,7 @@ Usage:
 ... ident="stvds@PERMANENT", north=90, south=90, east=180, west=180,
 ... ident="stvds@PERMANENT", north=90, south=90, east=180, west=180,
 ... top=100, bottom=-20)
 ... top=100, bottom=-20)
 
 
+@endcode
 
 
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
@@ -43,8 +46,11 @@ class SpatialExtent(SQLDatabaseInterface):
         This class implements a three dimensional axis aligned bounding box
         This class implements a three dimensional axis aligned bounding box
         and functions to compute topological relationships
         and functions to compute topological relationships
         
         
-        >>> import grass.temporal as tgis
-        >>> extent = tgis.SpatialExtent(table="raster_spatial_extent", 
+        Usage:
+        
+        @code
+        
+        >>> extent = SpatialExtent(table="raster_spatial_extent", 
         ... ident="soil@PERMANENT", north=90, south=90, east=180, west=180,
         ... ident="soil@PERMANENT", north=90, south=90, east=180, west=180,
         ... top=100, bottom=-20)
         ... top=100, bottom=-20)
         >>> extent.id
         >>> extent.id
@@ -76,6 +82,8 @@ class SpatialExtent(SQLDatabaseInterface):
         west=180.0
         west=180.0
         top=100.0
         top=100.0
         bottom=-20.0
         bottom=-20.0
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, north=None, south=None, 
     def __init__(self, table=None, ident=None, north=None, south=None, 
                  east=None, west=None, top=None, bottom=None, proj="XY"):
                  east=None, west=None, top=None, bottom=None, proj="XY"):
@@ -98,7 +106,8 @@ class SpatialExtent(SQLDatabaseInterface):
         """
         """
 
 
         if self.get_projection() != extent.get_projection():
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute overlapping_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "overlapping_2d for spatial extents"))
             return False
             return False
 
 
         N = extent.get_north()
         N = extent.get_north()
@@ -142,11 +151,14 @@ class SpatialExtent(SQLDatabaseInterface):
            
            
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.overlapping(B)
         >>> A.overlapping(B)
         True
         True
+        
+        @endcode
         """
         """
 
 
         if not self.overlapping_2d(extent):
         if not self.overlapping_2d(extent):
@@ -164,8 +176,8 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def intersect_2d(self, extent):
     def intersect_2d(self, extent):
-        """!Return the two dimensional intersection as spatial_extent object or None
-           in case no intersection was found.
+        """!Return the two dimensional intersection as spatial_extent 
+           object or None in case no intersection was found.
         """
         """
 
 
         if not self.overlapping_2d(extent):
         if not self.overlapping_2d(extent):
@@ -211,14 +223,15 @@ class SpatialExtent(SQLDatabaseInterface):
         return new
         return new
 
 
     def intersect(self, extent):
     def intersect(self, extent):
-        """!Return the three dimensional intersection as spatial_extent object or None
-        in case no intersection was found.
+        """!Return the three dimensional intersection as spatial_extent 
+        object or None in case no intersection was found.
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C = A.intersect(B)
         >>> C.print_info()
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
          +-------------------- Spatial extent ----------------------------------------+
@@ -228,7 +241,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 10.0
          | West:....................... 10.0
          | Top:........................ 50.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=10, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C = A.intersect(B)
         >>> C.print_info()
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
          +-------------------- Spatial extent ----------------------------------------+
@@ -238,7 +251,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 10.0
          | West:....................... 10.0
          | Top:........................ 50.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-50, top=50)
         >>> C = A.intersect(B)
         >>> C = A.intersect(B)
         >>> C.print_info()
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
          +-------------------- Spatial extent ----------------------------------------+
@@ -248,7 +261,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 30.0
          | West:....................... 30.0
          | Top:........................ 50.0
          | Top:........................ 50.0
          | Bottom:..................... -50.0
          | Bottom:..................... -50.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=50)
         >>> C = A.intersect(B)
         >>> C = A.intersect(B)
         >>> C.print_info()
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
          +-------------------- Spatial extent ----------------------------------------+
@@ -258,7 +271,7 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 30.0
          | West:....................... 30.0
          | Top:........................ 50.0
          | Top:........................ 50.0
          | Bottom:..................... -30.0
          | Bottom:..................... -30.0
-        >>> B = tgis.SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
+        >>> B = SpatialExtent(north=40, south=30, east=60, west=30, bottom=-30, top=30)
         >>> C = A.intersect(B)
         >>> C = A.intersect(B)
         >>> C.print_info()
         >>> C.print_info()
          +-------------------- Spatial extent ----------------------------------------+
          +-------------------- Spatial extent ----------------------------------------+
@@ -268,6 +281,8 @@ class SpatialExtent(SQLDatabaseInterface):
          | West:....................... 30.0
          | West:....................... 30.0
          | Top:........................ 30.0
          | Top:........................ 30.0
          | Bottom:..................... -30.0
          | Bottom:..................... -30.0
+         
+         @endcode
         """
         """
 
 
         if not self.overlapping(extent):
         if not self.overlapping(extent):
@@ -295,15 +310,19 @@ class SpatialExtent(SQLDatabaseInterface):
         return new
         return new
 
 
     def is_in_2d(self, extent):
     def is_in_2d(self, extent):
-        """Check two dimensional if the self is located in extent
-
+        """!Check two dimensional if the self is located in extent
+        """!Check two dimensional if self is located in extent
+        @verbatim
          _____
          _____
         |A _  |
         |A _  |
         | |_| |
         | |_| |
         |_____|B 
         |_____|B 
+        
+        @endverbatim
         """
         """
         if self.get_projection() != extent.get_projection():
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute is_in_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "is_in_2d for spatial extents"))
             return False
             return False
 
 
         eN = extent.get_north()
         eN = extent.get_north()
@@ -338,17 +357,20 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def is_in(self, extent):
     def is_in(self, extent):
-        """Check three dimensional if the self is located in extent 
+        """!Check three dimensional if self is located in extent
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=79, south=21, east=59, west=11, bottom=-49, top=49)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=79, south=21, east=59, west=11, bottom=-49, top=49)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.is_in(B)
         >>> A.is_in(B)
         True
         True
         >>> B.is_in(A)
         >>> B.is_in(A)
         False
         False
+        
+        @endcode
         """
         """
         if not self.is_in_2d(extent):
         if not self.is_in_2d(extent):
             return False
             return False
@@ -367,18 +389,19 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def contain_2d(self, extent):
     def contain_2d(self, extent):
-        """Check two dimensional if self contains extent """
+        """!Check two dimensional if self contains extent """
         return extent.is_in_2d(self)
         return extent.is_in_2d(self)
 
 
     def contain(self, extent):
     def contain(self, extent):
-        """Check three dimensional if self contains extent """
+        """!Check three dimensional if self contains extent """
         return extent.is_in(self)
         return extent.is_in(self)
 
 
     def equivalent_2d(self, extent):
     def equivalent_2d(self, extent):
-        """Check two dimensional if self is equivalent to extent """
+        """!Check two dimensional if self is equivalent to extent """
 
 
         if self.get_projection() != extent.get_projection():
         if self.get_projection() != extent.get_projection():
-            core.error(_("Projections are different. Unable to compute equivalent_2d for spatial extents"))
+            core.error(_("Projections are different. Unable to compute "
+                         "equivalent_2d for spatial extents"))
             return False
             return False
 
 
         eN = extent.get_north()
         eN = extent.get_north()
@@ -413,7 +436,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def equivalent(self, extent):
     def equivalent(self, extent):
-        """Check three dimensional if self is equivalent to extent """
+        """!Check three dimensional if self is equivalent to extent """
 
 
         if not self.equivalent_2d(extent):
         if not self.equivalent_2d(extent):
             return False
             return False
@@ -432,7 +455,9 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def cover_2d(self, extent):
     def cover_2d(self, extent):
-        """Return True if two dimensional self covers extent
+        """!Return True if two dimensional self covers extent
+           
+           @verbatim
             _____    _____    _____    _____
             _____    _____    _____    _____
            |A  __|  |__  A|  |A | B|  |B | A|
            |A  __|  |__  A|  |A | B|  |B | A|
            |  |B |  | B|  |  |  |__|  |__|  |
            |  |B |  | B|  |  |  |__|  |__|  |
@@ -447,6 +472,8 @@ class SpatialExtent(SQLDatabaseInterface):
            |A|B  |  |_____|A |A|B|A|  |_____|A
            |A|B  |  |_____|A |A|B|A|  |_____|A
            | |   |  |B    |  | | | |  |_____|B
            | |   |  |B    |  | | | |  |_____|B
            |_|___|  |_____|  |_|_|_|  |_____|A
            |_|___|  |_____|  |_|_|_|  |_____|A
+           
+           @endverbatim
 
 
            The following cases are excluded:
            The following cases are excluded:
            * contain
            * contain
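
The covering cases sketched in the excerpt above can be checked directly. A doctest-style sketch (not part of the commit), reusing extents from the spatial_relation() examples further down:

>>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
>>> B = SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
>>> A.cover_2d(B)
True
>>> B.covered_2d(A)
True
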
@@ -513,7 +540,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def cover(self, extent):
     def cover(self, extent):
-        """Return True if three dimensional self covers extent
+        """!Return True if three dimensional self covers extent
 
 
            The following cases are excluded:
            The following cases are excluded:
            * contain
            * contain
@@ -598,22 +625,27 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def covered_2d(self, extent):
     def covered_2d(self, extent):
-        """Check two dimensional if self is covered by  extent """
+        """!Check two dimensional if self is covered by extent """
 
 
         return extent.cover_2d(self)
         return extent.cover_2d(self)
 
 
     def covered(self, extent):
     def covered(self, extent):
-        """Check three dimensional if self is covered by extent """
+        """!Check three dimensional if self is covered by extent """
 
 
         return extent.cover(self)
         return extent.cover(self)
 
 
     def overlap_2d(self, extent):
     def overlap_2d(self, extent):
-        """Return True if the two dimensional extents overlap. Code is lend from wind_overlap.c in lib/gis
+        """!Return True if the two dimensional extents overlap. Code is
+           borrowed from wind_overlap.c in lib/gis
+           
+           @verbatim
             _____
             _____
            |A  __|__
            |A  __|__
            |  |  | B|
            |  |  | B|
            |__|__|  |
            |__|__|  |
               |_____|
               |_____|
+              
+           @endverbatim
 
 
            The following cases are excluded:
            The following cases are excluded:
            * contain
            * contain
@@ -668,7 +700,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def overlap(self, extent):
     def overlap(self, extent):
-        """Return True if the three dimensional extents overlap
+        """!Return True if the three dimensional extents overlap
 
 
            The following cases are excluded:
            The following cases are excluded:
            * contain
            * contain
@@ -731,22 +763,26 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def meet_2d(self, extent):
     def meet_2d(self, extent):
-        """ Check if self and extent meet each other in two dimensions
+        """!Check if self and extent meet each other in two dimensions
+        
+        @verbatim
           _____ _____    _____ _____
           _____ _____    _____ _____
          |  A  |  B  |  |  B  |  A  |
          |  A  |  B  |  |  B  |  A  |
          |_____|     |  |     |     |
          |_____|     |  |     |     |
                |_____|  |_____|_____|
                |_____|  |_____|_____|
 
 
-                 ___
-                | A |
-                |   |
-                |___|    _____
-               |  B  |  |  B  |
-               |     |  |     |
-               |_____|  |_____|_
-                          |  A  |
-                          |     |
-                          |_____|
+           ___
+          | A |
+          |   |
+          |___|    _____
+         |  B  |  |  B  |
+         |     |  |     |
+         |_____|  |_____|_
+                    |  A  |
+                    |     |
+                    |_____|
+         
+         @endverbatim
 
 
         """
         """
 
 
@@ -805,7 +841,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def meet(self, extent):
     def meet(self, extent):
-        """ Check if self and extent meet each other in three dimensions"""
+        """!Check if self and extent meet each other in three dimensions"""
         eN = extent.get_north()
         eN = extent.get_north()
         eS = extent.get_south()
         eS = extent.get_south()
         eE = extent.get_east()
         eE = extent.get_east()
@@ -880,7 +916,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def disjoint_2d(self, extent):
     def disjoint_2d(self, extent):
-        """Return True if the two dimensional extents are disjoint
+        """!Return True if the two dimensional extents are disjoint
         """
         """
 
 
         if self.overlapping_2d(extent) or self.meet_2d(extent):
         if self.overlapping_2d(extent) or self.meet_2d(extent):
@@ -888,7 +924,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def disjoint(self, extent):
     def disjoint(self, extent):
-        """Return True if the three dimensional extents are disjoint
+        """!Return True if the three dimensional extents are disjoint
         """
         """
 
 
         if self.overlapping(extent) or self.meet(extent):
         if self.overlapping(extent) or self.meet(extent):
@@ -896,7 +932,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return True
         return True
 
 
     def spatial_relation_2d(self, extent):
     def spatial_relation_2d(self, extent):
-        """Returns the two dimensional spatial relation between self and extent
+        """!Returns the two dimensional spatial relation between self and extent
 
 
         Spatial relations are:
         Spatial relations are:
         * disjoint
         * disjoint
@@ -931,7 +967,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return "unknown"
         return "unknown"
 
 
     def spatial_relation(self, extent):
     def spatial_relation(self, extent):
-        """Returns the three dimensional spatial relation between self and extent
+        """!Returns the three dimensional spatial relation between self and extent
 
 
         Spatial relations are:
         Spatial relations are:
         * disjoint
         * disjoint
@@ -946,19 +982,20 @@ class SpatialExtent(SQLDatabaseInterface):
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        @code
+        
+        >>> A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'equivalent'
         'equivalent'
         >>> B.spatial_relation(A)
         >>> B.spatial_relation(A)
         'equivalent'
         'equivalent'
-        >>> B = tgis.SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'cover'
         'cover'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'cover'
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'cover'
         'cover'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
@@ -967,135 +1004,137 @@ class SpatialExtent(SQLDatabaseInterface):
         'covered'
         'covered'
         >>> B.spatial_relation(A)
         >>> B.spatial_relation(A)
         'covered'
         'covered'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'cover'
         'cover'
         >>> B.spatial_relation_2d(A)
         >>> B.spatial_relation_2d(A)
         'covered'
         'covered'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'cover'
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
         >>> B.spatial_relation(A)
         >>> B.spatial_relation(A)
         'covered'
         'covered'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'contain'
         'contain'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'cover'
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'cover'
         'cover'
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'contain'
         'contain'
         >>> B.spatial_relation(A)
         >>> B.spatial_relation(A)
         'in'
         'in'
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'overlap'
         'overlap'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'overlap'
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'in'
         'in'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'overlap'
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'overlap'
         'overlap'
-        >>> B = tgis.SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
+        >>> B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'in'
         'in'
-        >>> A = tgis.SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'meet'
         'meet'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'meet'
         'meet'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'meet'
         'meet'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'meet'
         'meet'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'meet'
         'meet'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'meet'
         'meet'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
         >>> A.spatial_relation_2d(B)
         >>> A.spatial_relation_2d(B)
         'disjoint'
         'disjoint'
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'disjoint'
         'disjoint'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
+        >>> A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
-        >>> A = tgis.SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
-        >>> B = tgis.SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
+        >>> A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
+        >>> B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
         >>> A.spatial_relation(B)
         >>> A.spatial_relation(B)
         'meet'
         'meet'
+        
+        @endverbatim
         """
         """
 
 
         if self.equivalent(extent):
         if self.equivalent(extent):
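Only the equivalence check at the top of spatial_relation() is visible in this hunk; assuming the method reports 'equivalent' for identical extents (the return value itself is not shown here), a matching doctest-style example would read:

    >>> import grass.temporal as tgis
    >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    >>> B = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    >>> A.spatial_relation_2d(B)
    'equivalent'
    >>> A.spatial_relation(B)
    'equivalent'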
@@ -1118,7 +1157,7 @@ class SpatialExtent(SQLDatabaseInterface):
         return "unknown"
         return "unknown"
 
 
     def set_spatial_extent(self, north, south, east, west, top, bottom):
     def set_spatial_extent(self, north, south, east, west, top, bottom):
-        """Set the spatial extent"""
+        """!Set the spatial extent"""
 
 
         self.set_north(north)
         self.set_north(north)
         self.set_south(south)
         self.set_south(south)
@@ -1128,7 +1167,7 @@ class SpatialExtent(SQLDatabaseInterface):
         self.set_bottom(bottom)
         self.set_bottom(bottom)
 
 
     def set_projection(self, proj):
     def set_projection(self, proj):
-        """Set the projection of the spatial extent it should be XY or LL.
+        """!Set the projection of the spatial extent; it should be XY or LL.
            As default the projection is XY
            As default the projection is XY
         """
         """
         if proj is None or (proj != "XY" and proj != "LL"):
         if proj is None or (proj != "XY" and proj != "LL"):
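A minimal sketch of the two setters touched in this hunk, assuming set_projection() keeps a valid "XY"/"LL" value unchanged (only its fallback branch is shown above) and that the extent can be built with keyword arguments as in the doctests:

    >>> import grass.temporal as tgis
    >>> E = tgis.SpatialExtent(north=0, south=0, east=0, west=0, bottom=0, top=0)
    >>> E.set_spatial_extent(north=80, south=20, east=60, west=10, top=50, bottom=-50)
    >>> E.set_projection("LL")
    >>> E.get_projection()
    'LL'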
@@ -1144,54 +1183,54 @@ class SpatialExtent(SQLDatabaseInterface):
         self.set_west(west)
         self.set_west(west)
 
 
     def set_id(self, ident):
     def set_id(self, ident):
-        """Convenient method to set the unique identifier (primary key)"""
+        """!Convenient method to set the unique identifier (primary key)"""
         self.ident = ident
         self.ident = ident
         self.D["id"] = ident
         self.D["id"] = ident
 
 
     def set_north(self, north):
     def set_north(self, north):
-        """Set the northern edge of the map"""
+        """!Set the northern edge of the map"""
         if north is not None:
         if north is not None:
             self.D["north"] = float(north)
             self.D["north"] = float(north)
         else:
         else:
             self.D["north"] = None
             self.D["north"] = None
 
 
     def set_south(self, south):
     def set_south(self, south):
-        """Set the southern edge of the map"""
+        """!Set the southern edge of the map"""
         if south is not None:
         if south is not None:
             self.D["south"] = float(south)
             self.D["south"] = float(south)
         else:
         else:
             self.D["south"] = None
             self.D["south"] = None
 
 
     def set_west(self, west):
     def set_west(self, west):
-        """Set the western edge of the map"""
+        """!Set the western edge of the map"""
         if west is not None:
         if west is not None:
             self.D["west"] = float(west)
             self.D["west"] = float(west)
         else:
         else:
             self.D["west"] = None
             self.D["west"] = None
 
 
     def set_east(self, east):
     def set_east(self, east):
-        """Set the eastern edge of the map"""
+        """!Set the eastern edge of the map"""
         if east is not None:
         if east is not None:
             self.D["east"] = float(east)
             self.D["east"] = float(east)
         else:
         else:
             self.D["east"] = None
             self.D["east"] = None
 
 
     def set_top(self, top):
     def set_top(self, top):
-        """Set the top edge of the map"""
+        """!Set the top edge of the map"""
         if top is not None:
         if top is not None:
             self.D["top"] = float(top)
             self.D["top"] = float(top)
         else:
         else:
             self.D["top"] = None
             self.D["top"] = None
 
 
     def set_bottom(self, bottom):
     def set_bottom(self, bottom):
-        """Set the bottom edge of the map"""
+        """!Set the bottom edge of the map"""
         if bottom is not None:
         if bottom is not None:
             self.D["bottom"] = float(bottom)
             self.D["bottom"] = float(bottom)
         else:
         else:
             self.D["bottom"] = None
             self.D["bottom"] = None
 
 
     def get_id(self):
     def get_id(self):
-        """Convenient method to get the unique identifier (primary key)
+        """!Convenient method to get the unique identifier (primary key)
            @return None if not found
            @return None if not found
         """
         """
         if "id" in self.D:
         if "id" in self.D:
@@ -1200,15 +1239,16 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_projection(self):
     def get_projection(self):
-        """Get the projection of the spatial extent"""
+        """!Get the projection of the spatial extent"""
         return self.D["proj"]
         return self.D["proj"]
 
 
     def get_volume(self):
     def get_volume(self):
-        """Compute the volume of the extent, in case z is zero 
+        """!Compute the volume of the extent; in case z is zero
            (top == bottom or top - bottom = 1) the area is returned"""
            (top == bottom or top - bottom = 1) the area is returned"""
 
 
         if self.get_projection() == "LL":
         if self.get_projection() == "LL":
-            core.error(_("Volume computation is not supported for LL projections"))
+            core.error(_("Volume computation is not supported "
+                         "for LL projections"))
 
 
         area = self.get_area()
         area = self.get_area()
 
 
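For an XY extent the area is (east - west) * (north - south) and the volume multiplies that by (top - bottom). A worked example with the test extent used in the doctests above, assuming the setters store the edges as floats:

    >>> import grass.temporal as tgis
    >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    >>> A.get_area()      # (60 - 10) * (80 - 20)
    3000.0
    >>> A.get_volume()    # 3000 * (50 - (-50))
    300000.0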
@@ -1222,10 +1262,11 @@ class SpatialExtent(SQLDatabaseInterface):
         return area * z
         return area * z
 
 
     def get_area(self):
     def get_area(self):
-        """Compute the area of the extent, extent in z direction is ignored"""
+        """!Compute the area of the extent; the extent in z direction is ignored"""
 
 
         if self.get_projection() == "LL":
         if self.get_projection() == "LL":
-            core.error(_("Area computation is not supported for LL projections"))
+            core.error(_("Area computation is not supported "
+                         "for LL projections"))
 
 
         bbox = self.get_spatial_extent()
         bbox = self.get_spatial_extent()
 
 
@@ -1235,18 +1276,20 @@ class SpatialExtent(SQLDatabaseInterface):
         return x * y
         return x * y
 
 
     def get_spatial_extent(self):
     def get_spatial_extent(self):
-        """Return a tuple (north, south, east, west, top, bottom) of the spatial extent"""
+        """!Return a tuple (north, south, east, west, top, bottom) 
+           of the spatial extent"""
 
 
         return (
         return (
-            self.get_north(), self.get_south, self.get_east(), self.get_west(),
-            self.get_top(), self.get_bottom())
+            self.north, self.south, self.east, self.west,
+            self.top, self.bottom)
 
 
     def get_spatial_extent_2d(self):
     def get_spatial_extent_2d(self):
-        """Return a tuple (north, south, east, west,) of the 2d spatial extent"""
-        return (self.get_north(), self.get_south, self.get_east(), self.get_west())
+        """!Return a tuple (north, south, east, west) of the 2D spatial extent
+        """
+        return (self.north, self.south, self.east, self.west)
 
 
     def get_north(self):
     def get_north(self):
-        """Get the northern edge of the map
+        """!Get the northern edge of the map
            @return None if not found"""
            @return None if not found"""
         if "north" in self.D:
         if "north" in self.D:
             return self.D["north"]
             return self.D["north"]
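The two accessors rewritten above now read the properties directly; assuming the constructor routes its arguments through the float-casting set_*() methods, the returned tuples look like this:

    >>> import grass.temporal as tgis
    >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    >>> A.get_spatial_extent()
    (80.0, 20.0, 60.0, 10.0, 50.0, -50.0)
    >>> A.get_spatial_extent_2d()
    (80.0, 20.0, 60.0, 10.0)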
@@ -1254,7 +1297,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_south(self):
     def get_south(self):
-        """Get the southern edge of the map
+        """!Get the southern edge of the map
            @return None if not found"""
            @return None if not found"""
         if "south" in self.D:
         if "south" in self.D:
             return self.D["south"]
             return self.D["south"]
@@ -1262,7 +1305,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_east(self):
     def get_east(self):
-        """Get the eastern edge of the map
+        """!Get the eastern edge of the map
            @return None if not found"""
            @return None if not found"""
         if "east" in self.D:
         if "east" in self.D:
             return self.D["east"]
             return self.D["east"]
@@ -1270,7 +1313,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_west(self):
     def get_west(self):
-        """Get the western edge of the map
+        """!Get the western edge of the map
            @return None if not found"""
            @return None if not found"""
         if "west" in self.D:
         if "west" in self.D:
             return self.D["west"]
             return self.D["west"]
@@ -1278,7 +1321,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_top(self):
     def get_top(self):
-        """Get the top edge of the map
+        """!Get the top edge of the map
            @return None if not found"""
            @return None if not found"""
         if "top" in self.D:
         if "top" in self.D:
             return self.D["top"]
             return self.D["top"]
@@ -1286,7 +1329,7 @@ class SpatialExtent(SQLDatabaseInterface):
             return None
             return None
 
 
     def get_bottom(self):
     def get_bottom(self):
-        """Get the bottom edge of the map
+        """!Get the bottom edge of the map
            @return None if not found"""
            @return None if not found"""
         if "bottom" in self.D:
         if "bottom" in self.D:
             return self.D["bottom"]
             return self.D["bottom"]
@@ -1302,7 +1345,7 @@ class SpatialExtent(SQLDatabaseInterface):
     bottom= property(fget=get_bottom, fset=set_bottom)
     bottom= property(fget=get_bottom, fset=set_bottom)
 
 
     def print_info(self):
     def print_info(self):
-        """Print information about this class in human readable style"""
+        """!Print information about this class in human readable style"""
         #      0123456789012345678901234567890
         #      0123456789012345678901234567890
         print " +-------------------- Spatial extent ----------------------------------------+"
         print " +-------------------- Spatial extent ----------------------------------------+"
         print " | North:...................... " + str(self.get_north())
         print " | North:...................... " + str(self.get_north())
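Only the bottom property is visible in this hunk; assuming north, south, east, west and top are wired up the same way, the edges can be used as plain attributes:

    >>> import grass.temporal as tgis
    >>> A = tgis.SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    >>> A.north
    80.0
    >>> A.bottom = -30
    >>> A.get_bottom()
    -30.0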
@@ -1325,37 +1368,38 @@ class SpatialExtent(SQLDatabaseInterface):
 ###############################################################################
 ###############################################################################
 
 
 class RasterSpatialExtent(SpatialExtent):
 class RasterSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster_spatial_extent",
         SpatialExtent.__init__(self, "raster_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
                                 ident, north, south, east, west, top, bottom)
 
 
-
 class Raster3DSpatialExtent(SpatialExtent):
 class Raster3DSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster3d_spatial_extent",
         SpatialExtent.__init__(self, "raster3d_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
                                 ident, north, south, east, west, top, bottom)
 
 
-
 class VectorSpatialExtent(SpatialExtent):
 class VectorSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "vector_spatial_extent",
         SpatialExtent.__init__(self, "vector_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
                                 ident, north, south, east, west, top, bottom)
 
 
-
 class STRDSSpatialExtent(SpatialExtent):
 class STRDSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "strds_spatial_extent",
         SpatialExtent.__init__(self, "strds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
                                 ident, north, south, east, west, top, bottom)
 
 
-
 class STR3DSSpatialExtent(SpatialExtent):
 class STR3DSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "str3ds_spatial_extent",
         SpatialExtent.__init__(self, "str3ds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
                                 ident, north, south, east, west, top, bottom)
 
 
-
 class STVDSSpatialExtent(SpatialExtent):
 class STVDSSpatialExtent(SpatialExtent):
-    def __init__(self, ident=None, north=None, south=None, east=None, west=None, top=None, bottom=None):
+    def __init__(self, ident=None, north=None, south=None, east=None, 
+                 west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "stvds_spatial_extent",
         SpatialExtent.__init__(self, "stvds_spatial_extent",
                                 ident, north, south, east, west, top, bottom)
                                 ident, north, south, east, west, top, bottom)
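The subclasses above differ only in the table name they pass to the base class. A hedged sketch of constructing one, assuming the base constructor stores ident via set_id() and that the class is re-exported through grass.temporal (the map name elevation@PERMANENT is made up):

    >>> import grass.temporal as tgis
    >>> extent = tgis.RasterSpatialExtent(ident="elevation@PERMANENT",
    ...     north=80, south=20, east=60, west=10, top=50, bottom=-50)
    >>> extent.get_id()
    'elevation@PERMANENT'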
 
 

+ 228 - 190
lib/python/temporal/stds_export.py

@@ -14,9 +14,9 @@ output="/tmp/temp_1950_2012.tar.gz"
 compression="gzip"
 compression="gzip"
 workdir="/tmp"
 workdir="/tmp"
 where=None
 where=None
-_format="GTiff"
-_type="strds"
-tgis.export_stds(input, output, compression, workdir, where, _format, _type)
+format_="GTiff"
+type_="strds"
+tgis.export_stds(input, output, compression, workdir, where, format_, type_)
 ...
 ...
 @endcode
 @endcode
 
 
@@ -39,7 +39,7 @@ init_file_name = "init.txt"
 metadata_file_name = "metadata.txt"
 metadata_file_name = "metadata.txt"
 read_file_name = "readme.txt"
 read_file_name = "readme.txt"
 list_file_name = "list.txt"
 list_file_name = "list.txt"
-tmp_tar_file_name = "archive" 
+tmp_tar_file_name = "archive"
 
 
 # This global variable is for unique vector map export,
 # This global variable is for unique vector map export,
 # since single vector maps may have several layer
 # since single vector maps may have several layer
@@ -47,6 +47,8 @@ tmp_tar_file_name = "archive"
 exported_maps = {}
 exported_maps = {}
 
 
 ############################################################################
 ############################################################################
+
+
 def _export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs):
 def _export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs):
     for row in rows:
     for row in rows:
         name = row["name"]
         name = row["name"]
@@ -59,40 +61,46 @@ def _export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs):
             end = start
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r.out.gdal as tif
         # Export the raster map with r.out.gdal as tif
         out_name = name + ".tif"
         out_name = name + ".tif"
         if datatype == "CELL":
         if datatype == "CELL":
             nodata = max_val + 1
             nodata = max_val + 1
             if nodata < 256 and min_val >= 0:
             if nodata < 256 and min_val >= 0:
-                gdal_type = "Byte" 
+                gdal_type = "Byte"
             elif nodata < 65536 and min_val >= 0:
             elif nodata < 65536 and min_val >= 0:
-                gdal_type = "UInt16" 
+                gdal_type = "UInt16"
             elif min_val >= 0:
             elif min_val >= 0:
-                gdal_type = "UInt32" 
+                gdal_type = "UInt32"
             else:
             else:
-                gdal_type = "Int32" 
-            ret = core.run_command("r.out.gdal", flags="c", input=name, output=out_name, nodata=nodata, type=gdal_type, format="GTiff")
+                gdal_type = "Int32"
+            ret = core.run_command("r.out.gdal", flags="c", input=name, 
+                                   output=out_name, nodata=nodata, 
+                                   type=gdal_type, format="GTiff")
         else:
         else:
-            ret = core.run_command("r.out.gdal", flags="c", input=name, output=out_name, format="GTiff")
+            ret = core.run_command("r.out.gdal", flags="c",
+                                   input=name, output=out_name, format="GTiff")
         if ret != 0:
         if ret != 0:
             shutil.rmtree(new_cwd)
             shutil.rmtree(new_cwd)
             tar.close()
             tar.close()
             core.fatal(_("Unable to export raster map <%s>" % name))
             core.fatal(_("Unable to export raster map <%s>" % name))
-            
+
         tar.add(out_name)
         tar.add(out_name)
 
 
-        # Export the color rules 
+        # Export the color rules
         out_name = name + ".color"
         out_name = name + ".color"
         ret = core.run_command("r.colors.out", map=name, rules=out_name)
         ret = core.run_command("r.colors.out", map=name, rules=out_name)
         if ret != 0:
         if ret != 0:
             shutil.rmtree(new_cwd)
             shutil.rmtree(new_cwd)
             tar.close()
             tar.close()
-            core.fatal(_("Unable to export color rules for raster map <%s> r.out.gdal" % name))
-            
+            core.fatal(_("Unable to export color rules for raster "
+                         "map <%s> with r.colors.out" % name))
+
         tar.add(out_name)
         tar.add(out_name)
 
 
 ############################################################################
 ############################################################################
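The CELL branch of _export_raster_maps_as_geotiff() above picks the smallest GDAL type that can hold the map range plus a nodata value one above the maximum. A standalone sketch of just that decision, with min_val and max_val standing in for the values read from the map row:

    def pick_gdal_type(min_val, max_val):
        # Mirror of the CELL export branch: smallest type with room for nodata
        nodata = max_val + 1
        if nodata < 256 and min_val >= 0:
            return "Byte", nodata
        elif nodata < 65536 and min_val >= 0:
            return "UInt16", nodata
        elif min_val >= 0:
            return "UInt32", nodata
        else:
            return "Int32", nodata

    # pick_gdal_type(0, 254)   -> ('Byte', 255)
    # pick_gdal_type(-5, 1000) -> ('Int32', 1001)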
+
+
 def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
 def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
     for row in rows:
         name = row["name"]
         name = row["name"]
@@ -102,17 +110,20 @@ def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
             end = start
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r.pack
         # Export the raster map with r.pack
         ret = core.run_command("r.pack", input=name, flags="c")
         ret = core.run_command("r.pack", input=name, flags="c")
         if ret != 0:
         if ret != 0:
             shutil.rmtree(new_cwd)
             shutil.rmtree(new_cwd)
             tar.close()
             tar.close()
-            core.fatal(_("Unable to export raster map <%s> with r.pack" % name))
-            
+            core.fatal(_("Unable to export raster map <%s> with r.pack" %
+                         name))
+
         tar.add(name + ".pack")
         tar.add(name + ".pack")
-        
+
 ############################################################################
 ############################################################################
+
+
 def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
 def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
     for row in rows:
     for row in rows:
         name = row["name"]
         name = row["name"]
@@ -125,29 +136,33 @@ def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
             end = start
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the vector map with v.out.ogr
         # Export the vector map with v.out.ogr
-        ret = core.run_command("v.out.ogr", input=name, dsn=(name + ".xml"), layer=layer, format="GML")
+        ret = core.run_command("v.out.ogr", input=name, 
+                               dsn=(name + ".xml"), layer=layer, format="GML")
         if ret != 0:
         if ret != 0:
             shutil.rmtree(new_cwd)
             shutil.rmtree(new_cwd)
             tar.close()
             tar.close()
-            core.fatal(_("Unable to export vector map <%s> as GML with v.out.ogr" % name))
-            
+            core.fatal(_("Unable to export vector map <%s> as "
+                         "GML with v.out.ogr" % name))
+
         tar.add(name + ".xml")
         tar.add(name + ".xml")
         tar.add(name + ".xsd")
         tar.add(name + ".xsd")
-                
+
 ############################################################################
 ############################################################################
+
+
 def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
 def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
     for row in rows:
         name = row["name"]
         name = row["name"]
         start = row["start_time"]
         start = row["start_time"]
         end = row["end_time"]
         end = row["end_time"]
         layer = row["layer"]
         layer = row["layer"]
-        
+
         # Export unique maps only
         # Export unique maps only
         if name in exported_maps:
         if name in exported_maps:
             continue
             continue
-        
+
         if not layer:
         if not layer:
             layer = 1
             layer = 1
         if not end:
         if not end:
@@ -160,13 +175,16 @@ def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
         if ret != 0:
         if ret != 0:
             shutil.rmtree(new_cwd)
             shutil.rmtree(new_cwd)
             tar.close()
             tar.close()
-            core.fatal(_("Unable to export vector map <%s> with v.pack" % name))
-            
+            core.fatal(_("Unable to export vector map <%s> with v.pack" %
+                         name))
+
         tar.add(name + ".pack")
         tar.add(name + ".pack")
-        
+
         exported_maps[name] = name
         exported_maps[name] = name
-        
+
 ############################################################################
 ############################################################################
+
+
 def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
 def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
     for row in rows:
         name = row["name"]
         name = row["name"]
@@ -176,172 +194,192 @@ def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
             end = start
             end = start
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
         # Write the filename, the start_time and the end_time
         # Write the filename, the start_time and the end_time
-        list_file.write(string) 
+        list_file.write(string)
         # Export the raster map with r3.pack
         # Export the raster map with r3.pack
         ret = core.run_command("r3.pack", input=name, flags="c")
         ret = core.run_command("r3.pack", input=name, flags="c")
         if ret != 0:
         if ret != 0:
             shutil.rmtree(new_cwd)
             shutil.rmtree(new_cwd)
             tar.close()
             tar.close()
-            core.fatal(_("Unable to export raster map <%s> with r3.pack" % name))
-            
+            core.fatal(_("Unable to export 3D raster map <%s> with r3.pack" %
+                         name))
+
         tar.add(name + ".pack")
         tar.add(name + ".pack")
 
 
 ############################################################################
 ############################################################################
-def export_stds(input, output, compression, workdir, where, _format="pack", _type="strds"):
-	"""
-		!Export space time datasets as tar archive with optional compression
-		
-		This method should be used to export space time datasets of type raster and vector
-		as tar archive that can be reimported with the method import_stds().
-		
-		@param input The name of the space time dataset to export
-		@param output The name of the archive file
-		@param compression The compression of the archive file: 
-		  * "no"  no compression
-		  * "gzip" GNU zip compression
-		  * "bzip2" Bzip compression
-		@param workdir The working directory used for extraction and packing
-		@param where The temporal WHERE SQL statement to select a subset of maps from the space time dataset
-		@param _format The export format:
-		  * "GTiff" Geotiff format, only for raster maps
-		  * "pack" The GRASS raster, 3D raster or vector Pack format, this is the default setting
-		  * "GML" GML file export format, only for vector maps, v.out.ogr export option
-		@param type The space time dataset type
-		  * "strds" Space time raster dataset
-		  * "str3ds" Space time 3D raster dataset
-		  * "stvds" Space time vector dataset
-	"""
-	mapset =  core.gisenv()["MAPSET"]
-
-	if input.find("@") >= 0:
-		id = input
-	else:
-		id = input + "@" + mapset
-		
-	sp = dataset_factory(_type, id)
-
-	if sp.is_in_db() == False:
-		core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
-
-	# Save current working directory path
-	old_cwd = os.getcwd()
-
-	# Create the temporary directory and jump into it
-	new_cwd = tempfile.mkdtemp(dir=workdir)
-	os.chdir(new_cwd)
-
-	sp.select()
-	   
-	if _type == "strds":
-		columns = "name,start_time,end_time,min,max,datatype"
-	elif _type == "stvds":
-		columns = "name,start_time,end_time,layer"
-	else:
-		columns = "name,start_time,end_time"
-	rows = sp.get_registered_maps(columns, where, "start_time", None)
-
-	if compression == "gzip":
-		flag = "w:gz"
-	elif compression == "bzip2":
-		flag = "w:bz2"
-	else:
-		flag = "w:"
-
-	# Open the tar archive to add the files
-	tar = tarfile.open(tmp_tar_file_name, flag)
-	list_file = open(list_file_name, "w")
-
-	fs = "|"
-
-	if rows:
-		if _type == "strds":
-			if _format == "GTiff":
-				_export_raster_maps_as_geotiff(rows, tar, list_file, new_cwd, fs)
-			else:
-				_export_raster_maps(rows, tar, list_file, new_cwd, fs)
-		elif _type == "stvds":
-			if _format == "GML":
-				_export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs)
-			else:
-				_export_vector_maps(rows, tar, list_file, new_cwd, fs)
-		elif _type == "str3ds":
-			_export_raster3d_maps(rows, tar, list_file, new_cwd, fs)
-		
-	list_file.close()
-
-	# Write projection and metadata
-	proj = core.read_command("g.proj", flags="j")
-
-	proj_file = open(proj_file_name, "w")
-	proj_file.write(proj)
-	proj_file.close()
-
-	init_file = open(init_file_name, "w")
-	# Create the init string
-	string = ""
-	string += "%s=%s\n" % ("stds_type", sp.get_type()) # This is optional, if not present strds will be assumed for backward compatibility
-	string += "%s=%s\n" % ("format", _format) # This is optional, if not present gtiff will be assumed for backward compatibility
-	string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
-	string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
-	string += "%s=%s\n" % ("number_of_maps", sp.metadata.get_number_of_maps())
-	north, south, east, west, top, bottom = sp.get_spatial_extent()
-	string += "%s=%s\n" % ("north", north)
-	string += "%s=%s\n" % ("south", south)
-	string += "%s=%s\n" % ("east", east)
-	string += "%s=%s\n" % ("west", west)
-	init_file.write(string)
-	init_file.close()
-
-	metadata = core.read_command("t.info", type=_type, input=id)
-	metadata_file = open(metadata_file_name, "w")
-	metadata_file.write(metadata)
-	metadata_file.close()
-
-	read_file = open(read_file_name, "w")
-	if _type == "strds":
-		read_file.write("This space time raster dataset was exported with t.rast.export of GRASS GIS 7\n")
-	elif _type == "stvds":
-		read_file.write("This space time vector dataset was exported with t.vect.export of GRASS GIS 7\n")
-	elif _type == "str3ds":
-		read_file.write("This space time 3D raster dataset was exported with t.rast3d.export of GRASS GIS 7\n")
-	read_file.write("\n")
-	read_file.write("Files:\n")
-	if _type == "strds":
-		if _format == "GTiff":
-					#123456789012345678901234567890
-			read_file.write("       *.tif  -- GeoTIFF raster files\n")
-			read_file.write("     *.color  -- GRASS GIS raster color rules\n")
-		elif _format == "pack":
-			read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
-	elif _type == "stvds":
-					#123456789012345678901234567890
-		if _format == "GML":
-			read_file.write("       *.xml  -- Vector GML files\n")
-		else:
-			read_file.write("      *.pack  -- GRASS vector files packed with v.pack\n")
-	elif _type == "str3ds":
-		read_file.write("      *.pack  -- GRASS 3D raster files packed with r3.pack\n")
-	read_file.write("%13s -- Projection information in PROJ.4 format\n" % (proj_file_name))
-	read_file.write("%13s -- GRASS GIS space time %s dataset information\n" % (init_file_name, sp.get_new_map_instance(None).get_type()))
-	read_file.write("%13s -- Time series file, lists all maps by name with interval\n"  % (list_file_name))
-	read_file.write("                 time stamps in ISO-Format. Field separator is |\n")
-	read_file.write("%13s -- Projection information in PROJ.4 format\n" % (metadata_file_name))
-	read_file.write("%13s -- This file\n" % (read_file_name))
-	read_file.close()
-
-	# Append the file list
-	tar.add(list_file_name)
-	tar.add(proj_file_name)
-	tar.add(init_file_name)
-	tar.add(read_file_name)
-	tar.add(metadata_file_name)
-	tar.close()
-
-	os.chdir(old_cwd)
-
-	# Move the archive to its destination
-	shutil.move(os.path.join(new_cwd, tmp_tar_file_name), output)
-
-	# Remove the temporary created working directory
-	shutil.rmtree(new_cwd)
 
 
+
+def export_stds(input, output, compression, workdir, where, format_="pack", 
+                type_="strds"):
+    """
+            !Export space time datasets as a tar archive with optional compression
+
+            This method should be used to export space time datasets
+            of type raster and vector as a tar archive that can be reimported
+            with the method import_stds().
+
+            @param input: The name of the space time dataset to export
+            @param output: The name of the archive file
+            @param compression: The compression of the archive file:
+              * "no"  no compression
+              * "gzip" GNU zip compression
+              * "bzip2" Bzip compression
+            @param workdir: The working directory used for extraction and packing
+            @param where: The temporal WHERE SQL statement to select a subset 
+                          of maps from the space time dataset
+            @param format_: The export format:
+              * "GTiff" GeoTIFF format, only for raster maps
+              * "pack" The GRASS raster, 3D raster or vector Pack format;
+                       this is the default setting
+              * "GML" GML file export format, only for vector maps, 
+                      v.out.ogr export option
+            @param type_: The space time dataset type
+              * "strds" Space time raster dataset
+              * "str3ds" Space time 3D raster dataset
+              * "stvds" Space time vector dataset
+    """
+    mapset = core.gisenv()["MAPSET"]
+
+    if input.find("@") >= 0:
+        id = input
+    else:
+        id = input + "@" + mapset
+
+    sp = dataset_factory(type_, id)
+
+    if not sp.is_in_db():
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
+
+    # Save current working directory path
+    old_cwd = os.getcwd()
+
+    # Create the temporary directory and jump into it
+    new_cwd = tempfile.mkdtemp(dir=workdir)
+    os.chdir(new_cwd)
+
+    sp.select()
+
+    if type_ == "strds":
+        columns = "name,start_time,end_time,min,max,datatype"
+    elif type_ == "stvds":
+        columns = "name,start_time,end_time,layer"
+    else:
+        columns = "name,start_time,end_time"
+    rows = sp.get_registered_maps(columns, where, "start_time", None)
+
+    if compression == "gzip":
+        flag = "w:gz"
+    elif compression == "bzip2":
+        flag = "w:bz2"
+    else:
+        flag = "w:"
+
+    # Open the tar archive to add the files
+    tar = tarfile.open(tmp_tar_file_name, flag)
+    list_file = open(list_file_name, "w")
+
+    fs = "|"
+
+    if rows:
+        if type_ == "strds":
+            if format_ == "GTiff":
+                _export_raster_maps_as_geotiff(
+                    rows, tar, list_file, new_cwd, fs)
+            else:
+                _export_raster_maps(rows, tar, list_file, new_cwd, fs)
+        elif type_ == "stvds":
+            if format_ == "GML":
+                _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs)
+            else:
+                _export_vector_maps(rows, tar, list_file, new_cwd, fs)
+        elif type_ == "str3ds":
+            _export_raster3d_maps(rows, tar, list_file, new_cwd, fs)
+
+    list_file.close()
+
+    # Write projection and metadata
+    proj = core.read_command("g.proj", flags="j")
+
+    proj_file = open(proj_file_name, "w")
+    proj_file.write(proj)
+    proj_file.close()
+
+    init_file = open(init_file_name, "w")
+    # Create the init string
+    string = ""
+    # This is optional; if not present, strds will be assumed for
+    # backward compatibility
+    string += "%s=%s\n" % ("stds_type", sp.get_type())
+    # This is optional; if not present, GTiff will be assumed for
+    # backward compatibility
+    string += "%s=%s\n" % ("format", format_)
+    string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
+    string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
+    string += "%s=%s\n" % ("number_of_maps", sp.metadata.get_number_of_maps())
+    north, south, east, west, top, bottom = sp.get_spatial_extent()
+    string += "%s=%s\n" % ("north", north)
+    string += "%s=%s\n" % ("south", south)
+    string += "%s=%s\n" % ("east", east)
+    string += "%s=%s\n" % ("west", west)
+    init_file.write(string)
+    init_file.close()
+
+    metadata = core.read_command("t.info", type=type_, input=id)
+    metadata_file = open(metadata_file_name, "w")
+    metadata_file.write(metadata)
+    metadata_file.close()
+
+    read_file = open(read_file_name, "w")
+    if type_ == "strds":
+        read_file.write("This space time raster dataset was exported with "
+                        "t.rast.export of GRASS GIS 7\n")
+    elif type_ == "stvds":
+        read_file.write("This space time vector dataset was exported with "
+                        "t.vect.export of GRASS GIS 7\n")
+    elif type_ == "str3ds":
+        read_file.write("This space time 3D raster dataset was exported "
+                        "with t.rast3d.export of GRASS GIS 7\n")
+    read_file.write("\n")
+    read_file.write("Files:\n")
+    if type_ == "strds":
+        if format_ == "GTiff":
+                                #123456789012345678901234567890
+            read_file.write("       *.tif  -- GeoTIFF raster files\n")
+            read_file.write("     *.color  -- GRASS GIS raster color rules\n")
+        elif format_ == "pack":
+            read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
+    elif type_ == "stvds":
+                                #123456789012345678901234567890
+        if format_ == "GML":
+            read_file.write("       *.xml  -- Vector GML files\n")
+        else:
+            read_file.write("      *.pack  -- GRASS vector files packed with v.pack\n")
+    elif type_ == "str3ds":
+        read_file.write("      *.pack  -- GRASS 3D raster files packed with r3.pack\n")
+    read_file.write("%13s -- Projection information in PROJ.4 format\n" %
+                    (proj_file_name))
+    read_file.write("%13s -- GRASS GIS space time %s dataset information\n" %
+                    (init_file_name, sp.get_new_map_instance(None).get_type()))
+    read_file.write("%13s -- Time series file, lists all maps by name "
+                    "with interval\n" % (list_file_name))
+    read_file.write("                 time stamps in ISO format. Field separator is |\n")
+    read_file.write("%13s -- Space time dataset metadata (t.info output)\n" %
+                    (metadata_file_name))
+    read_file.write("%13s -- This file\n" % (read_file_name))
+    read_file.close()
+
+    # Append the file list
+    tar.add(list_file_name)
+    tar.add(proj_file_name)
+    tar.add(init_file_name)
+    tar.add(read_file_name)
+    tar.add(metadata_file_name)
+    tar.close()
+
+    os.chdir(old_cwd)
+
+    # Move the archive to its destination
+    shutil.move(os.path.join(new_cwd, tmp_tar_file_name), output)
+
+    # Remove the temporary created working directory
+    shutil.rmtree(new_cwd)
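Complementing the GTiff/strds example at the top of the module, a hedged sketch of calling the rewritten function for a space time vector dataset exported as GML; the dataset name and the init.txt values below are made up:

    import grass.temporal as tgis

    tgis.export_stds(input="precip_stations@PERMANENT",
                     output="/tmp/precip_stations.tar.bz2",
                     compression="bzip2", workdir="/tmp",
                     where=None, format_="GML", type_="stvds")

    # Each list.txt line in the archive has the form name|start_time|end_time,
    # and init.txt looks roughly like:
    #   stds_type=stvds
    #   format=GML
    #   temporal_type=absolute
    #   semantic_type=mean
    #   number_of_maps=365
    #   north=80.0
    #   south=20.0
    #   east=60.0
    #   west=10.0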

+ 331 - 303
lib/python/temporal/stds_import.py

@@ -19,7 +19,7 @@ link=True
 exp=True
 exp=True
 overr=False
 overr=False
 create=False
 create=False
-tgis.import_stds(input, output, extrdir, title, descr, location, 
+tgis.import_stds(input, output, extrdir, title, descr, location,
                 link, exp, overr, create, "strds")
                 link, exp, overr, create, "strds")
 ...
 ...
 @endcode
 @endcode
@@ -51,317 +51,345 @@ list_file_name = "list.txt"
 imported_maps = {}
 imported_maps = {}
 
 
 ############################################################################
 ############################################################################
+
 def _import_raster_maps_from_geotiff(maplist, overr, exp, location, link):
 def _import_raster_maps_from_geotiff(maplist, overr, exp, location, link):
-	impflags = ""
-	if overr:
-		impflags += "o"
-	if exp or location:
-		impflags += "e"
-	for row in maplist:
-		name = row["name"]
-		filename = str(row["name"]) + ".tif"
-
-		if link:
-			ret = core.run_command("r.external", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite())
-		else:
-			ret = core.run_command("r.in.gdal", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite())
-
-		if ret != 0:
-			core.fatal(_("Unable to import/link raster map <%s>.") % name)
-
-		# Set the color rules if present
-		filename = str(row["name"]) + ".color"
-		if os.path.isfile(filename):
-			ret = core.run_command("r.colors", map = name,
-						rules = filename,
-						overwrite = core.overwrite())
-			if ret != 0:
-				core.fatal(_("Unable to set the color rules for raster map <%s>.") % name)
-                                                        
+    impflags = ""
+    if overr:
+        impflags += "o"
+    if exp or location:
+        impflags += "e"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".tif"
+
+        if link:
+            ret = core.run_command("r.external", input=filename,
+                                   output=name,
+                                   flags=impflags,
+                                   overwrite=core.overwrite())
+        else:
+            ret = core.run_command("r.in.gdal", input=filename,
+                                   output=name,
+                                   flags=impflags,
+                                   overwrite=core.overwrite())
+
+        if ret != 0:
+            core.fatal(_("Unable to import/link raster map <%s>.") % name)
+
+        # Set the color rules if present
+        filename = str(row["name"]) + ".color"
+        if os.path.isfile(filename):
+            ret = core.run_command("r.colors", map=name,
+                                   rules=filename,
+                                   overwrite=core.overwrite())
+            if ret != 0:
+                core.fatal(_("Unable to set the color rules for "
+                             "raster map <%s>.") % name)
+
 ############################################################################
 ############################################################################
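The GeoTIFF import path above builds the flag string for r.external/r.in.gdal from the module options: "o" overrides the projection check and "e" extends the location extents when a new location is requested. A standalone sketch of that composition (the location name in the second call is made up):

    def compose_import_flags(overr, exp, location):
        # Flag string handed to r.external / r.in.gdal in the loop above
        impflags = ""
        if overr:
            impflags += "o"  # override the projection check
        if exp or location:
            impflags += "e"  # extend the location extents
        return impflags

    # compose_import_flags(False, True, None)     -> "e"
    # compose_import_flags(True, False, "nc_spm") -> "oe"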
+
 def _import_raster_maps(maplist):
 def _import_raster_maps(maplist):
-	# We need to disable the projection check because of its simple implementation
-	impflags = "o"
-	for row in maplist:
-		name = row["name"]
-		filename = str(row["name"]) + ".pack"
-		ret = core.run_command("r.unpack", input = filename,
-						output = name,
-						flags = impflags,
-						overwrite = core.overwrite(),
-						verbose = True)
-
-		if ret != 0:
-			core.fatal(_("Unable to unpack raster map <%s>.") % name)
+    # We need to disable the projection check because of its 
+    # simple implementation
+    impflags = "o"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".pack"
+        ret = core.run_command("r.unpack", input=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite(),
+                               verbose=True)
+
+        if ret != 0:
+            core.fatal(_("Unable to unpack raster map <%s>.") % name)
 
 
 ############################################################################
 ############################################################################
+
 def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
 def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
-        impflags = "o"
-        if exp or location:
-                impflags += "e"
-        for row in maplist:
-                name = row["name"]
-                filename = str(row["name"]) + ".xml"
+    impflags = "o"
+    if exp or location:
+        impflags += "e"
+    for row in maplist:
+        name = row["name"]
+        filename = str(row["name"]) + ".xml"
+
+        ret = core.run_command("v.in.ogr", dsn=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite())
 
 
-                ret = core.run_command("v.in.ogr", dsn = filename,
-                                        output = name,
-                                        flags = impflags,
-                                        overwrite = core.overwrite())
+        if ret != 0:
+            core.fatal(_("Unable to import vector map <%s>.") % name)
 
 
-                if ret != 0:
-                        core.fatal(_("Unable to import vector map <%s>.") % name)
-                        
 ############################################################################
 ############################################################################
+
 def _import_vector_maps(maplist):
 def _import_vector_maps(maplist):
-        # We need to disable the projection check because of its simple implementation
-        impflags = "o"
-        for row in maplist:
-        	# Separate the name from the layer
-                name = row["name"].split(":")[0]
-                # Import only unique maps
-                if name in imported_maps:
-                    continue
-                filename = name + ".pack"
-                ret = core.run_command("v.unpack", input = filename,
-                                                output = name,
-                                                flags = impflags,
-                                                overwrite = core.overwrite(),
-                                                verbose = True)
-
-                if ret != 0:
-                        core.fatal(_("Unable to unpack vector map <%s>.") % name)
-                
-                imported_maps[name] = name
+    # We need to disable the projection check because of its 
+    # simple implementation
+    impflags = "o"
+    for row in maplist:
+        # Separate the name from the layer
+        name = row["name"].split(":")[0]
+        # Import only unique maps
+        if name in imported_maps:
+            continue
+        filename = name + ".pack"
+        ret = core.run_command("v.unpack", input=filename,
+                               output=name,
+                               flags=impflags,
+                               overwrite=core.overwrite(),
+                               verbose=True)
+
+        if ret != 0:
+            core.fatal(_("Unable to unpack vector map <%s>.") % name)
+
+        imported_maps[name] = name
 ############################################################################
 ############################################################################
 
 
-def import_stds(input, output, extrdir, title = None, descr = None, location = None,
-                link = False, exp = False, overr = False, create = False, stds_type = "strds"):
-	"""
-		!Import space time datasets of type raster and vector
-		
-		@param input Name of the input archive file
-		@param output The name of the output space time dataset
-		@param extrdir The extraction directory
-		@param title The title of the new created space time dataset
-		@param description The description of the new created space time dataset
-		@param location The name of the location that should be created, 
-		                maps are imported into this location
-		@param link Switch to link raster maps instead importing them
-		@param exp Extend location extents based on new dataset
-		@param overr Override projection (use location's projection)
-		@param create Create the location specified by the "location" parameter and exit. 
-		              Do not import the space time datasets.
-		@param stds_type The type of the space time dataset that should be imported
-	"""
-
-	core.set_raise_on_error(True)
-
-	# Check if input file and extraction directory exits
-	if not os.path.exists(input):
-		core.fatal(_("Space time raster dataset archive <%s> not found") % input)
-	if not create and not os.path.exists(extrdir):
-		core.fatal(_("Extraction directory <%s> not found") % extrdir)
-
-	tar = tarfile.open(name = input, mode = 'r')
-
-	# Check for important files
-	members = tar.getnames()
-
-	if init_file_name not in members:
-		core.fatal(_("Unable to find init file <%s>") % init_file_name)
-	if list_file_name not in members:
-		core.fatal(_("Unable to find list file <%s>") % list_file_name)
-	if proj_file_name not in members:
-		core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
-
-	tar.extractall(path = extrdir)
-	tar.close()
-
-	# Save current working directory path
-	old_cwd = os.getcwd()
-
-	# Switch into the data directory
-	os.chdir(extrdir)
-
-	# Check projection information
-	if not location:
-		temp_name = core.tempfile()
-		temp_file = open(temp_name, "w")
-		proj_name = os.path.abspath(proj_file_name)
-
-		p = core.start_command("g.proj", flags = "j", stdout = temp_file)
-		p.communicate()
-		temp_file.close()
-
-		if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
-			if overr:
-				core.warning(_("Projection information does not match. Proceeding..."))
-			else:
-				core.fatal(_("Projection information does not match. Aborting."))
-
-	# Create a new location based on the projection information and switch into it
-	old_env = core.gisenv()
-	if location:
-		try:
-			proj4_string = open(proj_file_name, 'r').read()
-			core.create_location(dbase = old_env["GISDBASE"],
-								  location = location,
-								  proj4 = proj4_string)
-			# Just create a new location and return
-			if create:
-				os.chdir(old_cwd)
-				return
-		except Exception as e:
-				core.fatal(_("Unable to create location %s. Reason: %s") % (location, str(e)))
-		# Switch to the new created location
-		ret = core.run_command("g.mapset", mapset = "PERMANENT",
-					location = location,
-					gisdbase = old_env["GISDBASE"])
-		if ret != 0:
-			core.fatal(_("Unable to switch to location %s") % location)
-		# create default database connection
-		ret = core.run_command("t.connect", flags = "d")
-		if ret != 0:
-			core.fatal(_("Unable to create default temporal database in new location %s") % location)
-
-	try:
-		# Make sure the temporal database exists
-		create_temporal_database()
-
-		fs = "|"
-		maplist = []
-		mapset = core.gisenv()["MAPSET"]
-		list_file = open(list_file_name, "r")
-
-		# Read the map list from file
-		line_count = 0
-		while True:
-			line = list_file.readline()
-			if not line:
-				break
-
-			line_list = line.split(fs)
-
-			mapname = line_list[0].strip()
-			mapid = mapname + "@" + mapset
-
-			row = {}
-			row["name"] = mapname
-			row["id"] = mapid
-			row["start"] = line_list[1].strip()
-			row["end"] = line_list[2].strip()
-
-			maplist.append(row)
-			line_count += 1
-
-		list_file.close()
-
-		# Read the init file
-		fs = "="
-		init = {}
-		init_file = open(init_file_name, "r")
-		while True:
-			line = init_file.readline()
-			if not line:
-				break
-
-			kv = line.split(fs)
-			init[kv[0]] = kv[1].strip()
-
-		init_file.close()
-
-		if not init.has_key("temporal_type") or \
-		   not init.has_key("semantic_type") or \
-		   not init.has_key("number_of_maps"):
-			core.fatal(_("Key words %s, %s or %s not found in init file.") %
-			("temporal_type", "semantic_type", "number_of_maps"))
-
-		if line_count != int(init["number_of_maps"]):
-			core.fatal(_("Number of maps mismatch in init and list file."))
-
-		_format = "GTiff"
-		_type = "strds"
-
-		if init.has_key("stds_type"):
-			_type = init["stds_type"]
-		if init.has_key("format"):
-			_format = init["format"]
-
-		if stds_type != _type:
-			core.fatal(_("The archive file is of wrong space time dataset type"))
-
-		# Check the existence of the files 
-		if _format == "GTiff":
-			for row in maplist:
-				filename = str(row["name"]) + ".tif"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find geotiff raster file <%s> in archive.") % filename)
-		elif _format == "GML":
-			for row in maplist:
-				filename = str(row["name"]) + ".xml"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find GML vector file <%s> in archive.") % filename)
-		elif _format == "pack":
-			for row in maplist:
-				if _type == "stvds":
-					filename = str(row["name"].split(":")[0]) + ".pack"
-				else:
-					filename = str(row["name"]) + ".pack"
-				if not os.path.exists(filename):
-					core.fatal(_("Unable to find GRASS package file <%s> in archive.") % filename)
-		else:
-			core.fatal(_("Unsupported input format"))
-
-		# Check the space time dataset
-		id = output + "@" + mapset
-		sp = dataset_factory(_type, id)
-		if sp.is_in_db() and core.overwrite() == False:
-			core.fatal(_("Space time %s dataset <%s> is already in the database. Use the overwrite flag.") % (_type, sp.get_id()))
-
-		# Import the maps
-		if _type == "strds":
-			if _format == "GTiff":
-				_import_raster_maps_from_geotiff(maplist, overr, exp, location, link)
-			if _format == "pack":
-				_import_raster_maps(maplist)
-                elif _type == "stvds":
-                        if _format == "GML":
-                                _import_vector_maps_from_gml(maplist, overr, exp, location, link)
-                        if _format == "pack":
-                                _import_vector_maps(maplist)
-
-		# Create the space time dataset
-		if sp.is_in_db() and core.overwrite() == True:
-			core.info(_("Overwrite space time %s dataset <%s> and unregister all maps.") % (sp.get_new_map_instance(None).get_type(), sp.get_id()))
-			sp.delete()
-			sp = sp.get_new_instance(id)
-
-		temporal_type = init["temporal_type"]
-		semantic_type = init["semantic_type"]
-		core.verbose(_("Create space time %s dataset.") % sp.get_new_map_instance(None).get_type())
-
-		sp.set_initial_values(temporal_type = temporal_type, semantic_type = semantic_type, title = title, description = descr)
-		sp.insert()
-
-		# register the maps
-		fs = "|"
-		register_maps_in_space_time_dataset(type = sp.get_new_map_instance(None).get_type(),
-					 name = output, file = list_file_name, start = "file", end = "file", dbif = None, fs = fs)
-
-		os.chdir(old_cwd)
-	except:
-		raise
-
-	# Make sure the location is switched back correctly
-	finally:
-		if location:
-			# Switch to the old location
-			ret = core.run_command("g.mapset", mapset = old_env["MAPSET"],
-						location = old_env["LOCATION_NAME"],
-						gisdbase = old_env["GISDBASE"])
+def import_stds(
+    input, output, extrdir, title=None, descr=None, location=None,
+        link=False, exp=False, overr=False, create=False, stds_type="strds"):
+    """!Import space time datasets of type raster and vector
+
+        @param input: Name of the input archive file
+        @param output: The name of the output space time dataset
+        @param extrdir: The extraction directory
+        @param title: The title of the newly created space time dataset
+        @param descr: The description of the newly created 
+                      space time dataset
+        @param location: The name of the location that should be created;
+                         maps are imported into this location
+        @param link: Switch to link raster maps instead of importing them
+        @param exp: Extend location extents based on new dataset
+        @param overr: Override projection (use location's projection)
+        @param create: Create the location specified by the "location" 
+                      parameter and exit.
+                      Do not import the space time datasets.
+        @param stds_type: The type of the space time dataset that 
+                          should be imported
+    """
+
+    core.set_raise_on_error(True)
+
+    # Check if input file and extraction directory exist
+    if not os.path.exists(input):
+        core.fatal(_("Space time raster dataset archive <%s> not found")
+                   % input)
+    if not create and not os.path.exists(extrdir):
+        core.fatal(_("Extraction directory <%s> not found") % extrdir)
+
+    tar = tarfile.open(name=input, mode='r')
+
+    # Check for important files
+    members = tar.getnames()
+
+    if init_file_name not in members:
+        core.fatal(_("Unable to find init file <%s>") % init_file_name)
+    if list_file_name not in members:
+        core.fatal(_("Unable to find list file <%s>") % list_file_name)
+    if proj_file_name not in members:
+        core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
+
+    tar.extractall(path=extrdir)
+    tar.close()
+
+    # Save current working directory path
+    old_cwd = os.getcwd()
+
+    # Switch into the data directory
+    os.chdir(extrdir)
+
+    # Check projection information
+    if not location:
+        temp_name = core.tempfile()
+        temp_file = open(temp_name, "w")
+        proj_name = os.path.abspath(proj_file_name)
+
+        p = core.start_command("g.proj", flags="j", stdout=temp_file)
+        p.communicate()
+        temp_file.close()
+
+        if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
+            if overr:
+                core.warning(_("Projection information does not match. "
+                               "Proceeding..."))
+            else:
+                core.fatal(_("Projection information does not match. Aborting."))
+
+    # Create a new location based on the projection information and switch into it
+    old_env = core.gisenv()
+    if location:
+        try:
+            proj4_string = open(proj_file_name, 'r').read()
+            core.create_location(dbase=old_env["GISDBASE"],
+                                 location=location,
+                                 proj4=proj4_string)
+            # Just create a new location and return
+            if create:
+                os.chdir(old_cwd)
+                return
+        except Exception as e:
+            core.fatal(_("Unable to create location %s. Reason: %s")
+                       % (location, str(e)))
+        # Switch to the newly created location
+        ret = core.run_command("g.mapset", mapset="PERMANENT",
+                               location=location,
+                               gisdbase=old_env["GISDBASE"])
+        if ret != 0:
+            core.fatal(_("Unable to switch to location %s") % location)
+        # create default database connection
+        ret = core.run_command("t.connect", flags="d")
+        if ret != 0:
+            core.fatal(_("Unable to create default temporal database "
+                         "in new location %s") % location)
+
+    try:
+        # Make sure the temporal database exists
+        create_temporal_database()
+
+        fs = "|"
+        maplist = []
+        mapset = core.gisenv()["MAPSET"]
+        list_file = open(list_file_name, "r")
+
+        # Read the map list from file
+        line_count = 0
+        while True:
+            line = list_file.readline()
+            if not line:
+                break
+
+            line_list = line.split(fs)
+
+            mapname = line_list[0].strip()
+            mapid = mapname + "@" + mapset
+
+            row = {}
+            row["name"] = mapname
+            row["id"] = mapid
+            row["start"] = line_list[1].strip()
+            row["end"] = line_list[2].strip()
+
+            maplist.append(row)
+            line_count += 1
+
+        list_file.close()
+
+        # Read the init file
+        fs = "="
+        init = {}
+        init_file = open(init_file_name, "r")
+        while True:
+            line = init_file.readline()
+            if not line:
+                break
+
+            kv = line.split(fs)
+            init[kv[0]] = kv[1].strip()
+
+        init_file.close()
+
+        if "temporal_type" not in init or \
+           "semantic_type" not in init or \
+           "number_of_maps" not in init:
+            core.fatal(_("Key words %s, %s or %s not found in init file.") %
+                       ("temporal_type", "semantic_type", "number_of_maps"))
+
+        if line_count != int(init["number_of_maps"]):
+            core.fatal(_("Number of maps mismatch in init and list file."))
+
+        _format = "GTiff"
+        _type = "strds"
+
+        if "stds_type" in init:
+            _type = init["stds_type"]
+        if "format" in init:
+            _format = init["format"]
+
+        if stds_type != _type:
+            core.fatal(_("The archive file is of wrong space time dataset type"))
+
+        # Check the existence of the files
+        if _format == "GTiff":
+            for row in maplist:
+                filename = str(row["name"]) + ".tif"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find geotiff raster file "
+                                 "<%s> in archive.") % filename)
+        elif _format == "GML":
+            for row in maplist:
+                filename = str(row["name"]) + ".xml"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find GML vector file "
+                                 "<%s> in archive.") % filename)
+        elif _format == "pack":
+            for row in maplist:
+                if _type == "stvds":
+                    filename = str(row["name"].split(":")[0]) + ".pack"
+                else:
+                    filename = str(row["name"]) + ".pack"
+                if not os.path.exists(filename):
+                    core.fatal(_("Unable to find GRASS package file "
+                                 "<%s> in archive.") % filename)
+        else:
+            core.fatal(_("Unsupported input format"))
+
+        # Check the space time dataset
+        id = output + "@" + mapset
+        sp = dataset_factory(_type, id)
+        if sp.is_in_db() and not core.overwrite():
+            core.fatal(_("Space time %s dataset <%s> is already in the "
+                         "database. Use the overwrite flag.") % \
+                        (_type, sp.get_id()))
+
+        # Import the maps
+        if _type == "strds":
+            if _format == "GTiff":
+                _import_raster_maps_from_geotiff(
+                    maplist, overr, exp, location, link)
+            if _format == "pack":
+                _import_raster_maps(maplist)
+        elif _type == "stvds":
+            if _format == "GML":
+                _import_vector_maps_from_gml(
+                    maplist, overr, exp, location, link)
+            if _format == "pack":
+                _import_vector_maps(maplist)
+
+        # Create the space time dataset
+        if sp.is_in_db() and core.overwrite():
+            core.info(_("Overwrite space time %s dataset "
+                        "<%s> and unregister all maps.") % \
+                       (sp.get_new_map_instance(None).get_type(), sp.get_id()))
+            sp.delete()
+            sp = sp.get_new_instance(id)
+
+        temporal_type = init["temporal_type"]
+        semantic_type = init["semantic_type"]
+        core.verbose(_("Create space time %s dataset.") %
+                     sp.get_new_map_instance(None).get_type())
+
+        sp.set_initial_values(temporal_type=temporal_type, 
+                              semantic_type=semantic_type, title=title, 
+                              description=descr)
+        sp.insert()
+
+        # register the maps
+        fs = "|"
+        register_maps_in_space_time_dataset(
+            type=sp.get_new_map_instance(None).get_type(),
+            name=output, file=list_file_name, start="file", 
+            end="file", dbif=None, fs=fs)
+
+        os.chdir(old_cwd)
+    except:
+        raise
+
+    # Make sure the location is switched back correctly
+    finally:
+        if location:
+            # Switch to the old location
+            ret = core.run_command("g.mapset", mapset=old_env["MAPSET"],
+                                   location=old_env["LOCATION_NAME"],
+                                   gisdbase=old_env["GISDBASE"])
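For orientation, a minimal usage sketch of import_stds (not part of this changeset). The archive path, extraction directory and output name are invented, and the call assumes the function is re-exported by the grass.temporal package like the rest of the modules shown here; the archive itself must contain the init, list and projection files that the function checks for above.

import grass.temporal as tgis

# Hypothetical archive created by the matching export step; "precip" must
# not exist yet unless the GRASS overwrite flag is set.
tgis.import_stds(input="/tmp/precip_strds.tar",
                 output="precip",
                 extrdir="/tmp/precip_extract",
                 title="Precipitation",
                 descr="Monthly precipitation series",
                 stds_type="strds")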

+ 147 - 58
lib/python/temporal/temporal_extent.py

@@ -6,11 +6,15 @@ Temporal GIS related temporal extent functions to be used in Python scripts and
 
 
 Usage:
 Usage:
 
 
+@code
+
 >>> import grass.temporal as tgis
 >>> import grass.temporal as tgis
 >>> from datetime import datetime
 >>> from datetime import datetime
 >>> t = tgis.RasterRelativeTime()
 >>> t = tgis.RasterRelativeTime()
 >>> t = tgis.RasterAbsoluteTime()
 >>> t = tgis.RasterAbsoluteTime()
 
 
+@endcode
+
 (C) 2008-2011 by the GRASS Development Team
 (C) 2008-2011 by the GRASS Development Team
 This program is free software under the GNU General Public
 This program is free software under the GNU General Public
 License (>=v2). Read the file COPYING that comes with GRASS
 License (>=v2). Read the file COPYING that comes with GRASS
@@ -34,8 +38,9 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.AbstractTemporalExtent(table="raster_absolute_time",
+        @code
+        
+        >>> A = AbstractTemporalExtent(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=datetime(2001, 01, 01),
         ... ident="soil@PERMANENT", start_time=datetime(2001, 01, 01),
         ... end_time=datetime(2005,01,01) )
         ... end_time=datetime(2005,01,01) )
         >>> A.id
         >>> A.id
@@ -51,7 +56,7 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
         start_time=2001-01-01 00:00:00
         start_time=2001-01-01 00:00:00
         end_time=2005-01-01 00:00:00
         end_time=2005-01-01 00:00:00
         >>> # relative time
         >>> # relative time
-        >>> A = tgis.AbstractTemporalExtent(table="raster_absolute_time",
+        >>> A = AbstractTemporalExtent(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=0, end_time=1 )
         ... ident="soil@PERMANENT", start_time=0, end_time=1 )
         >>> A.id
         >>> A.id
         'soil@PERMANENT'
         'soil@PERMANENT'
@@ -65,6 +70,8 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
         >>> A.print_shell_info()
         >>> A.print_shell_info()
         start_time=0
         start_time=0
         end_time=1
         end_time=1
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None):
     def __init__(self, table=None, ident=None, start_time=None, end_time=None):
 
 
@@ -77,21 +84,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def starts(self, extent):
     def starts(self, extent):
         """!Return True if this temporal extent (A) starts at the start of the 
         """!Return True if this temporal extent (A) starts at the start of the 
            provided temporal extent (B) and finishes within it
            provided temporal extent (B) and finishes within it
+           @verbatim
            A  |-----|
            A  |-----|
            B  |---------|
            B  |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object with which this extent starts
            @param extent: The temporal extent object with which this extent starts
            
            
            Usage:
            Usage:
            
            
+           @code
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=6 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.starts(B)
            >>> A.starts(B)
            True
            True
            >>> B.starts(A)
            >>> B.starts(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
             return False
@@ -105,20 +116,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def started(self, extent):
     def started(self, extent):
         """!Return True if this temporal extent (A) started at the start of the 
         """!Return True if this temporal extent (A) started at the start of the 
            provided temporal extent (B) and finishes after it
            provided temporal extent (B) and finishes after it
+           @verbatim
            A  |---------|
            A  |---------|
            B  |-----|
            B  |-----|
+           @endverbatim
            
            
            @param extent: The temporal extent object with which this extent started
            @param extent: The temporal extent object with which this extent started
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=6 )
            >>> A.started(B)
            >>> A.started(B)
            True
            True
            >>> B.started(A)
            >>> B.started(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
             return False
@@ -132,20 +148,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def finishes(self, extent):
     def finishes(self, extent):
         """!Return True if this temporal extent (A) starts after the start of the 
         """!Return True if this temporal extent (A) starts after the start of the 
            provided temporal extent (B) and finishes with it
            provided temporal extent (B) and finishes with it
+           @verbatim
            A      |-----|
            A      |-----|
            B  |---------|
            B  |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object with which this extent finishes
            @param extent: The temporal extent object with which this extent finishes
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.finishes(B)
            >>> A.finishes(B)
            True
            True
            >>> B.finishes(A)
            >>> B.finishes(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
             return False
@@ -159,20 +180,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def finished(self, extent):
     def finished(self, extent):
         """!Return True if this temporal extent (A) starts before the start of the 
         """!Return True if this temporal extent (A) starts before the start of the 
            provided temporal extent (B) and finishes with it
            provided temporal extent (B) and finishes with it
+           @verbatim
            A  |---------|
            A  |---------|
            B      |-----|
            B      |-----|
+           @endverbatim
            
            
            @param extent: The temporal extent object with which this extent finishes
            @param extent: The temporal extent object with which this extent finishes
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=7 )
            >>> A.finished(B)
            >>> A.finished(B)
            True
            True
            >>> B.finished(A)
            >>> B.finished(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
             return False
@@ -186,20 +212,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def after(self, extent):
     def after(self, extent):
         """!Return True if this temporal extent (A) is located after the  
         """!Return True if this temporal extent (A) is located after the  
            provided temporal extent (B)
            provided temporal extent (B)
+           @verbatim
            A             |---------|
            A             |---------|
            B  |---------|
            B  |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is located before this extent
            @param extent: The temporal extent object that is located before this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=8, end_time=9 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=8, end_time=9 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=7 )
            >>> A.after(B)
            >>> A.after(B)
            True
            True
            >>> B.after(A)
            >>> B.after(A)
            False
            False
+           
+           @endcode
         """
         """
         if extent.D["end_time"] is None:
         if extent.D["end_time"] is None:
             if self.D["start_time"] > extent.D["start_time"]:
             if self.D["start_time"] > extent.D["start_time"]:
@@ -215,20 +246,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def before(self, extent):
     def before(self, extent):
         """!Return True if this temporal extent (A) is located before the  
         """!Return True if this temporal extent (A) is located before the  
            provided temporal extent (B)
            provided temporal extent (B)
+           @verbatim
            A  |---------|
            A  |---------|
            B             |---------|
            B             |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is located after this extent
            @param extent: The temporal extent object that is located after this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=8, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=8, end_time=9 )
            >>> A.before(B)
            >>> A.before(B)
            True
            True
            >>> B.before(A)
            >>> B.before(A)
            False
            False
+           
+           @endcode
         """
         """
         if self.D["end_time"] is None:
         if self.D["end_time"] is None:
             if self.D["start_time"] < extent.D["start_time"]:
             if self.D["start_time"] < extent.D["start_time"]:
@@ -244,29 +280,34 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def adjacent(self, extent):
     def adjacent(self, extent):
         """!Return True if this temporal extent (A) is a meeting neighbor of the 
         """!Return True if this temporal extent (A) is a meeting neighbor of the 
            provided temporal extent (B)
            provided temporal extent (B)
+           @verbatim
            A            |---------|
            A            |---------|
            B  |---------|
            B  |---------|
            A  |---------|
            A  |---------|
            B            |---------|
            B            |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is a meeting neighbor
            @param extent: The temporal extent object that is a meeting neighbor
                           of this extent
                           of this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=7, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=7, end_time=9 )
            >>> A.adjacent(B)
            >>> A.adjacent(B)
            True
            True
            >>> B.adjacent(A)
            >>> B.adjacent(A)
            True
            True
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=3, end_time=5 )
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=3, end_time=5 )
            >>> A.adjacent(B)
            >>> A.adjacent(B)
            True
            True
            >>> B.adjacent(A)
            >>> B.adjacent(A)
            True
            True
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
             return False
             return False
@@ -280,19 +321,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def follows(self, extent):
     def follows(self, extent):
         """!Return True if this temporal extent (A) follows the  
         """!Return True if this temporal extent (A) follows the  
            provided temporal extent (B)
            provided temporal extent (B)
+           @verbatim
            A            |---------|
            A            |---------|
            B  |---------|
            B  |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is the predecessor
            @param extent: The temporal extent object that is the predecessor
                           of this extent
                           of this extent
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=3, end_time=5 )
+           Usage:
+           
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=3, end_time=5 )
            >>> A.follows(B)
            >>> A.follows(B)
            True
            True
            >>> B.follows(A)
            >>> B.follows(A)
            False
            False
+           
+           @endcode
         """
         """
         if  extent.D["end_time"] is None:
         if  extent.D["end_time"] is None:
             return False
             return False
@@ -305,21 +353,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def precedes(self, extent):
     def precedes(self, extent):
         """!Return True if this temporal extent (A) precedes the provided 
         """!Return True if this temporal extent (A) precedes the provided 
            temporal extent (B)
            temporal extent (B)
+           @verbatim
            A  |---------|
            A  |---------|
            B            |---------|
            B            |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is the successor
            @param extent: The temporal extent object that is the successor
                           of this extent
                           of this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=7, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=7, end_time=9 )
            >>> A.precedes(B)
            >>> A.precedes(B)
            True
            True
            >>> B.precedes(A)
            >>> B.precedes(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None:
         if  self.D["end_time"] is None:
             return False
             return False
@@ -332,20 +385,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def during(self, extent):
     def during(self, extent):
         """!Return True if this temporal extent (A) is located during the provided 
         """!Return True if this temporal extent (A) is located during the provided 
            temporal extent (B)
            temporal extent (B)
+           @verbatim
            A   |-------|
            A   |-------|
            B  |---------|
            B  |---------|
-           
+           @endverbatim
+                      
            @param extent: The temporal extent object that contains this extent
            @param extent: The temporal extent object that contains this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=4, end_time=9 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=4, end_time=9 )
            >>> A.during(B)
            >>> A.during(B)
            True
            True
            >>> B.during(A)
            >>> B.during(A)
            False
            False
+           
+           @endcode
         """
         """
         # Check single point of time in interval
         # Check single point of time in interval
         if  extent.D["end_time"] is None:
         if  extent.D["end_time"] is None:
@@ -368,21 +426,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def contains(self, extent):
     def contains(self, extent):
         """!Return True if this temporal extent (A) contains the provided 
         """!Return True if this temporal extent (A) contains the provided 
            temporal extent (B)
            temporal extent (B)
+           @verbatim
            A  |---------|
            A  |---------|
            B   |-------|
            B   |-------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is located 
            @param extent: The temporal extent object that is located 
                           during this extent
                           during this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=4, end_time=9 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=8 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=4, end_time=9 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=8 )
            >>> A.contains(B)
            >>> A.contains(B)
            True
            True
            >>> B.contains(A)
            >>> B.contains(A)
            False
            False
+           
+           @endcode
         """
         """
         # Check single point of time in interval
         # Check single point of time in interval
         if  self.D["end_time"] is None:
         if  self.D["end_time"] is None:
@@ -405,21 +468,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def equivalent(self, extent):
     def equivalent(self, extent):
         """!Return True if this temporal extent (A) is equivalent to the provided 
         """!Return True if this temporal extent (A) is equivalent to the provided 
            temporal extent (B)
            temporal extent (B)
+           @verbatim
            A  |---------|
            A  |---------|
            B  |---------|
            B  |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is equivalent 
            @param extent: The temporal extent object that is equivalent 
                           during this extent
                           during this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=6 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=6 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=6 )
            >>> A.equivalent(B)
            >>> A.equivalent(B)
            True
            True
            >>> B.equivalent(A)
            >>> B.equivalent(A)
            True
            True
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
         if  self.D["end_time"] is None and extent.D["end_time"] is None:
             if self.D["start_time"] == extent.D["start_time"]:
             if self.D["start_time"] == extent.D["start_time"]:
@@ -439,21 +507,25 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def overlapped(self, extent):
     def overlapped(self, extent):
         """!Return True if this temporal extent (A) overlapped the provided 
         """!Return True if this temporal extent (A) overlapped the provided 
            temporal extent (B)
            temporal extent (B)
+           @verbatim
            A  |---------|
            A  |---------|
            B    |---------|
            B    |---------|
-           
+           @endverbatim
            @param extent: The temporal extent object that overlaps 
            @param extent: The temporal extent object that overlaps 
                           this extent
                           this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=6, end_time=8 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=5, end_time=7 )
+           >>> B = AbstractTemporalExtent(start_time=6, end_time=8 )
            >>> A.overlapped(B)
            >>> A.overlapped(B)
            True
            True
            >>> B.overlapped(A)
            >>> B.overlapped(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
             return False
@@ -468,21 +540,26 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
     def overlaps(self, extent):
     def overlaps(self, extent):
         """!Return True if this temporal extent (A) overlaps the provided 
         """!Return True if this temporal extent (A) overlaps the provided 
            temporal extent (B)
            temporal extent (B)
+           @verbatim
            A    |---------|
            A    |---------|
            B  |---------|
            B  |---------|
+           @endverbatim
            
            
            @param extent: The temporal extent object that is overlapped 
            @param extent: The temporal extent object that is overlapped 
                           by this extent
                           by this extent
            
            
            Usage:
            Usage:
            
            
-           >>> import grass.temporal as tgis
-           >>> A = tgis.AbstractTemporalExtent(start_time=6, end_time=8 )
-           >>> B = tgis.AbstractTemporalExtent(start_time=5, end_time=7 )
+           @code
+           
+           >>> A = AbstractTemporalExtent(start_time=6, end_time=8 )
+           >>> B = AbstractTemporalExtent(start_time=5, end_time=7 )
            >>> A.overlaps(B)
            >>> A.overlaps(B)
            True
            True
            >>> B.overlaps(A)
            >>> B.overlaps(A)
            False
            False
+           
+           @endcode
         """
         """
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
         if  self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
             return False
@@ -663,17 +740,20 @@ class AbsoluteTemporalExtent(AbstractTemporalExtent):
 ###############################################################################
 ###############################################################################
 
 
 class RasterAbsoluteTime(AbsoluteTemporalExtent):
 class RasterAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "raster_absolute_time",
         AbsoluteTemporalExtent.__init__(self, "raster_absolute_time",
             ident, start_time, end_time, timezone)
             ident, start_time, end_time, timezone)
 
 
 class Raster3DAbsoluteTime(AbsoluteTemporalExtent):
 class Raster3DAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "raster3d_absolute_time",
         AbsoluteTemporalExtent.__init__(self, "raster3d_absolute_time",
             ident, start_time, end_time, timezone)
             ident, start_time, end_time, timezone)
 
 
 class VectorAbsoluteTime(AbsoluteTemporalExtent):
 class VectorAbsoluteTime(AbsoluteTemporalExtent):
-    def __init__(self, ident=None, start_time=None, end_time=None, timezone=None):
+    def __init__(self, ident=None, start_time=None, end_time=None, 
+                 timezone=None):
         AbsoluteTemporalExtent.__init__(self, "vector_absolute_time",
         AbsoluteTemporalExtent.__init__(self, "vector_absolute_time",
             ident, start_time, end_time, timezone)
             ident, start_time, end_time, timezone)
 
 
@@ -687,8 +767,9 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
         
         
         Usage:
         Usage:
         
         
-        >>> import grass.temporal as tgis
-        >>> A = tgis.STDSAbsoluteTime(table="strds_absolute_time",
+        @code
+        
+        >>> A = STDSAbsoluteTime(table="strds_absolute_time",
         ... ident="strds@PERMANENT", start_time=datetime(2001, 01, 01),
         ... ident="strds@PERMANENT", start_time=datetime(2001, 01, 01),
         ... end_time=datetime(2005,01,01), granularity="1 days",
         ... end_time=datetime(2005,01,01), granularity="1 days",
         ... map_time="interval")
         ... map_time="interval")
@@ -713,6 +794,8 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
         end_time=2005-01-01 00:00:00
         end_time=2005-01-01 00:00:00
         granularity=1 days
         granularity=1 days
         map_time=interval
         map_time=interval
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  granularity=None, timezone=None, map_time=None):
                  granularity=None, timezone=None, map_time=None):
@@ -808,9 +891,10 @@ class RelativeTemporalExtent(AbstractTemporalExtent):
         start_time and end_time must be of type integer
         start_time and end_time must be of type integer
        
        
         Usage:
         Usage:
+        
+        @code
        
        
-        >>> import grass.temporal as tgis
-        >>> A = tgis.RelativeTemporalExtent(table="raster_absolute_time",
+        >>> A = RelativeTemporalExtent(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=0, end_time=1, unit="years")
         ... ident="soil@PERMANENT", start_time=0, end_time=1, unit="years")
         >>> A.id
         >>> A.id
         'soil@PERMANENT'
         'soil@PERMANENT'
@@ -829,6 +913,8 @@ class RelativeTemporalExtent(AbstractTemporalExtent):
         start_time=0
         start_time=0
         end_time=1
         end_time=1
         unit=years
         unit=years
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  unit=None):
                  unit=None):
@@ -916,9 +1002,10 @@ class STDSRelativeTime(RelativeTemporalExtent):
         start_time and end_time must be of type integer
         start_time and end_time must be of type integer
        
        
         Usage:
         Usage:
+        
+        @code
        
        
-        >>> import grass.temporal as tgis
-        >>> A = tgis.STDSRelativeTime(table="raster_absolute_time",
+        >>> A = STDSRelativeTime(table="raster_absolute_time",
         ... ident="soil@PERMANENT", start_time=0, end_time=1, unit="years",
         ... ident="soil@PERMANENT", start_time=0, end_time=1, unit="years",
         ... granularity=5, map_time="interval")
         ... granularity=5, map_time="interval")
         >>> A.id
         >>> A.id
@@ -946,6 +1033,8 @@ class STDSRelativeTime(RelativeTemporalExtent):
         unit=years
         unit=years
         granularity=5
         granularity=5
         map_time=interval
         map_time=interval
+        
+        @endcode
     """
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
     def __init__(self, table=None, ident=None, start_time=None, end_time=None, 
                  unit=None, granularity=None, map_time=None):
                  unit=None, granularity=None, map_time=None):

+ 85 - 79
lib/python/temporal/temporal_granularity.py

@@ -25,14 +25,14 @@ from datetime_math import *
 
 
 ###############################################################################
 ###############################################################################
 
 
-def compute_relative_time_granularity(maps):            
+
+def compute_relative_time_granularity(maps):
     """!Compute the relative time granularity
     """!Compute the relative time granularity
-        
-        Attention: The computation of the granularity is only correct in case of not
-        overlapping intervals. Hence a correct temporal topology is required for
-        computation.
-    
-	
+
+        Attention: The computation of the granularity 
+        is only correct in the case of non-overlapping intervals. 
+        Hence a correct temporal topology is required for computation.
+
        @param maps: a list of map objects ordered by start_time
        @param maps: a list of map objects ordered by start_time
     """
     """
 
 
@@ -44,7 +44,7 @@ def compute_relative_time_granularity(maps):
     for map in maps:
     for map in maps:
         start, end = map.get_valid_time()
         start, end = map.get_valid_time()
         if start and end:
         if start and end:
-            t =  abs(end - start)
+            t = abs(end - start)
             delta.append(int(t))
             delta.append(int(t))
 
 
     # Compute the timedelta of the gaps
     # Compute the timedelta of the gaps
@@ -54,12 +54,13 @@ def compute_relative_time_granularity(maps):
             if relation == "after":
             if relation == "after":
                 start1, end1 = maps[i].get_valid_time()
                 start1, end1 = maps[i].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
-                # Gaps are between intervals, intervals and points, points and points
+                # Gaps are between intervals, intervals and 
+                # points, points and points
                 if end1 and start2:
                 if end1 and start2:
-                    t =  abs(end1 - start2)
+                    t = abs(end1 - start2)
                     delta.append(int(t))
                     delta.append(int(t))
-                if  not end1 and start2:
-                    t =  abs(start1 - start2)
+                if not end1 and start2:
+                    t = abs(start1 - start2)
                     delta.append(int(t))
                     delta.append(int(t))
 
 
     delta.sort()
     delta.sort()
@@ -76,16 +77,16 @@ def compute_relative_time_granularity(maps):
 
 
 ###############################################################################
 ###############################################################################
 
 
-def compute_absolute_time_granularity(maps):                  
+
+def compute_absolute_time_granularity(maps):
     """!Compute the absolute time granularity
     """!Compute the absolute time granularity
-        
-        Attention: The computation of the granularity is only correct in case of not
-        overlapping intervals. Hence a correct temporal topology is required for
-        computation.
-    
-	
+
+        Attention: The computation of the granularity 
+        is only correct in the case of non-overlapping intervals. 
+        Hence a correct temporal topology is required for computation.
+
        @param maps: a list of map objects ordered by start_time
        @param maps: a list of map objects ordered by start_time
-    """     
+    """
 
 
     has_seconds = False
     has_seconds = False
     has_minutes = False
     has_minutes = False
@@ -117,83 +118,85 @@ def compute_absolute_time_granularity(maps):
             if relation == "after":
             if relation == "after":
                 start1, end1 = maps[i].get_valid_time()
                 start1, end1 = maps[i].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
                 start2, end2 = maps[i + 1].get_valid_time()
-                # Gaps are between intervals, intervals and points, points and points
+                # Gaps are between intervals, intervals and 
+                # points, points and points
                 if end1 and start2:
                 if end1 and start2:
                     delta.append(end1 - start2)
                     delta.append(end1 - start2)
                     datetime_delta.append(compute_datetime_delta(end1, start2))
                     datetime_delta.append(compute_datetime_delta(end1, start2))
-                if  not end1 and start2:
+                if not end1 and start2:
                     delta.append(start2 - start1)
                     delta.append(start2 - start1)
-                    datetime_delta.append(compute_datetime_delta(start1, start2))
+                    datetime_delta.append(compute_datetime_delta(
+                        start1, start2))
 
 
     # Check what changed
     # Check what changed
     dlist = []
     dlist = []
     for d in datetime_delta:
     for d in datetime_delta:
-        if d.has_key("second") and d["second"] > 0:
+        if "second" in d and d["second"] > 0:
             has_seconds = True
             has_seconds = True
-        if d.has_key("minute") and d["minute"] > 0:
+        if "minute" in d and d["minute"] > 0:
             has_minutes = True
             has_minutes = True
-        if d.has_key("hour") and d["hour"] > 0:
+        if "hour" in d and d["hour"] > 0:
             has_hours = True
             has_hours = True
-        if d.has_key("day") and d["day"] > 0:
+        if "day" in d and d["day"] > 0:
             has_days = True
             has_days = True
-        if d.has_key("month") and d["month"] > 0:
+        if "month" in d and d["month"] > 0:
             has_months = True
             has_months = True
-        if d.has_key("year") and d["year"] > 0:
+        if "year" in d and d["year"] > 0:
             has_years = True
             has_years = True
 
 
     # Create a list with a single time unit only
     # Create a list with a single time unit only
     if has_seconds:
     if has_seconds:
         for d in datetime_delta:
         for d in datetime_delta:
-            if d.has_key("second"):
-                dlist.append(d["second"])   
-            elif d.has_key("minute"):
-                dlist.append(d["minute"] * 60)   
-            elif d.has_key("hour"):
-                dlist.append(d["hour"] * 3600)   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24 * 3600)   
+            if "second" in d:
+                dlist.append(d["second"])
+            elif "minute" in d:
+                dlist.append(d["minute"] * 60)
+            elif "hour" in d:
+                dlist.append(d["hour"] * 3600)
+            elif "day" in d:
+                dlist.append(d["day"] * 24 * 3600)
             else:
             else:
-                dlist.append(d["max_days"] * 24 * 3600)   
-        use_seconds = True        
+                dlist.append(d["max_days"] * 24 * 3600)
+        use_seconds = True
     elif has_minutes:
     elif has_minutes:
         for d in datetime_delta:
         for d in datetime_delta:
-            if d.has_key("minute"):
-                dlist.append(d["minute"])   
-            elif d.has_key("hour"):
-                dlist.append(d["hour"] * 60)   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24 * 60)   
+            if "minute" in d:
+                dlist.append(d["minute"])
+            elif "hour" in d:
+                dlist.append(d["hour"] * 60)
+            elif "day" in d:
+                dlist.append(d["day"] * 24 * 60)
             else:
             else:
-                dlist.append(d["max_days"] * 24 * 60)   
-        use_minutes = True        
+                dlist.append(d["max_days"] * 24 * 60)
+        use_minutes = True
     elif has_hours:
         for d in datetime_delta:
-            if d.has_key("hour"):
-                dlist.append(d["hour"])   
-            elif d.has_key("day"):
-                dlist.append(d["day"] * 24)   
+            if "hour" in d:
+                dlist.append(d["hour"])
+            elif "day" in d:
+                dlist.append(d["day"] * 24)
             else:
-                dlist.append(d["max_days"] * 24)   
-        use_hours = True        
+                dlist.append(d["max_days"] * 24)
+        use_hours = True
     elif has_days:
         for d in datetime_delta:
-            if d.has_key("day"):
-                dlist.append(d["day"])   
+            if "day" in d:
+                dlist.append(d["day"])
             else:
-                dlist.append(d["max_days"])   
-        use_days = True        
+                dlist.append(d["max_days"])
+        use_days = True
     elif has_months:
         for d in datetime_delta:
-            if d.has_key("month"):
-                dlist.append(d["month"])   
-            elif d.has_key("year"):
-                dlist.append(d["year"] * 12)   
-        use_months = True        
+            if "month" in d:
+                dlist.append(d["month"])
+            elif "year" in d:
+                dlist.append(d["year"] * 12)
+        use_months = True
     elif has_years:
         for d in datetime_delta:
-            if d.has_key("year"):
-                dlist.append(d["year"])   
-        use_years = True        
+            if "year" in d:
+                dlist.append(d["year"])
+        use_years = True
 
 
     dlist.sort()
     ulist = list(set(dlist))
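To illustrate the branch selection above, a hand-worked sketch (assuming, purely for illustration, that compute_datetime_delta() reports only the non-zero components of a delta):

@code
# Assumed deltas (illustrative only): {"hour": 6} and {"day": 1}
# -> has_hours and has_days are both True, so the "elif has_hours"
#    branch is taken and days are converted to hours: dlist = [6, 24]
# -> the unique values in ulist are then reduced to a single
#    granularity, presumably 6 hours via gcd_list() further below.
@endcode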
@@ -229,20 +232,23 @@ def compute_absolute_time_granularity(maps):
 #  See http://www.opensource.org/licenses/mit-license.php
 # Error Codes:
 #   None
-def gcd(a,b):
-	"""!The Euclidean Algorithm """
-	a = abs(a)
-	b = abs(b)
-        while a:
-                a, b = b%a, a
-        return b
-        
+
+
+def gcd(a, b):
+    """!The Euclidean Algorithm """
+    a = abs(a)
+    b = abs(b)
+    while a:
+        a, b = b % a, a
+    return b
+
 ###############################################################################

+
 def gcd_list(list):
-	"""!Finds the GCD of numbers in a list.
-	Input: List of numbers you want to find the GCD of
-		E.g. [8, 24, 12]
-	Returns: GCD of all numbers
-	"""
-	return reduce(gcd, list)
+    """!Finds the GCD of numbers in a list.
+    Input: List of numbers you want to find the GCD of
+            E.g. [8, 24, 12]
+    Returns: GCD of all numbers
+    """
+    return reduce(gcd, list)
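A short doctest-style sketch of the two helpers (results worked out by hand; gcd_list() relies on the Python 2 builtin reduce(), consistent with the print statements and xrange() used elsewhere in this library):

@code
>>> gcd(8, 24)
8
>>> gcd_list([8, 24, 12])
4
@endcode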

+ 303 - 272
lib/python/temporal/temporal_relationships.py

@@ -25,43 +25,45 @@ from datetime_math import *
 
 
 ###############################################################################

+
 class temporal_topology_builder(object):
-    """!This class is designed to build the temporal topology based on a lists of maps
-    
-	Example:
-	@code
-	# We have a space time raster dataset and build a map list
-	# from all registered maps ordered by start time
-	maps = strds.get_registered_maps_as_objects()
-	
-	# Now lets build the temporal topology of the maps in the list
-	tb = temporal_topology_builder()
-	tb.build(maps)
-	
-	for _map in tb:
-	    _map.print_temporal_topology_info()
-	    _follows = _map.get_follows()
-	    if _follows:
-		for f in _follows:
-		    f.print_temporal_topology_info()
-	    
-	# Using the next and previous methods, we can iterate over the 
-	# topological related maps in this way
-	
-	_first = tb.get_first()
-	
-	while _first:
-	    _first.print_temporal_topology_info()
-	    _first = _first.next()
-	
-	# Dictionary like accessed
-	_map = tb["name@mapset"]
-	@endcode
-    
+    """!This class is designed to build the temporal topology 
+       based on a list of maps
+
+        Example:
+        @code
+        # We have a space time raster dataset and build a map list
+        # from all registered maps ordered by start time
+        maps = strds.get_registered_maps_as_objects()
+
+        # Now lets build the temporal topology of the maps in the list
+        tb = temporal_topology_builder()
+        tb.build(maps)
+
+        for _map in tb:
+            _map.print_temporal_topology_info()
+            _follows = _map.get_follows()
+            if _follows:
+                for f in _follows:
+                    f.print_temporal_topology_info()
+
+        # Using the next and previous methods, we can iterate over the
+        # topological related maps in this way
+
+        _first = tb.get_first()
+
+        while _first:
+            _first.print_temporal_topology_info()
+            _first = _first.next()
+
+        # Dictionary-like access
+        _map = tb["name@mapset"]
+        @endcode
+
     """
     """
     def __init__(self):
     def __init__(self):
-	self._reset()
-        
+        self._reset()
+
     def _reset(self):
         self._store = {}
         self._first = None
@@ -69,192 +71,214 @@ class temporal_topology_builder(object):
 
 
     def _set_first(self, first):
         self._first = first
-        self._insert(first)        
-        
+        self._insert(first)
+
     def _detect_first(self):
-	if len(self) > 0:
-	    _prev = self._store.values()[0]
-	    while _prev != None:
-		self._first = _prev
-		_prev = _prev.prev()
-		
+        if len(self) > 0:
+            prev_ = self._store.values()[0]
+            while prev_ is not None:
+                self._first = prev_
+                prev_ = prev_.temporal_prev()
+
     def _insert(self, t):
         self._store[t.get_id()] = t
-        
+
     def get_first(self):
-	"""!Return the first map with the earliest start time
-	
-	   @return The map with the earliest start time
-	"""
-	return self._first
+        """!Return the first map with the earliest start time
+
+           @return The map with the earliest start time
+        """
+        return self._first
 
 
     def _build_internal_iteratable(self, maps):
-	"""!Build an iteratable temporal topology structure for all maps in the list and store the maps internally
-	
-	   Basically the "next" and "prev" relations will be set in the temporal topology structure of each map
-	   The maps will be added to the object, so they can be accessed using the iterator of this class
-	   
-	   @param maps: A sorted (by start_time)list of abstract_dataset objects with initiated temporal extent
-	"""
-	self._build_iteratable(maps)
-
-	for _map in maps:
-	    self._insert(_map)
-	
-	# Detect the first map
-	self._detect_first()
-	
+        """!Build an iteratable temporal topology structure for all maps in 
+           the list and store the maps internally
+
+           Basically the "next" and "prev" relations will be set in the 
+           temporal topology structure of each map
+           The maps will be added to the object, so they can be 
+           accessed using the iterator of this class
+
+           @param maps: A sorted (by start_time) list of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        self._build_iteratable(maps)
+
+        for _map in maps:
+            self._insert(_map)
+
+        # Detect the first map
+        self._detect_first()
+
     def _build_iteratable(self, maps):
-	"""!Build an iteratable temporal topology structure for all maps in the list
-	
-	   Basically the "next" and "prev" relations will be set in the temporal topology structure of each map.
-	   
-	   @param maps: A sorted (by start_time)list of abstract_dataset objects with initiated temporal extent
-	"""
-	for i in xrange(len(maps)):
-	    offset = i + 1
-	    for j in xrange(offset, len(maps)):		
-		# Get the temporal relationship
-		relation = maps[j].temporal_relation(maps[i])
-		
-		# Build the next reference
-		if relation != "equivalent" and relation != "started":
-		    maps[i].set_next(maps[j])
-		    break
-		
-	for _map in maps:
-	    _next = _map.next()
-	    if _next:
-		_next.set_prev(_map)
-	    _map.set_temporal_topology_build_true()
-	
+        """!Build an iteratable temporal topology structure for 
+           all maps in the list
+
+           Basically the "next" and "prev" relations will be set in 
+           the temporal topology structure of each map.
+
+           @param maps: A sorted (by start_time) list of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        for i in xrange(len(maps)):
+            offset = i + 1
+            for j in xrange(offset, len(maps)):
+                # Get the temporal relationship
+                relation = maps[j].temporal_relation(maps[i])
+
+                # Build the next reference
+                if relation != "equivalent" and relation != "started":
+                    maps[i].set_temporal_next(maps[j])
+                    break
+
+        for map_ in maps:
+            next_ = map_.temporal_next()
+            if next_:
+                next_.set_temporal_prev(map_)
+            map_.set_temporal_topology_build_true()
+
     def build2(self, mapsA, mapsB):
-	"""!Build the temporal topology structure between two ordered lists of maps
-	
-	   This method builds the temporal topology from mapsA to mapsB and vice verse.
-	   The temporal topology structure of each map, defined in class temporal_map_relations,
-	   will be reseted and rebuild for mapsA and mapsB. 
-	   
-	   After building the temporal topology the modified map objects of mapsA can be accessed 
-	   in the same way as a dictionary using there id. The implemented iterator assures 
-	   the chronological iteration over the mapsA.	    
-	   
-	   @param mapsA: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	   @param mapsB: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	"""
-	
-	if mapsA == mapsB:
-	    self.build(mapsA, True)
-	    return
-	
-	for _map in mapsA:
-	    _map.reset_temporal_topology()
-	    
-	for _map in mapsB:
-	    _map.reset_temporal_topology()
-	
-	for i in xrange(len(mapsA)):
-	    for j in xrange(len(mapsB)):
-		
-		# Get the temporal relationship
-		relation = mapsB[j].temporal_relation(mapsA[i])
-		
-		if relation == "before":
-		    continue
-			    
-		if relation == "equivalent":
-		    mapsB[j].append_equivalent(mapsA[i])
-		    mapsA[i].append_equivalent(mapsB[j])
-		elif relation == "follows":
-		    mapsB[j].append_follows(mapsA[i])
-		    mapsA[i].append_precedes(mapsB[j])
-		elif relation == "precedes":
-		    mapsB[j].append_precedes(mapsA[i])
-		    mapsA[i].append_follows(mapsB[j])
-		elif relation == "during" or relation == "starts" or relation == "finishes":
-		    mapsB[j].append_during(mapsA[i])
-		    mapsA[i].append_contains(mapsB[j])
-		elif relation == "contains" or relation == "started" or relation == "finished":
-		    mapsB[j].append_contains(mapsA[i])
-		    mapsA[i].append_during(mapsB[j])
-		elif relation == "overlaps":
-		    mapsB[j].append_overlaps(mapsA[i])
-		    mapsA[i].append_overlapped(mapsB[j])
-		elif relation == "overlapped":
-		    mapsB[j].append_overlapped(mapsA[i])
-		    mapsA[i].append_overlaps(mapsB[j])
-
-		# Break if the next map follows and the over-next maps is after
-		if relation == "follows":
-		    if j < len(mapsB) - 1:
-			relation = mapsB[j + 1].temporal_relation(mapsA[i])
-			if relation == "after":
-			    break
-		# Break if the the next map is after
-		if relation == "after":
-		    break 
-	
-	self._build_internal_iteratable(mapsA)
-	self._build_iteratable(mapsB)
-			    
+        """!Build the temporal topology structure between 
+           two ordered lists of maps
+
+           This method builds the temporal topology from mapsA to 
+           mapsB and vice versa. The temporal topology structure of each map, 
+           defined in class temporal_map_relations,
+           will be reset and rebuilt for mapsA and mapsB.
+
+           After building the temporal topology the modified 
+           map objects of mapsA can be accessed
+           in the same way as a dictionary using their id. 
+           The implemented iterator ensures
+           the chronological iteration over mapsA.
+
+           @param mapsA: A sorted list (by start_time) of abstract_dataset 
+                         objects with initiated temporal extent
+           @param mapsB: A sorted list (by start_time) of abstract_dataset 
+                         objects with initiated temporal extent
+        """
+
+        if mapsA == mapsB:
+            self.build(mapsA, True)
+            return
+
+        for map_ in mapsA:
+            map_.reset_temporal_topology()
+
+        for map_ in mapsB:
+            map_.reset_temporal_topology()
+
+        for i in xrange(len(mapsA)):
+            for j in xrange(len(mapsB)):
+
+                # Get the temporal relationship
+                relation = mapsB[j].temporal_relation(mapsA[i])
+
+                if relation == "before":
+                    continue
+
+                if relation == "equivalent":
+                    mapsB[j].append_temporal_equivalent(mapsA[i])
+                    mapsA[i].append_temporal_equivalent(mapsB[j])
+                elif relation == "follows":
+                    mapsB[j].append_temporal_follows(mapsA[i])
+                    mapsA[i].append_temporal_precedes(mapsB[j])
+                elif relation == "precedes":
+                    mapsB[j].append_temporal_precedes(mapsA[i])
+                    mapsA[i].append_temporal_follows(mapsB[j])
+                elif relation == "during" or relation == "starts" or \
+                     relation == "finishes":
+                    mapsB[j].append_temporal_during(mapsA[i])
+                    mapsA[i].append_temporal_contains(mapsB[j])
+                elif relation == "contains" or relation == "started" or \
+                     relation == "finished":
+                    mapsB[j].append_temporal_contains(mapsA[i])
+                    mapsA[i].append_temporal_during(mapsB[j])
+                elif relation == "overlaps":
+                    mapsB[j].append_temporal_overlaps(mapsA[i])
+                    mapsA[i].append_temporal_overlapped(mapsB[j])
+                elif relation == "overlapped":
+                    mapsB[j].append_temporal_overlapped(mapsA[i])
+                    mapsA[i].append_temporal_overlaps(mapsB[j])
+
+                # Break if the next map follows and the map after next is after
+                if relation == "follows":
+                    if j < len(mapsB) - 1:
+                        relation = mapsB[j + 1].temporal_relation(mapsA[i])
+                        if relation == "after":
+                            break
+                # Break if the next map is after
+                if relation == "after":
+                    break
+
+        self._build_internal_iteratable(mapsA)
+        self._build_iteratable(mapsB)
+
     def build(self, maps):
-	"""!Build the temporal topology structure
-	
-	   This method builds the temporal topology based on all maps in the provided map list.
-	   The temporal topology structure of each map, defined in class temporal_map_relations,
-	   will be reseted and rebuild. 
-	   
-	   After building the temporal topology the modified map objects can be accessed 
-	   in the same way as a dictionary using there id. The implemented iterator assures 
-	   the chronological iteration over the maps.	   
-	   
-	   @param maps: A sorted list (by start_time) of abstract_dataset objects with initiated temporal extent
-	"""
-	for _map in maps:
-	    _map.reset_temporal_topology()
-	
-	for i in xrange(len(maps)):
-	    offset = i + 1
-	    for j in xrange(offset, len(maps)):
-		
-		# Get the temporal relationship
-		relation = maps[j].temporal_relation(maps[i])
-			    
-		# The start time of map j is equal or later than map i
-		if relation == "equivalent":
-		    maps[j].append_equivalent(maps[i])
-		    maps[i].append_equivalent(maps[j])
-		elif relation == "follows":
-		    maps[j].append_follows(maps[i])
-		    maps[i].append_precedes(maps[j])
-		elif relation == "during" or relation == "starts" or relation == "finishes":
-		    maps[j].append_during(maps[i])
-		    maps[i].append_contains(maps[j])
-		elif relation == "started":
-		    # Consider equal start time, in case "started" map j contains map i
-		    maps[j].append_contains(maps[i])
-		    maps[i].append_during(maps[j])
-		elif relation == "overlaps":
-		    maps[j].append_overlaps(maps[i])
-		    maps[i].append_overlapped(maps[j])
-
-		# Break if the last map follows
-		if relation == "follows":
-		    if j < len(maps) - 1:
-			relation = maps[j + 1].temporal_relation(maps[i])
-			if relation == "after":
-			    break
-		# Break if the the next map is after
-		if relation == "after":
-		    break 
-		    
-	self._build_internal_iteratable(maps)
-	
+        """!Build the temporal topology structure
+
+           This method builds the temporal topology based on 
+           all maps in the provided map list.
+           The temporal topology structure of each map, 
+           defined in class temporal_map_relations,
+           will be reset and rebuilt.
+
+           After building the temporal topology the 
+           modified map objects can be accessed
+           in the same way as a dictionary using their id. 
+           The implemented iterator ensures
+           the chronological iteration over the maps.
+
+           @param maps: A sorted list (by start_time) of abstract_dataset 
+                        objects with initiated temporal extent
+        """
+        for map_ in maps:
+            map_.reset_temporal_topology()
+
+        for i in xrange(len(maps)):
+            offset = i + 1
+            for j in xrange(offset, len(maps)):
+
+                # Get the temporal relationship
+                relation = maps[j].temporal_relation(maps[i])
+
+                # The start time of map j is equal or later than map i
+                if relation == "equivalent":
+                    maps[j].append_temporal_equivalent(maps[i])
+                    maps[i].append_temporal_equivalent(maps[j])
+                elif relation == "follows":
+                    maps[j].append_temporal_follows(maps[i])
+                    maps[i].append_temporal_precedes(maps[j])
+                elif relation == "during" or relation == "starts" or \
+                     relation == "finishes":
+                    maps[j].append_temporal_during(maps[i])
+                    maps[i].append_temporal_contains(maps[j])
+                elif relation == "started":
+                    # Consider equal start time, in case 
+                    # "started" map j contains map i
+                    maps[j].append_temporal_contains(maps[i])
+                    maps[i].append_temporal_during(maps[j])
+                elif relation == "overlaps":
+                    maps[j].append_temporal_overlaps(maps[i])
+                    maps[i].append_temporal_overlapped(maps[j])
+
+                # Break if the last map follows
+                if relation == "follows":
+                    if j < len(maps) - 1:
+                        relation = maps[j + 1].temporal_relation(maps[i])
+                        if relation == "after":
+                            break
+                # Break if the next map is after
+                if relation == "after":
+                    break
+
+        self._build_internal_iteratable(maps)
+
     def __iter__(self):
-	_start = self._first
-	while _start != None:
-	    yield _start
-	    _start = _start.next()
+        start_ = self._first
+        while start_ is not None:
+            yield start_
+            start_ = start_.temporal_next()
 
 
     def __getitem__(self, index):
         return self._store[index.get_id()]
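For the two-list case handled by build2(), a minimal sketch; the dataset names and getter calls are assumptions taken from the class docstring above, not verified against the rest of the library:

@code
# Hypothetical: relate the maps of two space time raster datasets
maps_a = strds_a.get_registered_maps_as_objects()
maps_b = strds_b.get_registered_maps_as_objects()

tb = temporal_topology_builder()
tb.build2(maps_a, maps_b)

# The maps of maps_a can now be iterated chronologically
for map_ in tb:
    map_.print_temporal_topology_info()

# Note: __getitem__() above calls get_id() on its argument, so the
# builder expects a map object rather than a plain "name@mapset" string
first_a = tb[maps_a[0]]
@endcode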
@@ -269,86 +293,93 @@ class temporal_topology_builder(object):
 ###############################################################################

 def print_temporal_topology_relationships(maps1, maps2):
-    """!Print the temporal relation matrix of the temporal ordered map lists maps1 and maps2
-       to stdout.
-	
-	@param maps1: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@param maps2: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
+    """!Print the temporal relation matrix of the temporal ordered 
+       map lists maps1 and maps2 to stdout.
+
+        @param maps1: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @param maps2: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
     """
     """
-    
+
     identical = False
     use_id = True
-    
+
     if maps1 == maps2:
-	identical = True
-	use_id = False
+        identical = True
+        use_id = False
 
 
     for i in range(len(maps1)):
-	if identical == True:
-	    start = i + 1
-	else:
-	    start = 0
-	for j in range(start, len(maps2)):
-	    relation = maps1[j].temporal_relation(maps2[i])
-
-	    if use_id == False:
-		print maps2[j].base.get_name(), relation, maps1[i].base.get_name()
-	    else:
-		print maps2[j].base.get_id(), relation, maps1[i].base.get_id()
-
-	    # Break if the last map follows
-	    if relation == "follows":
-		if j < len(maps1) - 1:
-		    relation = maps1[j + 1].temporal_relation(maps2[i])
-		    if relation == "after":
-			break
-	    # Break if the the next map is after
-	    if relation == "after":
-		break
+        if identical == True:
+            start = i + 1
+        else:
+            start = 0
+        for j in range(start, len(maps2)):
+            relation = maps1[j].temporal_relation(maps2[i])
+
+            if use_id == False:
+                print maps2[j].base.get_name(
+                ), relation, maps1[i].base.get_name()
+            else:
+                print maps2[j].base.get_id(), relation, maps1[i].base.get_id()
+
+            # Break if the last map follows
+            if relation == "follows":
+                if j < len(maps1) - 1:
+                    relation = maps1[j + 1].temporal_relation(maps2[i])
+                    if relation == "after":
+                        break
+            # Break if the next map is after
+            if relation == "after":
+                break
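For orientation, the kind of output print_temporal_topology_relationships() produces when called with a single list (maps1 == maps2, so map names rather than ids are printed); the names are illustrative only:

@code
prec_2 follows prec_1
prec_3 follows prec_2
prec_4 follows prec_3
@endcode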
 
 
 ###############################################################################

+
 def count_temporal_topology_relationships(maps1, maps2):
     """!Count the temporal relations between the two lists of maps

-	The map lists must be ordered by start time. Temporal relations are counted 
-	by analyzing the sparse (upper right side in case maps1 == maps2) temporal relationships matrix.
+        The map lists must be ordered by start time. 
+        Temporal relations are counted by analyzing the sparse 
+        (upper right side in case maps1 == maps2) temporal relationships matrix.
 
 
-	@param maps1: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@param maps2: A sorted (by start_time) list of abstract_dataset objects with initiated temporal extent
-	@return A dictionary with counted temporal relationships
+        @param maps1: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @param maps2: A sorted (by start_time) list of abstract_dataset 
+                      objects with initiated temporal extent
+        @return A dictionary with counted temporal relationships
     """
     """
-    
+
     tcount = {}
     identical = False
-    
+
     if maps1 == maps2:
-	identical = True
+        identical = True
 
 
     for i in range(len(maps1)):
-	if identical == True:
-	    start = i + 1
-	else:
-	    start = 0
-	for j in range(start, len(maps2)):
-	    relation = maps1[j].temporal_relation(maps2[i])
-
-	    if relation == "before":
-		continue
-	    
-	    if tcount.has_key(relation):
-		tcount[relation] = tcount[relation] + 1
-	    else:
-		tcount[relation] = 1
-
-	    # Break if the last map follows
-	    if relation == "follows":
-		if j < len(maps1) - 1:
-		    relation = maps1[j + 1].temporal_relation(maps2[i])
-		    if relation == "after":
-			break
-	    # Break if the the next map is after
-	    if relation == "after":
-		break  
+        if identical:
+            start = i + 1
+        else:
+            start = 0
+        for j in range(start, len(maps2)):
+            relation = maps1[j].temporal_relation(maps2[i])
+
+            if relation == "before":
+                continue
+
+            if relation in tcount:
+                tcount[relation] = tcount[relation] + 1
+            else:
+                tcount[relation] = 1
+
+            # Break if the last map follows
+            if relation == "follows":
+                if j < len(maps1) - 1:
+                    relation = maps1[j + 1].temporal_relation(maps2[i])
+                    if relation == "after":
+                        break
+            # Break if the next map is after
+            if relation == "after":
+                break
 
 
     return tcount
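A minimal usage sketch; the dataset and the counts in the comment are illustrative only:

@code
maps = strds.get_registered_maps_as_objects()
tcount = count_temporal_topology_relationships(maps, maps)
# e.g. {'follows': 11, 'during': 2, 'overlaps': 1}
print tcount
@endcode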

File diff is too large to display
+ 656 - 522
lib/python/temporal/unit_tests.py


+ 117 - 80
lib/python/temporal/univar_statistics.py

@@ -9,7 +9,8 @@ Usage:
 @code
 import grass.temporal as tgis

-tgis.print_gridded_dataset_univar_statistics(type, input, where, extended, header, fs)
+tgis.print_gridded_dataset_univar_statistics(
+    type, input, where, extended, header, fs)
 
 
 ...
 @endcode
@@ -25,23 +26,24 @@ for details.
 from space_time_datasets_tools import *

 ###############################################################################
-    
+
+
 def print_gridded_dataset_univar_statistics(type, input, where, extended, header, fs):
     """!Print univariate statistics for a space time raster or raster3d dataset
-    
-      param type Must be "strds" or "str3ds"
+
+       @param type Must be "strds" or "str3ds"
        @param input The name of the space time dataset
        @param where A temporal database where statement
-       @param extended If True compute extended statistics 
-       @param header   If True print column names as header 
-       @param fs Field separator 
+       @param extended If True compute extended statistics
+       @param header   If True print column names as header
+       @param fs Field separator
     """
     """
-    
+
     # We need a database interface
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
-   
-    mapset =  core.gisenv()["MAPSET"]
+
+    mapset = core.gisenv()["MAPSET"]
 
 
     if input.find("@") >= 0:
     if input.find("@") >= 0:
         id = input
         id = input
@@ -49,76 +51,90 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended, header
         id = input + "@" + mapset
         id = input + "@" + mapset
 
 
     sp = dataset_factory(type, id)
     sp = dataset_factory(type, id)
-    
+
     if sp.is_in_db(dbif) == False:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
 
     sp.select(dbif)

-    rows = sp.get_registered_maps("id,start_time,end_time", where, "start_time", dbif)
+    rows = sp.get_registered_maps(
+        "id,start_time,end_time", where, "start_time", dbif)
 
 
     if not rows:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> is empty") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> is empty") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
 
     if header == True:
-        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + "min" + fs + "max" + fs,
+        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + \
+            "min" + fs + "max" + fs,
         print "mean_of_abs" + fs + "stddev" + fs + "variance" + fs,
         print "mean_of_abs" + fs + "stddev" + fs + "variance" + fs,
         if extended == True:
         if extended == True:
-            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells" + fs,
-            print "first_quartile" + fs + "median" + fs + "third_quartile" + fs + "percentile_90" 
+            print "coeff_var" + fs + "sum" + fs + \
+                "null_cells" + fs + "cells" + fs,
+            print "first_quartile" + fs + "median" + fs + \
+                "third_quartile" + fs + "percentile_90"
         else:
-            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells" 
+            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"
 
 
     for row in rows:
         id = row["id"]
         start = row["start_time"]
         end = row["end_time"]

-        flag="g"
+        flag = "g"
 
 
         if extended == True:
             flag += "e"

-	if type == "strds":
-	    stats = core.parse_command("r.univar", map=id, flags=flag)
-	elif type == "str3ds":
-	    stats = core.parse_command("r3.univar", map=id, flags=flag)
+        if type == "strds":
+            stats = core.parse_command("r.univar", map=id, flags=flag)
+        elif type == "str3ds":
+            stats = core.parse_command("r3.univar", map=id, flags=flag)
 
 
         print str(id) + fs + str(start) + fs + str(end),
-        print fs + str(stats["mean"]) + fs + str(stats["min"]) + fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
-        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),
+        print fs + str(stats["mean"]) + fs + str(stats["min"]) + \
+            fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
+        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + \
+            fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),
 
 
         if extended == True:
-            print fs + str(stats["null_cells"]) + fs + str(stats["cells"]) + fs,
-            print str(stats["first_quartile"]) + fs + str(stats["median"]) + fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"]) 
+            print fs + str(stats["null_cells"]) + fs + str(
+                stats["cells"]) + fs,
+            print str(stats["first_quartile"]) + fs + str(stats["median"]) + \
+                  fs + str(stats["third_quartile"]) + \
+                  fs + str(stats["percentile_90"])
         else:
             print fs + str(stats["null_cells"]) + fs + str(stats["cells"])
-        
+
     dbif.close()
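A hedged call sketch for the gridded variant; the dataset name is a placeholder and the keyword values simply mirror the positional signature above:

@code
import grass.temporal as tgis

# Pipe-separated univariate statistics of a space time raster dataset
tgis.print_gridded_dataset_univar_statistics(
    "strds", "tempmean_monthly", where=None, extended=False,
    header=True, fs="|")
@endcode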
 
 
 ###############################################################################
-    
-def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, where, extended, header, fs):
+
+
+def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, 
+                                           where, extended, header, fs):
     """!Print univariate statistics for a space time vector dataset
     """!Print univariate statistics for a space time vector dataset
-    
-       @param input The name of the space time dataset
-       @param twhere A temporal database where statement
-       @param layer The layer number used in case no layer is present in the temporal dataset
-       @param type options: point,line,boundary,centroid,area
-       @param column The name of the attribute column
-       @param where A temporal database where statement
-       @param extended If True compute extended statistics 
-       @param header   If True print column names as header 
-       @param fs Field separator 
+
+       @param input: The name of the space time dataset
+       @param twhere: A temporal database where statement
+       @param layer: The layer number used in case no layer is present 
+              in the temporal dataset
+       @param type: options: point,line,boundary,centroid,area
+       @param column: The name of the attribute column
+       @param where: A temporal database where statement
+       @param extended: If True compute extended statistics
+       @param header:   If True print column names as header
+       @param fs: Field separator
     """
     """
 
 
     # We need a database interface
     # We need a database interface
     dbif = SQLDatabaseInterfaceConnection()
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
     dbif.connect()
-   
-    mapset =  core.gisenv()["MAPSET"]
+
+    mapset = core.gisenv()["MAPSET"]
 
 
     if input.find("@") >= 0:
     if input.find("@") >= 0:
         id = input
         id = input
@@ -126,71 +142,92 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column, w
         id = input + "@" + mapset
         id = input + "@" + mapset
 
 
     sp = dataset_factory("stvds", id)
     sp = dataset_factory("stvds", id)
-    
+
     if sp.is_in_db(dbif) == False:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> not found") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> not found") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
 
     sp.select(dbif)

-    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer", twhere, "start_time", dbif)
+    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer",
+                                  twhere, "start_time", dbif)
 
 
     if not rows:
         dbif.close()
-        core.fatal(_("Space time %s dataset <%s> is empty") % (sp.get_new_map_instance(None).get_type(), id))
+        core.fatal(_("Space time %s dataset <%s> is empty") % (
+            sp.get_new_map_instance(None).get_type(), id))
 
 
     string = ""
     string = ""
     if header == True:
     if header == True:
-        string += "id" + fs + "start" + fs + "end" + fs + "n" + fs + "nmissing" + fs + "nnull" + fs
+        string += "id" + fs + "start" + fs + "end" + fs + "n" + \
+            fs + "nmissing" + fs + "nnull" + fs
         string += "min" + fs + "max" + fs + "range"
         string += "min" + fs + "max" + fs + "range"
-	if type == "point" or type == "centroid":
-            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" + fs + "population_variance" + fs
-	    string += "population_coeff_variation" + fs + "sample_stddev" + fs + "sample_variance" + fs
-	    string += "kurtosis" + fs + "skewness"
+        if type == "point" or type == "centroid":
+            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" +\
+                      fs + "population_variance" + fs
+            string += "population_coeff_variation" + fs + \
+                "sample_stddev" + fs + "sample_variance" + fs
+            string += "kurtosis" + fs + "skewness"
             if extended == True:
-                string+= fs + "first_quartile" + fs + "median" + fs + "third_quartile" + fs + "percentile_90" 
+                string += fs + "first_quartile" + fs + "median" + fs + \
+                    "third_quartile" + fs + "percentile_90"
 
 
-	print string
+        print string
 
 
     for row in rows:
         id = row["name"] + "@" + row["mapset"]
         start = row["start_time"]
         end = row["end_time"]
-	mylayer = row["layer"]
+        mylayer = row["layer"]
 
 
-        flags="g"
+        flags = "g"
 
 
         if extended == True:
             flags += "e"

-	if not mylayer:
-	    mylayer = layer
+        if not mylayer:
+            mylayer = layer
 
 
-        stats = core.parse_command("v.univar", map=id, where=where, column=column, layer=mylayer, type=type, flags=flags)
+        stats = core.parse_command("v.univar", map=id, where=where,
+                                   column=column, layer=mylayer, 
+                                   type=type, flags=flags)
 
 
-	string = ""
-	if stats:
+        string = ""
+        if stats:
             string += str(id) + fs + str(start) + fs + str(end)
-            string += fs + str(stats["n"]) + fs + str(stats["nmissing"]) + fs + str(stats["nnull"])
-	    if stats.has_key("min"):
-            	string += fs + str(stats["min"]) + fs + str(stats["max"]) + fs + str(stats["range"])
-	    else:
-            	string += fs + fs + fs
-
-	    if type == "point" or type == "centroid":
-		if stats.has_key("mean"):
-            	    string += fs + str(stats["mean"]) + fs + str(stats["mean_abs"]) + fs + str(stats["population_stddev"]) + fs + str(stats["population_variance"])
-            	    string += fs + str(stats["population_coeff_variation"]) + fs + str(stats["sample_stddev"]) + fs + str(stats["sample_variance"])
-            	    string += fs + str(stats["kurtosis"]) + fs + str(stats["skewness"])
-	        else:
-            	    string += fs + fs + fs + fs + fs + fs + fs + fs + fs
+            string += fs + str(stats["n"]) + fs + str(stats[
+                "nmissing"]) + fs + str(stats["nnull"])
+            if "min" in stats:
+                string += fs + str(stats["min"]) + fs + str(
+                    stats["max"]) + fs + str(stats["range"])
+            else:
+                string += fs + fs + fs
+
+            if type == "point" or type == "centroid":
+                if "mean" in stats:
+                    string += fs + str(stats["mean"]) + fs + \
+                    str(stats["mean_abs"]) + fs + \
+                    str(stats["population_stddev"]) + fs + \
+                    str(stats["population_variance"])
+                    
+                    string += fs + str(stats["population_coeff_variation"]) + \
+                    fs + str(stats["sample_stddev"]) + fs + \
+                    str(stats["sample_variance"])
+                    
+                    string += fs + str(stats["kurtosis"]) + fs + \
+                    str(stats["skewness"])
+                else:
+                    string += fs + fs + fs + fs + fs + fs + fs + fs + fs
                 if extended == True:
-		    if stats.has_key("first_quartile"):
-                        string += fs + str(stats["first_quartile"]) + fs + str(stats["median"]) + fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"]) 
-		    else:
-                        string += fs + fs + fs + fs 
-        
-	    print string
+                    if "first_quartile" in stats:
+                        string += fs + str(stats["first_quartile"]) + fs + \
+                        str(stats["median"]) + fs + \
+                        str(stats["third_quartile"]) + fs + \
+                        str(stats["percentile_90"])
+                    else:
+                        string += fs + fs + fs + fs
 
 
-    dbif.close()
+            print string
 
 
+    dbif.close()
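And a corresponding sketch for the vector variant; the dataset name and attribute column are placeholders:

@code
import grass.temporal as tgis

tgis.print_vector_dataset_univar_statistics(
    input="precip_stations", twhere=None, layer=1, type="point",
    column="value", where=None, extended=True, header=True, fs="|")
@endcode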