
python temporal lib: PEP8 cleaning; markdown cleaning

git-svn-id: https://svn.osgeo.org/grass/grass/trunk@62343 15284696-431f-4ddb-bdfa-cd5b030d7da7
Luca Delucchi 10 years ago
parent
commit
b9f4872f09
36 changed files with 3025 additions and 2481 deletions
  1. + 68 - 57    lib/python/temporal/abstract_dataset.py
  2. + 163 - 135  lib/python/temporal/abstract_map_dataset.py
  3. + 201 - 155  lib/python/temporal/abstract_space_time_dataset.py
  4. + 47 - 40    lib/python/temporal/aggregation.py
  5. + 72 - 49    lib/python/temporal/base.py
  6. + 127 - 95   lib/python/temporal/c_libraries_interface.py
  7. + 246 - 170  lib/python/temporal/core.py
  8. + 74 - 62    lib/python/temporal/datetime_math.py
  9. + 20 - 21    lib/python/temporal/extract.py
  10. + 0 - 1     lib/python/temporal/factory.py
  11. + 9 - 8     lib/python/temporal/gui_support.py
  12. + 44 - 32   lib/python/temporal/list_stds.py
  13. + 29 - 32   lib/python/temporal/mapcalc.py
  14. + 42 - 35   lib/python/temporal/metadata.py
  15. + 23 - 17   lib/python/temporal/open_stds.py
  16. + 42 - 39   lib/python/temporal/register.py
  17. + 16 - 11   lib/python/temporal/sampling.py
  18. + 73 - 62   lib/python/temporal/space_time_datasets.py
  19. + 38 - 28   lib/python/temporal/spatial_extent.py
  20. + 62 - 66   lib/python/temporal/spatial_topology_dataset_connector.py
  21. + 113 - 88  lib/python/temporal/spatio_temporal_relationships.py
  22. + 26 - 22   lib/python/temporal/stds_export.py
  23. + 76 - 73   lib/python/temporal/stds_import.py
  24. + 390 - 360 lib/python/temporal/temporal_algebra.py
  25. + 147 - 125 lib/python/temporal/temporal_extent.py
  26. + 11 - 6    lib/python/temporal/temporal_granularity.py
  27. + 21 - 13   lib/python/temporal/temporal_raster3d_algebra.py
  28. + 15 - 10   lib/python/temporal/temporal_raster_algebra.py
  29. + 473 - 342 lib/python/temporal/temporal_raster_base_algebra.py
  30. + 48 - 48   lib/python/temporal/temporal_raster_operator.py
  31. + 97 - 103  lib/python/temporal/temporal_topology_dataset_connector.py
  32. + 80 - 59   lib/python/temporal/temporal_vector_algebra.py
  33. + 61 - 61   lib/python/temporal/temporal_vector_operator.py
  34. + 1 - 0     lib/python/temporal/testsuite/test_doctests.py
  35. + 43 - 34   lib/python/temporal/unit_tests.py
  36. + 27 - 22   lib/python/temporal/univar_statistics.py
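
The hunks below are almost entirely mechanical style fixes: identity comparisons ("is None", "is True") instead of "==", long translated messages split into implicitly concatenated string literals wrapped to 79 columns, continuation lines aligned with the opening bracket, and two blank lines before top-level class definitions. A condensed, self-contained illustration of these patterns (hypothetical example names, not code taken from the commit):

# -*- coding: utf-8 -*-
# Condensed illustration of the PEP8 patterns applied throughout this
# commit (hypothetical example, not an excerpt from the GRASS sources).


def describe_dataset(ds_id, ds_type, end=None):
    # identity test instead of "end == None"
    if end is None:
        end = "open interval"
    # long message split over implicitly concatenated string literals,
    # with continuation lines aligned under the opening bracket
    return ("dataset <%(ds)s> of type %(type)s, "
            "end time: %(end)s" % {"ds": ds_id,
                                   "type": ds_type,
                                   "end": end})


print(describe_dataset("tempmean@climate", "strds"))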

+ 68 - 57
lib/python/temporal/abstract_dataset.py

@@ -10,8 +10,6 @@ for details.
 
 :authors: Soeren Gebbert
 """
-import uuid
-import copy
 from abc import ABCMeta, abstractmethod
 from temporal_extent import *
 from spatial_extent import *
@@ -19,11 +17,11 @@ from metadata import *
 from temporal_topology_dataset_connector import *
 from spatial_topology_dataset_connector import *
 
-from grass.exceptions import ImplementationError
-
 ###############################################################################
 
-class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetConnector):
+
+class AbstractDataset(SpatialTopologyDatasetConnector,
+                      TemporalTopologyDatasetConnector):
     """This is the base class for all datasets
        (raster, vector, raster3d, strds, stvds, str3ds)"""
 
@@ -35,21 +33,19 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
         self.msgr = get_tgis_message_interface()
 
     def reset_topology(self):
-        """
-        Reset any information about temporal topology"""
-        
+        """Reset any information about temporal topology"""
+
         self.reset_spatial_topology()
         self.reset_temporal_topology()
 
     def get_number_of_relations(self):
-        """ 
-        Return a dictionary in which the keys are the relation names and the value
-        are the number of relations.
+        """Return a dictionary in which the keys are the relation names and the
+        value are the number of relations.
 
         The following relations are available:
 
         Spatial relations:
-        
+
             - equivalent
             - overlap
             - in
@@ -59,7 +55,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
             - covered
 
         Temporal relations:
-        
+
             - equal
             - follows
             - precedes
@@ -72,18 +68,19 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
             - finishes
             - finished
 
-        To access topological information the spatial, temporal or booth topologies must be build first
-        using the SpatioTemporalTopologyBuilder.
+        To access topological information the spatial, temporal or booth
+        topologies must be build first using the SpatioTemporalTopologyBuilder.
 
-        :return: The dictionary with relations as keys and number as values or None in case the topology  wasn't build
+        :return: The dictionary with relations as keys and number as values or
+                 None in case the topology  wasn't build
         """
         if self.is_temporal_topology_build() and not self.is_spatial_topology_build():
             return self.get_number_of_temporal_relations()
         elif self.is_spatial_topology_build() and not self.is_temporal_topology_build():
             self.get_number_of_spatial_relations()
         else:
-            return  self.get_number_of_temporal_relations() + \
-                    self.get_number_of_spatial_relations()
+            return self.get_number_of_temporal_relations() + \
+                   self.get_number_of_spatial_relations()
 
         return None
 
@@ -92,7 +89,6 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
         self.set_spatial_topology_build_true()
         self.set_temporal_topology_build_true()
 
-
     def set_topology_build_false(self):
         """Use this method when the spatio-temporal topology was not build"""
         self.set_spatial_topology_build_false()
@@ -101,7 +97,8 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def is_topology_build(self):
         """Check if the spatial and temporal topology was build
 
-           :return: A dictionary with "spatial" and "temporal" as keys that have boolen values
+           :return: A dictionary with "spatial" and "temporal" as keys that
+                    have boolen values
         """
         d = {}
         d["spatial"] = self.is_spatial_topology_build()
@@ -109,7 +106,6 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
         return d
 
-
     def print_topology_info(self):
         if self.is_temporal_topology_build():
             self.print_temporal_topology_info()
@@ -131,7 +127,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
             and the metadata. It must be implemented in the dataset
             specific subclasses. This is the code for the
             vector dataset:
-            
+
             .. code-block:: python
 
                 self.base = VectorBase(ident=ident)
@@ -140,7 +136,8 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
                 self.spatial_extent = VectorSpatialExtent(ident=ident)
                 self.metadata = VectorMetadata(ident=ident)
 
-           :param ident: The identifier of the dataset that  "name@mapset" or in case of vector maps "name:layer@mapset"
+           :param ident: The identifier of the dataset that  "name@mapset" or
+                         in case of vector maps "name:layer@mapset"
         """
 
     @abstractmethod
@@ -205,7 +202,8 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def spatial_relation(self, dataset):
         """Return the spatial relationship between self and dataset
 
-           :param dataset: The abstract dataset to compute the spatial relation with self
+           :param dataset: The abstract dataset to compute the spatial
+                           relation with self
            :return: The spatial relationship as string
         """
 
@@ -251,8 +249,8 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def get_temporal_extent_as_tuple(self):
         """Returns a tuple of the valid start and end time
 
-           Start and end time can be either of type datetime or of type integer,
-           depending on the temporal type.
+           Start and end time can be either of type datetime or of type
+           integer, depending on the temporal type.
 
            :return: A tuple of (start_time, end_time)
         """
@@ -327,9 +325,11 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def get_spatial_extent_as_tuple(self):
         """Return the spatial extent as tuple
 
-           Top and bottom are set to 0 in case of a two dimensional spatial extent.
+           Top and bottom are set to 0 in case of a two dimensional spatial
+           extent.
 
-           :return: A the spatial extent as tuple (north, south, east, west, top, bottom)
+           :return: A the spatial extent as tuple (north, south, east, west,
+                    top, bottom)
         """
         return self.spatial_extent.get_spatial_extent_as_tuple()
 
@@ -378,15 +378,18 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                                      If False the prepared SQL statements are returned
-                                     and must be executed by the caller.
-            :return: The SQL insert statement in case execute=False, or an empty string otherwise
+                           If False the prepared SQL statements are returned
+                           and must be executed by the caller.
+           :return: The SQL insert statement in case execute=False, or an
+                    empty string otherwise
         """
 
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to insert dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+            self.msgr.fatal(_("Unable to insert dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         dbif, connected = init_dbif(dbif)
 
@@ -414,17 +417,19 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                                      If False the prepared SQL statements are returned
-                                      and must be executed by the caller.
+                           If False the prepared SQL statements are returned
+                           and must be executed by the caller.
            :param ident: The identifier to be updated, useful for renaming
-           :return: The SQL update statement in case execute=False, or an empty string otherwise
+           :return: The SQL update statement in case execute=False, or an
+                    empty string otherwise
         """
 
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                                 {"ds":self.get_id(), "type":self.get_type()})
-
+            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         dbif, connected = init_dbif(dbif)
 
@@ -455,16 +460,19 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                                      f False the prepared SQL statements are returned
-                                     and must be executed by the caller.
+                           If False the prepared SQL statements are returned
+                           and must be executed by the caller.
            :param ident: The identifier to be updated, useful for renaming
-           :return: The SQL update statement in case execute=False, or an empty string otherwise
+           :return: The SQL update statement in case execute=False, or an
+                    empty string otherwise
         """
 
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset"
+                              " of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         dbif, connected = init_dbif(dbif)
 
@@ -472,9 +480,10 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
         statement = self.base.get_update_all_statement_mogrified(dbif, ident)
         statement += self.temporal_extent.get_update_all_statement_mogrified(dbif,
                                                                              ident)
-        statement += self.spatial_extent.get_update_all_statement_mogrified(
-            dbif, ident)
-        statement += self.metadata.get_update_all_statement_mogrified(dbif, ident)
+        statement += self.spatial_extent.get_update_all_statement_mogrified(dbif,
+                                                                            ident)
+        statement += self.metadata.get_update_all_statement_mogrified(dbif,
+                                                                      ident)
 
         if self.is_stds() is False:
             statement += self.stds_register.get_update_all_statement_mogrified(dbif, ident)
@@ -492,7 +501,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def is_time_absolute(self):
         """Return True in case the temporal type is absolute
 
-            :return: True if temporal type is absolute, False otherwise
+           :return: True if temporal type is absolute, False otherwise
         """
         if "temporal_type" in self.base.D:
             return self.base.get_ttype() == "absolute"
@@ -502,7 +511,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def is_time_relative(self):
         """Return True in case the temporal type is relative
 
-            :return: True if temporal type is relative, False otherwise
+           :return: True if temporal type is relative, False otherwise
         """
         if "temporal_type" in self.base.D:
             return self.base.get_ttype() == "relative"
@@ -523,7 +532,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
     def temporal_relation(self, dataset):
         """Return the temporal relation of self and the provided dataset
 
-            :return: The temporal relation as string
+           :return: The temporal relation as string
         """
         return self.temporal_extent.temporal_relation(dataset.temporal_extent)
 
@@ -533,7 +542,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
            :param dataset: The abstract dataset to temporal intersect with
            :return: The new temporal extent with start and end time,
-                        or None in case of no intersection
+                    or None in case of no intersection
         """
         return self.temporal_extent.intersect(dataset.temporal_extent)
 
@@ -543,7 +552,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
            :param dataset: The abstract dataset to create temporal union with
            :return: The new temporal extent with start and end time,
-                        or None in case of no intersection
+                    or None in case of no intersection
         """
         return self.temporal_extent.union(dataset.temporal_extent)
 
@@ -558,6 +567,7 @@ class AbstractDataset(SpatialTopologyDatasetConnector, TemporalTopologyDatasetCo
 
 ###############################################################################
 
+
 class AbstractDatasetComparisonKeyStartTime(object):
     """This comparison key can be used to sort lists of abstract datasets
        by start time
@@ -565,7 +575,7 @@ class AbstractDatasetComparisonKeyStartTime(object):
         Example:
 
         .. code-block:: python
-        
+
             # Return all maps in a space time raster dataset as map objects
             map_list = strds.get_registered_maps_as_objects()
 
@@ -607,6 +617,7 @@ class AbstractDatasetComparisonKeyStartTime(object):
 
 ###############################################################################
 
+
 class AbstractDatasetComparisonKeyEndTime(object):
     """This comparison key can be used to sort lists of abstract datasets
        by end time
@@ -614,7 +625,7 @@ class AbstractDatasetComparisonKeyEndTime(object):
         Example:
 
         .. code-block:: python
-        
+
             # Return all maps in a space time raster dataset as map objects
             map_list = strds.get_registered_maps_as_objects()
 

+ 163 - 135
lib/python/temporal/abstract_map_dataset.py

@@ -13,6 +13,7 @@ for details.
 from abstract_dataset import *
 from datetime_math import *
 
+
 class AbstractMapDataset(AbstractDataset):
     """This is the base class for all maps (raster, vector, raster3d).
 
@@ -22,7 +23,7 @@ class AbstractMapDataset(AbstractDataset):
 
         This class provides all functionalities that are needed to manage maps
         in the temporal database. That are:
-        
+
         - insert() to register the map and therefore its spatio-temporal extent
           and metadata in the temporal database
         - update() to update the map spatio-temporal extent and metadata in the
@@ -33,6 +34,7 @@ class AbstractMapDataset(AbstractDataset):
         - Methods to set relative and absolute time stamps
         - Abstract methods that must be implemented in the map specific
           subclasses
+
     """
 
     __metaclass__ = ABCMeta
@@ -67,12 +69,12 @@ class AbstractMapDataset(AbstractDataset):
         raise ImplementationError(
             "This method must be implemented in the subclasses")
 
-
     @abstractmethod
     def has_grass_timestamp(self):
         """Check if a grass file based time stamp exists for this map.
-            :return: True is the grass file based time stamped exists for this
-                    map
+
+        :return: True is the grass file based time stamped exists for this map
+
         """
 
     @abstractmethod
@@ -111,9 +113,9 @@ class AbstractMapDataset(AbstractDataset):
         """Convert the valid time into a grass datetime library
            compatible timestamp string
 
-            This methods works for relative and absolute time
+           This methods works for relative and absolute time
 
-            :return: the grass timestamp string
+           :return: the grass timestamp string
         """
         start = ""
 
@@ -151,15 +153,16 @@ class AbstractMapDataset(AbstractDataset):
     def build_id(name, mapset, layer=None):
         """Convenient method to build the unique identifier
 
-            Existing layer and mapset definitions in the name
-            string will be reused
+           Existing layer and mapset definitions in the name
+           string will be reused
 
            :param name: The name of the map
            :param mapset: The mapset in which the map is located
            :param layer: The layer of the vector map, use None in case no
-                                 layer exists
+                         layer exists
 
-           :return: the id of the map as "name(:layer)@mapset" while layer is optional
+           :return: the id of the map as "name(:layer)@mapset" while layer is
+                    optional
         """
 
         # Check if the name includes any mapset
@@ -182,7 +185,6 @@ class AbstractMapDataset(AbstractDataset):
         """
         return self.base.get_layer()
 
-
     def print_self(self):
         """Print the content of the internal structure to stdout"""
         self.base.print_self()
@@ -262,10 +264,10 @@ class AbstractMapDataset(AbstractDataset):
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
            :return: The SQL insert statement in case execute=False, or an
-                        empty string otherwise
+                    empty string otherwise
         """
         if get_enable_timestamp_write():
             self.write_timestamp_to_grass()
@@ -281,10 +283,10 @@ class AbstractMapDataset(AbstractDataset):
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
            :return: The SQL insert statement in case execute=False, or an
-                        empty string otherwise
+                    empty string otherwise
         """
         if get_enable_timestamp_write():
             self.write_timestamp_to_grass()
@@ -300,10 +302,11 @@ class AbstractMapDataset(AbstractDataset):
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
-            :return: The SQL insert statement in case execute=False, or an
-                         empty string otherwise
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
+           :return: The SQL insert statement in case execute=False, or an
+                    empty string otherwise
+
         """
         if get_enable_timestamp_write():
             self.write_timestamp_to_grass()
@@ -326,51 +329,58 @@ class AbstractMapDataset(AbstractDataset):
             This method only modifies this object and does not commit
             the modifications to the temporal database.
 
-           :param start_time: A datetime object specifying the start time of the map
-           :param end_time: A datetime object specifying the end time of the map, None in case or time instance
+           :param start_time: A datetime object specifying the start time of
+                              the map
+           :param end_time: A datetime object specifying the end time of the
+                            map, None in case or time instance
 
            :return: True for success and False otherwise
         """
         if start_time and not isinstance(start_time, datetime):
             if self.get_layer() is not None:
-                self.msgr.error(_("Start time must be of type datetime for %(type)s"
-                             " map <%(id)s> with layer: %(l)s") % {
-                             'type': self.get_type(), 'id': self.get_map_id(),
-                             'l': self.get_layer()})
+                self.msgr.error(_("Start time must be of type datetime for "
+                                  "%(type)s map <%(id)s> with layer: %(l)s") %
+                                {'type': self.get_type(),
+                                 'id': self.get_map_id(),
+                                 'l': self.get_layer()})
                 return False
             else:
                 self.msgr.error(_("Start time must be of type datetime for "
-                                         "%(type)s map <%(id)s>") % {
-                                         'type': self.get_type(), 'id': self.get_map_id()})
+                                  "%(type)s map <%(id)s>") %
+                                {'type': self.get_type(),
+                                 'id': self.get_map_id()})
                 return False
 
         if end_time and not isinstance(end_time, datetime):
             if self.get_layer():
-                self.msgr.error(_("End time must be of type datetime for %(type)s "
-                             "map <%(id)s> with layer: %(l)s") % {
-                             'type': self.get_type(), 'id': self.get_map_id(),
-                             'l': self.get_layer()})
+                self.msgr.error(_("End time must be of type datetime for "
+                                  "%(type)s map <%(id)s> with layer: %(l)s") %
+                                {'type': self.get_type(),
+                                 'id': self.get_map_id(),
+                                 'l': self.get_layer()})
                 return False
             else:
                 self.msgr.error(_("End time must be of type datetime for "
-                             "%(type)s map <%(id)s>") % {
-                             'type': self.get_type(), 'id': self.get_map_id()})
+                                  "%(type)s map <%(id)s>") %
+                                {'type': self.get_type(),
+                                 'id': self.get_map_id()})
                 return False
 
         if start_time is not None and end_time is not None:
             if start_time > end_time:
                 if self.get_layer():
-                    self.msgr.error(_("End time must be greater than start time for"
-                                 " %(type)s map <%(id)s> with layer: %(l)s") % {
-                                 'type': self.get_type(),
-                                 'id': self.get_map_id(),
-                                 'l': self.get_layer()})
+                    self.msgr.error(_("End time must be greater than start "
+                                      "time for %(type)s map <%(id)s> with "
+                                      "layer: %(l)s") %
+                                    {'type': self.get_type(),
+                                     'id': self.get_map_id(),
+                                     'l': self.get_layer()})
                     return False
                 else:
-                    self.msgr.error(_("End time must be greater than start time "
-                                 "for %(type)s map <%(id)s>") % {
-                                 'type': self.get_type(),
-                                 'id': self.get_map_id()})
+                    self.msgr.error(_("End time must be greater than start "
+                                      "time for %(type)s map <%(id)s>") %
+                                    {'type': self.get_type(),
+                                     'id': self.get_map_id()})
                     return False
             else:
                 # Do not create an interval in case start and end time are
@@ -402,10 +412,11 @@ class AbstractMapDataset(AbstractDataset):
            """
 
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
-
+            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         if self.set_absolute_time(start_time, end_time):
             dbif, connected = init_dbif(dbif)
@@ -430,35 +441,39 @@ class AbstractMapDataset(AbstractDataset):
            :param start_time: An integer value
            :param end_time: An integer value, None in case or time instance
            :param unit: The unit of the relative time. Supported units:
-                                year(s), month(s), day(s), hour(s), minute(s), second(s)
+                        year(s), month(s), day(s), hour(s), minute(s), second(s)
 
            :return: True for success and False otherwise
 
         """
         if not self.check_relative_time_unit(unit):
             if self.get_layer() is not None:
-                self.msgr.error(_("Unsupported relative time unit type for %(type)s"
-                             " map <%(id)s> with layer %(l)s: %(u)s") % {
-                             'type': self.get_type(), 'id': self.get_id(),
-                             'l': self.get_layer(), 'u': unit})
+                self.msgr.error(_("Unsupported relative time unit type for "
+                                  "%(type)s map <%(id)s> with layer %(l)s: "
+                                  "%(u)s") % {'type': self.get_type(),
+                                              'id': self.get_id(),
+                                              'l': self.get_layer(),
+                                              'u': unit})
             else:
-                self.msgr.error(_("Unsupported relative time unit type for %(type)s"
-                             " map <%(id)s>: %(u)s") % {
-                             'type': self.get_type(), 'id': self.get_id(),
-                             'u': unit})
+                self.msgr.error(_("Unsupported relative time unit type for "
+                                  "%(type)s map <%(id)s>: %(u)s") %
+                                {'type': self.get_type(), 'id': self.get_id(),
+                                 'u': unit})
             return False
 
         if start_time is not None and end_time is not None:
             if int(start_time) > int(end_time):
                 if self.get_layer() is not None:
-                    self.msgr.error(_("End time must be greater than start time for"
-                                 " %(type)s map <%(id)s> with layer %(l)s") % \
-                                 {'type': self.get_type(), 'id': self.get_id(),
-                                  'l': self.get_layer()})
+                    self.msgr.error(_("End time must be greater than start "
+                                      "time for %(typ)s map <%(id)s> with "
+                                      "layer %(l)s") % {'typ': self.get_type(),
+                                                        'id': self.get_id(),
+                                                        'l': self.get_layer()})
                 else:
-                    self.msgr.error(_("End time must be greater than start time for"
-                                 " %(type)s map <%(id)s>") % {
-                                 'type': self.get_type(), 'id': self.get_id()})
+                    self.msgr.error(_("End time must be greater than start "
+                                      "time for %(type)s map <%(id)s>") %
+                                    {'type': self.get_type(),
+                                     'id': self.get_id()})
                 return False
             else:
                 # Do not create an interval in case start and end time are
@@ -493,9 +508,11 @@ class AbstractMapDataset(AbstractDataset):
            :param dbif: The database interface to be used
         """
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         if self.set_relative_time(start_time, end_time, unit):
             dbif, connected = init_dbif(dbif)
@@ -509,12 +526,14 @@ class AbstractMapDataset(AbstractDataset):
                 self.write_timestamp_to_grass()
 
     def set_temporal_extent(self, extent):
-        """Convenient method to set the temporal extent from a temporal extent object
+        """Convenient method to set the temporal extent from a temporal extent
+           object
 
-           :param temporal_extent: The temporal extent that should be set for this object
+           :param temporal_extent: The temporal extent that should be set for
+                                   this object
 
            .. code-block: : python
-           
+
                >>> import datetime
                >>> import grass.temporal as tgis
                >>> map      = tgis.RasterDataset(None)
@@ -563,11 +582,12 @@ class AbstractMapDataset(AbstractDataset):
            Unit can be year, years, month, months, day, days, hour, hours,
            minute, minutes, day or days.
 
-           :param increment: This is the increment, a string in case of absolute
-                                         time or an integer in case of relative time
+           :param increment: This is the increment, a string in case of
+                             absolute time or an integer in case of relative
+                             time
            :param update: Perform an immediate database update to store the
-                                    modified temporal extent, otherwise only this object
-                                    will be modified
+                          modified temporal extent, otherwise only this object
+                          will be modified
 
            Usage:
 
@@ -635,7 +655,7 @@ class AbstractMapDataset(AbstractDataset):
             start, end = self.get_absolute_time()
 
             new_start = decrement_datetime_by_string(start, increment)
-            if end == None:
+            if end is None:
                 new_end = increment_datetime_by_string(start, increment)
             else:
                 new_end = increment_datetime_by_string(end, increment)
@@ -647,7 +667,7 @@ class AbstractMapDataset(AbstractDataset):
         else:
             start, end, unit = self.get_relative_time()
             new_start = start - increment
-            if end == None:
+            if end is None:
                 new_end = start + increment
             else:
                 new_end = end + increment
@@ -657,7 +677,8 @@ class AbstractMapDataset(AbstractDataset):
             else:
                 self.set_relative_time(new_start, new_end, unit)
 
-    def set_spatial_extent_from_values(self, north, south, east, west, top=0, bottom=0):
+    def set_spatial_extent_from_values(self, north, south, east, west, top=0,
+                                       bottom=0):
         """Set the spatial extent of the map from values
 
             This method only modifies this object and does not commit
@@ -679,7 +700,8 @@ class AbstractMapDataset(AbstractDataset):
             This method only modifies this object and does not commit
             the modifications to the temporal database.
 
-            :param spatial_extent: An object of type SpatialExtent or its subclasses
+            :param spatial_extent: An object of type SpatialExtent or its
+                                   subclasses
 
            .. code-block: : python
 
@@ -711,12 +733,12 @@ class AbstractMapDataset(AbstractDataset):
                (20.0, -20.0, 30.0, -30.0, 15.0, -15.0)
 
         """
-        self.spatial_extent.north   += size
-        self.spatial_extent.south   -= size
-        self.spatial_extent.east    += size
-        self.spatial_extent.west    -= size
-        self.spatial_extent.top     += size
-        self.spatial_extent.bottom  -= size
+        self.spatial_extent.north += size
+        self.spatial_extent.south -= size
+        self.spatial_extent.east += size
+        self.spatial_extent.west -= size
+        self.spatial_extent.top += size
+        self.spatial_extent.bottom -= size
 
         if update:
             self.spatial_extent.update(dbif)
@@ -738,10 +760,10 @@ class AbstractMapDataset(AbstractDataset):
                (20.0, -20.0, 30.0, -30.0, 5.0, -5.0)
 
         """
-        self.spatial_extent.north   += size
-        self.spatial_extent.south   -= size
-        self.spatial_extent.east    += size
-        self.spatial_extent.west    -= size
+        self.spatial_extent.north += size
+        self.spatial_extent.south -= size
+        self.spatial_extent.east += size
+        self.spatial_extent.west -= size
 
         if update:
             self.spatial_extent.update(dbif)
@@ -749,7 +771,7 @@ class AbstractMapDataset(AbstractDataset):
     def check_for_correct_time(self):
         """Check for correct time"""
         if self.is_time_absolute():
-            start, end= self.get_absolute_time()
+            start, end = self.get_absolute_time()
         else:
             start, end, unit = self.get_relative_time()
 
@@ -757,19 +779,20 @@ class AbstractMapDataset(AbstractDataset):
             if end is not None:
                 if start >= end:
                     if self.get_layer() is not None:
-                        self.msgr.error(_("Map <%(id)s> with layer %(layer)s has "
-                                     "incorrect time interval, start time is "
-                                     "greater than end time") % {
-                                     'id': self.get_map_id(),
-                                     'layer': self.get_layer()})
+                        self.msgr.error(_("Map <%(id)s> with layer %(layer)s "
+                                          "has incorrect time interval, start "
+                                          "time is greater than end time") %
+                                        {'id': self.get_map_id(),
+                                         'layer': self.get_layer()})
                     else:
-                        self.msgr.error(_("Map <%s> has incorrect time interval, "
-                                     "start time is greater than end time") % \
-                                   (self.get_map_id()))
+                        self.msgr.error(_("Map <%s> has incorrect time "
+                                          "interval, start time is greater "
+                                          "than end time") %
+                                         (self.get_map_id()))
                     return False
         else:
             self.msgr.error(_("Map <%s> has incorrect start time") %
-                       (self.get_map_id()))
+                             (self.get_map_id()))
             return False
 
         return True
@@ -778,28 +801,30 @@ class AbstractMapDataset(AbstractDataset):
         """Delete a map entry from database if it exists
 
             Remove dependent entries:
-            
+
             - Remove the map entry in each space time dataset in which this map
               is registered
             - Remove the space time dataset register table
 
            :param dbif: The database interface to be used
            :param update: Call for each unregister statement the update from
-                                    registered maps of the space time dataset.
-                                    This can slow down the un-registration process
-                                    significantly.
-           :param execute: If True the SQL DELETE and DROP table statements will
-                                      be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
+                          registered maps of the space time dataset.
+                          This can slow down the un-registration process
+                          significantly.
+           :param execute: If True the SQL DELETE and DROP table statements
+                           will be executed.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
 
            :return: The SQL statements if execute=False, else an empty string,
-                   None in case of a failure
+                    None in case of a failure
         """
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to delete dataset <%(ds)s> of type %(type)s from the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+            self.msgr.fatal(_("Unable to delete dataset <%(ds)s> of type "
+                              "%(type)s from the temporal database. The mapset"
+                              " of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         dbif, connected = init_dbif(dbif)
         statement = ""
@@ -813,8 +838,8 @@ class AbstractMapDataset(AbstractDataset):
             statement += self.unregister(
                 dbif=dbif, update=update, execute=False)
 
-            self.msgr.verbose(_("Delete %s dataset <%s> from temporal database")
-                         % (self.get_type(), self.get_id()))
+            self.msgr.verbose(_("Delete %s dataset <%s> from temporal "
+                                "database") % (self.get_type(), self.get_id()))
 
             # Delete yourself from the database, trigger functions will
             # take care of dependencies
@@ -840,31 +865,34 @@ class AbstractMapDataset(AbstractDataset):
 
            :param dbif: The database interface to be used
            :param update: Call for each unregister statement the update from
-                                     registered maps of the space time dataset. This can
-                                     slow down the un-registration process significantly.
+                          registered maps of the space time dataset. This can
+                          slow down the un-registration process significantly.
            :param execute: If True the SQL DELETE and DROP table statements
-                                      will be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
+                           will be executed.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
 
            :return: The SQL statements if execute=False, else an empty string
         """
 
-
         if self.get_layer() is not None:
             self.msgr.debug(1, "Unregister %(type)s map <%(map)s> with "
-                           "layer %(layer)s from space time datasets" % \
-                         {'type':self.get_type(), 'map':self.get_map_id(),
-                          'layer':self.get_layer()})
+                               "layer %(layer)s from space time datasets" %
+                               {'type': self.get_type(),
+                                'map': self.get_map_id(),
+                                'layer': self.get_layer()})
         else:
             self.msgr.debug(1, "Unregister %(type)s map <%(map)s> "
-                           "from space time datasets"
-                         % {'type':self.get_type(), 'map':self.get_map_id()})
+                               "from space time datasets" % {
+                               'type': self.get_type(),
+                               'map': self.get_map_id()})
 
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to unregister dataset <%(ds)s> of type %(type)s from the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+            self.msgr.fatal(_("Unable to unregister dataset <%(ds)s> of type "
+                              "%(type)s from the temporal database. The mapset"
+                              " of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         statement = ""
         dbif, connected = init_dbif(dbif)
@@ -882,7 +910,7 @@ class AbstractMapDataset(AbstractDataset):
                     statement += stds.unregister_map(self, dbif, False)
                     # Take care to update the space time dataset after
                     # the map has been unregistered
-                    if update == True and execute == True:
+                    if update is True and execute is True:
                         stds.update_from_registered_maps(dbif)
 
         if execute:
@@ -924,9 +952,9 @@ class AbstractMapDataset(AbstractDataset):
            :param stds_id: The id of the space time dataset to be registered
            :param dbif: The database interface to be used
            :param execute: If True the SQL INSERT table statements
-                                      will be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
+                           will be executed.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
 
            :return: The SQL statements if execute=False, else an empty string
         """
@@ -963,16 +991,16 @@ class AbstractMapDataset(AbstractDataset):
 
         return statement
 
-
     def remove_stds_from_register(self, stds_id, dbif=None, execute=True):
         """Remove a space time dataset from the register
 
-           :param stds_id: The id of the space time dataset to removed from the registered
+           :param stds_id: The id of the space time dataset to removed from
+                           the registered
            :param dbif: The database interface to be used
            :param execute: If True the SQL INSERT table statements
-                                      will be executed.
-                                      If False the prepared SQL statements are
-                                      returned and must be executed by the caller.
+                           will be executed.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
 
            :return: The SQL statements if execute=False, else an empty string
         """

+ 201 - 155
lib/python/temporal/abstract_space_time_dataset.py

@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 """
-The abstract_space_time_dataset module provides the AbstractSpaceTimeDataset class
-that is the base class for all space time datasets.
+The abstract_space_time_dataset module provides the AbstractSpaceTimeDataset
+class that is the base class for all space time datasets.
 
 (C) 2011-2013 by the GRASS Development Team
 This program is free software under the GNU General Public
@@ -18,11 +18,12 @@ from spatio_temporal_relationships import *
 
 ###############################################################################
 
+
 class AbstractSpaceTimeDataset(AbstractDataset):
     """Abstract space time dataset class
 
-       Base class for all space time datasets. 
-        
+       Base class for all space time datasets.
+
        This class represents an abstract space time dataset. Convenient functions
        to select, update, insert or delete objects of this type in the SQL
        temporal database exists as well as functions to register or unregister
@@ -51,9 +52,9 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
             :return: The name of the map register table
         """
-        
+
         uuid_rand = str(uuid.uuid4()).replace("-",  "")
-        
+
         table_name = self.get_new_map_instance(None).get_type() + "_map_register_" + uuid_rand
         return table_name
 
@@ -161,14 +162,15 @@ class AbstractSpaceTimeDataset(AbstractDataset):
     def set_aggregation_type(self, aggregation_type):
         """Set the aggregation type of the space time dataset
 
-           :param aggregation_type The aggregation type of the space time dataset
+           :param aggregation_type: The aggregation type of the space time
+                                    dataset
         """
         self.metadata.set_aggregation_type(aggregation_type)
 
     def update_command_string(self, dbif=None):
         """Append the current command string to any existing command string
            in the metadata class and calls metadata update
-           
+
            :param dbif: The database interface to be used
         """
 
@@ -248,8 +250,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
            Granularity can be of absolute time or relative time.
            In case of absolute time a string containing an integer
-           value and the time unit (years, months, days, hours, minuts, seconds).
-           In case of relative time an integer value is expected.
+           value and the time unit (years, months, days, hours, minuts,
+           seconds). In case of relative time an integer value is expected.
 
            :return: The granularity
         """
@@ -264,8 +266,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
            Granularity can be of absolute time or relative time.
            In case of absolute time a string containing an integer
-           value and the time unit (years, months, days, hours, minuts, seconds).
-           In case of relative time an integer value is expected.
+           value and the time unit (years, months, days, hours, minuts,
+           seconds). In case of relative time an integer value is expected.
 
            This method only modifies this object and does not commit
            the modifications to the temporal database.
@@ -317,10 +319,10 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
            :param dbif: The database interface to be used
            :param execute: If True the SQL statements will be executed.
-                          If False the prepared SQL statements are
-                          returned and must be executed by the caller.
+                           If False the prepared SQL statements are
+                           returned and must be executed by the caller.
            :return: The SQL insert statement in case execute=False, or an
-                        empty string otherwise
+                    empty string otherwise
         """
 
         dbif, connected = init_dbif(dbif)
@@ -342,21 +344,24 @@ class AbstractSpaceTimeDataset(AbstractDataset):
             # Read the SQL template
             sql = open(os.path.join(sql_path,
                                     "stds_map_register_table_template.sql"),
-                                    'r').read()
+                       'r').read()
 
             # Create a raster, raster3d or vector tables
             sql = sql.replace("SPACETIME_REGISTER_TABLE", stds_register_table)
             statement += sql
 
             if dbif.get_dbmi().__name__ == "sqlite3":
-                statement += "CREATE INDEX %s_index ON %s (id);"%(stds_register_table, stds_register_table)
+                statement += "CREATE INDEX %s_index ON %s (id);" % \
+                             (stds_register_table, stds_register_table)
 
             # Set the map register table name
             self.set_map_register(stds_register_table)
 
             self.msgr.debug(1, _("Created register table <%s> for space "
-                           "time %s  dataset <%s>") %
-                          (stds_register_table, self.get_new_map_instance(None).get_type(), self.get_id()))
+                                 "time %s  dataset <%s>") %
+                                (stds_register_table,
+                                 self.get_new_map_instance(None).get_type(),
+                                 self.get_id()))
 
         statement += AbstractDataset.insert(self, dbif=dbif, execute=False)
 
@@ -447,12 +452,12 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         """Print the spatio-temporal relationships for each map of the space
            time dataset or for each map of the optional list of maps
 
-           :param maps: a ordered by start_time list of map objects, if None the
-                       registred maps of the space time dataset are used
+           :param maps: a ordered by start_time list of map objects, if None
+                        the registred maps of the space time dataset are used
            :param spatial: This indicates if the spatial topology is created as
-                          well: spatial can be None (no spatial topology), "2D"
-                          using west, east, south, north or "3D" using west,
-                          east, south, north, bottom, top
+                           well: spatial can be None (no spatial topology),
+                           "2D" using west, east, south, north or "3D" using
+                           west, east, south, north, bottom, top
            :param dbif: The database interface to be used
         """
 
@@ -461,7 +466,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                 where=None, order="start_time", dbif=dbif)
 
         print_spatio_temporal_topology_relationships(maps1=maps, maps2=maps,
-                                              spatial=spatial, dbif=dbif)
+                                                     spatial=spatial,
+                                                     dbif=dbif)
 
     def count_temporal_relations(self, maps=None, dbif=None):
         """Count the temporal relations between the registered maps.
@@ -518,9 +524,10 @@ class AbstractSpaceTimeDataset(AbstractDataset):
             maps = self.get_registered_maps_as_objects(
                 where=None, order="start_time", dbif=dbif)
 
-        relations = count_temporal_topology_relationships(maps1=maps, dbif=dbif)
+        relations = count_temporal_topology_relationships(maps1=maps,
+                                                          dbif=dbif)
 
-        if relations == None:
+        if relations is None:
             return False
 
         map_time = self.get_map_time()
@@ -600,11 +607,11 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                          In case of mixed interval data the user can choose
                          between:
 
-                  - Example ["start", "during", "equals"] 
+                  - Example ["start", "during", "equals"]
 
                   - start: Select maps of which the start time is
                     located in the selection granule::
-                    
+
                         map    :        s
                         granule:  s-----------------e
 
@@ -616,13 +623,14 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
                   - contains: Select maps which are temporal
                      during the selection granule::
-                     
+
                         map    :     s-----------e
                         granule:  s-----------------e
 
                   - overlap: Select maps which temporal overlap
-                    the selection granule, this includes overlaps and overlapped::
-                    
+                    the selection granule, this includes overlaps and
+                    overlapped::
+
                         map    :     s-----------e
                         granule:        s-----------------e
 
@@ -631,25 +639,25 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
                   - during: Select maps which temporally contains
                     the selection granule::
-                    
+
                         map    :  s-----------------e
                         granule:     s-----------e
 
                   - equals: Select maps which temporally equal
                     to the selection granule::
-                    
+
                         map    :  s-----------e
                         granule:  s-----------e
 
                   - follows: Select maps which temporally follow
                     the selection granule::
-                    
+
                         map    :              s-----------e
                         granule:  s-----------e
 
                   - precedes: Select maps which temporally precedes
                     the selection granule::
-                    
+
                         map    :  s-----------e
                         granule:              s-----------e
 
@@ -668,12 +676,12 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         if self.get_temporal_type() != stds.get_temporal_type():
             self.msgr.error(_("The space time datasets must be of "
-                         "the same temporal type"))
+                              "the same temporal type"))
             return None
 
         if stds.get_map_time() != "interval":
             self.msgr.error(_("The temporal map type of the sample "
-                         "dataset must be interval"))
+                              "dataset must be interval"))
             return None
 
         dbif, connected = init_dbif(dbif)
@@ -705,10 +713,10 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                 relations.append("contains")
 
         # Remove start, equal, contain and overlap
-        relations = [relation.upper().strip() for relation in relations if relation \
-                    not in ["start", "overlap", "contain"]]
+        relations = [relation.upper().strip() for relation in relations
+                     if relation not in ["start", "overlap", "contain"]]
 
-        #print(relations)
+        #  print(relations)
 
         tb = SpatioTemporalTopologyBuilder()
         if spatial:
@@ -755,8 +763,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         return obj_list
 
-
-    def sample_by_dataset_sql(self, stds, method=None, spatial=False, dbif=None):
+    def sample_by_dataset_sql(self, stds, method=None, spatial=False,
+                              dbif=None):
         """Sample this space time dataset with the temporal topology
            of a second space time dataset using SQL queries.
 
@@ -780,7 +788,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
            the list of samples are stored:
 
            .. code-block:: python
-           
+
                list = self.sample_by_dataset(stds=sampler, method=[
                    "during","overlap","contain","equal"])
                for entry in list:
@@ -809,7 +817,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                          point type, only the start time is used for sampling.
                          In case of mixed interval data the user can choose
                          between:
-                         
+
                   - Example ["start", "during", "equals"]
 
                   - start: Select maps of which the start time is
@@ -831,7 +839,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                         granule:  s-----------------e
 
                   - overlap: Select maps which temporal overlap
-                    the selection granule, this includes overlaps and overlapped::
+                    the selection granule, this includes overlaps and
+                    overlapped::
 
                         map    :     s-----------e
                         granule:        s-----------------e
@@ -909,12 +918,12 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         if self.get_temporal_type() != stds.get_temporal_type():
             self.msgr.error(_("The space time datasets must be of "
-                         "the same temporal type"))
+                              "the same temporal type"))
             return None
 
         if stds.get_map_time() != "interval":
             self.msgr.error(_("The temporal map type of the sample "
-                         "dataset must be interval"))
+                              "dataset must be interval"))
             return None
 
         # In case points of time are available, disable the interval specific
@@ -1080,7 +1089,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
            time are supported.
 
            The temporal topology search order is as follows:
-           
+
            1. Maps that are equal to the actual granule are used
            2. If no equal map is found, maps that contain the actual granule
               are used
@@ -1192,11 +1201,12 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                 next = start + gran
 
             map = first.get_new_instance(None)
-            map.set_spatial_extent_from_values(0,0,0,0,0,0)
+            map.set_spatial_extent_from_values(0, 0, 0, 0, 0, 0)
             if first.is_time_absolute():
                 map.set_absolute_time(start, next)
             else:
-                map.set_relative_time(start, next, first.get_relative_time_unit())
+                map.set_relative_time(start, next,
+                                      first.get_relative_time_unit())
 
             gap_list.append(copy.copy(map))
             start = next
@@ -1204,13 +1214,14 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         tb = SpatioTemporalTopologyBuilder()
         tb.build(gap_list, maps)
 
-        relations_order = ["EQUAL", "DURING", "OVERLAPS", "OVERLAPPED", "CONTAINS"]
+        relations_order = ["EQUAL", "DURING", "OVERLAPS", "OVERLAPPED",
+                           "CONTAINS"]
 
         gran_list = []
         for gap in gap_list:
             # If not temporal relations then gap
             if not gap.get_temporal_relations():
-                gran_list.append([gap,])
+                gran_list.append([gap, ])
             else:
                 relations = gap.get_temporal_relations()
 
@@ -1230,7 +1241,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                         new_maps.append(new_map)
                     gran_list.append(new_maps)
                 else:
-                    gran_list.append([gap,])
+                    gran_list.append([gap, ])
 
         if gran_list:
             return gran_list
@@ -1264,7 +1275,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         maps = self.get_registered_maps_as_objects(where, "start_time", dbif)
 
-        if maps  is not None and len(maps) > 0:
+        if maps is not None and len(maps) > 0:
             for i in range(len(maps)):
                 obj_list.append(maps[i])
                 # Detect and insert gaps
@@ -1286,7 +1297,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                         elif self.is_time_relative():
                             map.set_relative_time(start, end,
                                                  self.get_relative_time_unit())
-                        map.set_spatial_extent_from_values(0,0,0,0,0,0)
+                        map.set_spatial_extent_from_values(0, 0, 0, 0, 0, 0)
                         obj_list.append(copy.copy(map))
 
         if connected:
@@ -1372,7 +1383,6 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         rows = self.get_registered_maps(columns, where, order, dbif)
 
-
         if rows is not None:
             for row in rows:
                 map = self.get_new_map_instance(row["id"])
@@ -1448,8 +1458,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
             except:
                 if connected:
                     dbif.close()
-                self.msgr.error(_("Unable to get map ids from register table <%s>")
-                           % (self.get_map_register()))
+                self.msgr.error(_("Unable to get map ids from register table "
+                                  "<%s>") % (self.get_map_register()))
                 raise
 
         if connected:
@@ -1557,10 +1567,13 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                    granularity
 
         """
-        if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to shift dataset <%(ds)s> of type %(type)s in the temporal database."
-            " The mapset of the dataset does not match the current mapset")%\
-            ({"ds":self.get_id()}, {"type":self.get_type()}))
+        if get_enable_mapset_check() is True and \
+           self.get_mapset() != get_current_mapset():
+            self.msgr.fatal(_("Unable to shift dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % ({"ds": self.get_id()},
+                                           {"type": self.get_type()}))
 
         if not check_granularity_string(gran, self.get_temporal_type()):
             self.msgr.error(_("Wrong granularity format: %s" % (gran)))
@@ -1723,10 +1736,13 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         """
 
-        if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to snap dataset <%(ds)s> of type %(type)s in the temporal database."
-            " The mapset of the dataset does not match the current mapset")%\
-            ({"ds":self.get_id()}, {"type":self.get_type()}))
+        if get_enable_mapset_check() is True and \
+           self.get_mapset() != get_current_mapset():
+            self.msgr.fatal(_("Unable to snap dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % ({"ds": self.get_id()},
+                                           {"type": self.get_type()}))
 
         dbif, connected = init_dbif(dbif)
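
The two hunks above tighten the mapset checks in shift() and snap(). A minimal
sketch of how these operations are typically driven from Python, assuming an
initialized GRASS session; the dataset name "precip_daily" is made up and the
granularity string follows the check_granularity_string() format referenced
above.

.. code-block:: python

    import grass.temporal as tgis

    tgis.init()

    # Hypothetical space time raster dataset in the current mapset
    strds = tgis.open_old_stds("precip_daily", type="strds")

    # Shift the dataset and all registered maps by one granule, then snap
    # end times to the start time of the temporal successor to close gaps
    strds.shift(gran="1 day")
    strds.snap()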
 
@@ -1754,8 +1770,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         # granularity if the end time is None
         if end is None:
             if self.is_time_absolute():
-                end =  increment_datetime_by_string(start,
-                                                    self.get_granularity())
+                end = increment_datetime_by_string(start,
+                                                   self.get_granularity())
             elif self.is_time_relative():
                 end = start + self.get_granularity()
 
@@ -1824,14 +1840,17 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         """
 
         if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to rename dataset <%(ds)s> of type %(type)s in the temporal database."
-            " The mapset of the dataset does not match the current mapset")%\
-            ({"ds":self.get_id()}, {"type":self.get_type()}))
+            self.msgr.fatal(_("Unable to rename dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset "
+                              "of the dataset does not match the current "
+                              "mapset") % ({"ds": self.get_id()},
+                                           {"type": self.get_type()}))
 
         dbif, connected = init_dbif(dbif)
 
         if dbif.get_dbmi().__name__ != "sqlite3":
-            self.msgr.fatal(_("Renaming of space time datasets is not supported for PostgreSQL."))
+            self.msgr.fatal(_("Renaming of space time datasets is not "
+                              "supported for PostgreSQL."))
 
         # SELECT all needed information from the database
         self.select(dbif)
@@ -1856,14 +1875,14 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         statement = self.update(dbif, execute=False, ident=old_ident)
 
         # We need to rename the raster register table
-        statement += "ALTER TABLE %s RENAME TO \"%s\";\n" % (old_map_register_table,
-                                                             new_map_register_table)
+        statement += "ALTER TABLE %s RENAME TO \"%s\";\n" % \
+                     (old_map_register_table, new_map_register_table)
 
         # We need to take care of the stds index in the sqlite3 database
         if dbif.get_dbmi().__name__ == "sqlite3":
             statement += "DROP INDEX %s_index;\n" % (old_map_register_table)
-            statement += "CREATE INDEX %s_index ON %s (id);"%(new_map_register_table,
-                                                              new_map_register_table)
+            statement += "CREATE INDEX %s_index ON %s (id);" % \
+                         (new_map_register_table, new_map_register_table)
 
         # We need to rename the space time dataset in the maps register table
         if maps:
@@ -1889,20 +1908,23 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                           If False the prepared SQL statements are returned
                           and must be executed by the caller.
 
-           :return: The SQL statements if execute == False, else an empty string
+           :return: The SQL statements if execute == False, else an empty
+                    string
         """
         # First we need to check if maps are registered in this dataset and
         # unregister them
 
         self.msgr.verbose(_("Delete space time %s  dataset <%s> from temporal "
-                      "database") % \
-                    (self.get_new_map_instance(ident=None).get_type(),
-                     self.get_id()))
+                            "database") % (self.get_new_map_instance(ident=None).get_type(),
+                                           self.get_id()))
 
-        if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to delete dataset <%(ds)s> of type %(type)s from the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+        if get_enable_mapset_check() is True and \
+           self.get_mapset() != get_current_mapset():
+            self.msgr.fatal(_("Unable to delete dataset <%(ds)s> of type "
+                              "%(type)s from the temporal database. The mapset"
+                              " of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
         statement = ""
         dbif, connected = init_dbif(dbif)
@@ -1972,7 +1994,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
             if row is not None and row[0] == map_id:
                 is_registered = True
 
-        if connected == True:
+        if connected is True:
             dbif.close()
 
         return is_registered
@@ -1993,37 +2015,43 @@ class AbstractSpaceTimeDataset(AbstractDataset):
            :return: True if success, False otherwise
         """
 
-        if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to register map in dataset <%(ds)s> of type %(type)s."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+        if get_enable_mapset_check() is True and \
+           self.get_mapset() != get_current_mapset():
+            self.msgr.fatal(_("Unable to register map in dataset <%(ds)s> of "
+                              "type %(type)s. The mapset of the dataset does "
+                              "not match the current mapset") %
+                            {"ds": self.get_id(), "type": self.get_type()})
 
         dbif, connected = init_dbif(dbif)
 
-        if map.is_in_db(dbif) == False:
+        if map.is_in_db(dbif) is False:
             dbif.close()
-            self.msgr.fatal(_("Only a map that was inserted in the temporal database"
-                              " can be registered in a space time dataset"))
+            self.msgr.fatal(_("Only a map that was inserted in the temporal "
+                              "database can be registered in a space time "
+                              "dataset"))
 
         if map.get_layer():
             self.msgr.debug(1, "Register %s map <%s> with layer %s in space "
-                           "time %s dataset <%s>" % (map.get_type(),
-                                                     map.get_map_id(),
-                                                     map.get_layer(),
-                                                     map.get_type(),
-                                                     self.get_id()))
+                               "time %s dataset <%s>" % (map.get_type(),
+                                                         map.get_map_id(),
+                                                         map.get_layer(),
+                                                         map.get_type(),
+                                                         self.get_id()))
         else:
             self.msgr.debug(1, "Register %s map <%s> in space time %s "
-                           "dataset <%s>" % (map.get_type(), map.get_map_id(),
-                                              map.get_type(), self.get_id()))
+                               "dataset <%s>" % (map.get_type(),
+                                                 map.get_map_id(),
+                                                 map.get_type(),
+                                                 self.get_id()))
 
         # First select all data from the database
         map.select(dbif)
 
         if not map.check_for_correct_time():
             if map.get_layer():
-                self.msgr.fatal(_("Map <%(id)s> with layer %(l)s has invalid time")
-                           % {'id': map.get_map_id(), 'l': map.get_layer()})
+                self.msgr.fatal(_("Map <%(id)s> with layer %(l)s has invalid "
+                                  "time") % {'id': map.get_map_id(),
+                                             'l': map.get_layer()})
             else:
                 self.msgr.fatal(_("Map <%s> has invalid time") % (map.get_map_id()))
 
@@ -2043,28 +2071,31 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         # Check temporal types
         if stds_ttype != map_ttype:
             if map.get_layer():
-                self.msgr.fatal(_("Temporal type of space time dataset <%(id)s> and"
-                             " map <%(map)s> with layer %(l)s are different") \
-                             % {'id': self.get_id(), 'map': map.get_map_id(),
-                                'l': map.get_layer()})
+                self.msgr.fatal(_("Temporal type of space time dataset "
+                                  "<%(id)s> and map <%(map)s> with layer %(l)s"
+                                  " are different") % {'id': self.get_id(),
+                                                       'map': map.get_map_id(),
+                                                       'l': map.get_layer()})
             else:
-                self.msgr.fatal(_("Temporal type of space time dataset <%(id)s> and"
-                             " map <%(map)s> are different") % {
-                             'id': self.get_id(), 'map': map.get_map_id()})
+                self.msgr.fatal(_("Temporal type of space time dataset "
+                                  "<%(id)s> and map <%(map)s> are different")
+                                % {'id': self.get_id(),
+                                   'map': map.get_map_id()})
 
         # In case no map has been registered yet, set the
         # relative time unit from the first map
-        if (self.metadata.get_number_of_maps() is None or \
+        if (self.metadata.get_number_of_maps() is None or
             self.metadata.get_number_of_maps() == 0) and \
-            self.map_counter == 0 and self.is_time_relative():
+           self.map_counter == 0 and self.is_time_relative():
 
             self.set_relative_time_unit(map_rel_time_unit)
             statement += self.relative_time.get_update_all_statement_mogrified(
                 dbif)
 
             self.msgr.debug(1, _("Set temporal unit for space time %s dataset "
-                           "<%s> to %s") % (map.get_type(), self.get_id(),
-                                            map_rel_time_unit))
+                                 "<%s> to %s") % (map.get_type(),
+                                                  self.get_id(),
+                                                  map_rel_time_unit))
 
         stds_rel_time_unit = self.get_relative_time_unit()
 
@@ -2072,14 +2103,14 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         if self.is_time_relative() and (stds_rel_time_unit != map_rel_time_unit):
             if map.get_layer():
                 self.msgr.fatal(_("Relative time units of space time dataset "
-                             "<%(id)s> and map <%(map)s> with layer %(l)s are"
-                             " different") % {'id': self.get_id(),
-                                              'map': map.get_map_id(),
-                                              'l': map.get_layer()})
+                                  "<%(id)s> and map <%(map)s> with layer %(l)s"
+                                  " are different") % {'id': self.get_id(),
+                                                       'map': map.get_map_id(),
+                                                       'l': map.get_layer()})
             else:
                 self.msgr.fatal(_("Relative time units of space time dataset "
-                             "<%(id)s> and map <%(map)s> are different") % \
-                             {'id': self.get_id(), 'map': map.get_map_id()})
+                                  "<%(id)s> and map <%(map)s> are different") %
+                                {'id': self.get_id(), 'map': map.get_map_id()})
 
         if get_enable_mapset_check() is True and stds_mapset != map_mapset:
             dbif.close()
@@ -2088,17 +2119,17 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         # Check if map is already registered
         if self.is_map_registered(map_id, dbif=dbif):
             if map.get_layer() is not None:
-                self.msgr.warning(_("Map <%(map)s> with layer %(l)s is already "
-                                "registered.") % {'map': map.get_map_id(),
-                                                    'l': map.get_layer()})
+                self.msgr.warning(_("Map <%(map)s> with layer %(l)s is already"
+                                    " registered.") % {'map': map.get_map_id(),
+                                                       'l': map.get_layer()})
             else:
-                self.msgr.warning(_("Map <%s> is already registered.") % (
-                                map.get_map_id()))
+                self.msgr.warning(_("Map <%s> is already registered.") %
+                                   (map.get_map_id()))
             return False
 
         # Register the stds in the map stds register table column
         statement += map.add_stds_to_register(stds_id=self.base.get_id(),
-                                                 dbif=dbif, execute=False)
+                                              dbif=dbif, execute=False)
 
         # Now put the raster name in the stds map register table
         if dbif.get_dbmi().paramstyle == "qmark":
@@ -2138,10 +2169,13 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                    string, None in case of a failure
         """
 
-        if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to unregister map from dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+        if get_enable_mapset_check() is True and \
+           self.get_mapset() != get_current_mapset():
+            self.msgr.fatal(_("Unable to unregister map from dataset <%(ds)s>"
+                              " of type %(type)s in the temporal database."
+                              " The mapset of the dataset does not match the"
+                              " current mapset") % {"ds": self.get_id(),
+                                                    "type": self.get_type()})
 
         statement = ""
 
@@ -2151,22 +2185,24 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         if self.is_map_registered(map.get_id(), dbif) is False:
             if map.get_layer() is not None:
                 self.msgr.warning(_("Map <%(map)s> with layer %(l)s is not "
-                                "registered in space time dataset "
-                                "<%(base)s>") % {'map': map.get_map_id(),
-                                'l': map.get_layer(), 'base': self.base.get_id()})
+                                    "registered in space time dataset "
+                                    "<%(base)s>") % {'map': map.get_map_id(),
+                                                     'l': map.get_layer(),
+                                                     'base': self.base.get_id()})
             else:
                 self.msgr.warning(_("Map <%(map)s> is not registered in space "
-                                "time dataset <%(base)s>") % {'map': map.get_map_id(),
-                                'base': self.base.get_id()})
-            if connected == True:
+                                    "time dataset <%(base)s>") %
+                                  {'map': map.get_map_id(),
+                                   'base': self.base.get_id()})
+            if connected is True:
                 dbif.close()
             return ""
 
         # Remove the space time dataset from the dataset register
-        # We need to execute the statement here, otherwise the space time dataset will not be
-        # removed correctly
+        # We need to execute the statement here, otherwise the space time
+        # dataset will not be removed correctly
         map.remove_stds_from_register(self.base.get_id(),
-                                                      dbif=dbif, execute=True)
+                                      dbif=dbif, execute=True)
 
         # Remove the map from the space time dataset register
         stds_register_table = self.get_map_register()
@@ -2177,7 +2213,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                 sql = "DELETE FROM " + \
                     stds_register_table + " WHERE id = %s;\n"
 
-            statement += dbif.mogrify_sql_statement((sql, ( map.get_id(),)))
+            statement += dbif.mogrify_sql_statement((sql, (map.get_id(), )))
 
         if execute:
             dbif.execute_transaction(statement)
@@ -2192,9 +2228,10 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         return statement
 
     def update_from_registered_maps(self, dbif=None):
-        """This methods updates the modification time, the spatial and temporal extent as well as
-           type specific metadata. It should always been called after maps
-           are registered or unregistered/deleted from the space time dataset.
+        """This methods updates the modification time, the spatial and
+           temporal extent as well as type specific metadata. It should always
+           been called after maps are registered or unregistered/deleted from
+           the space time dataset.
 
            The update of the temporal extent checks if the end time is set
            correctly.
@@ -2206,13 +2243,16 @@ class AbstractSpaceTimeDataset(AbstractDataset):
            :param dbif: The database interface to be used
         """
 
-        if get_enable_mapset_check() is True and self.get_mapset() != get_current_mapset():
-            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type %(type)s in the temporal database."
-                         " The mapset of the dataset does not match the current mapset")%\
-                         {"ds":self.get_id(), "type":self.get_type()})
+        if get_enable_mapset_check() is True and \
+           self.get_mapset() != get_current_mapset():
+            self.msgr.fatal(_("Unable to update dataset <%(ds)s> of type "
+                              "%(type)s in the temporal database. The mapset"
+                              " of the dataset does not match the current "
+                              "mapset") % {"ds": self.get_id(),
+                                           "type": self.get_type()})
 
-        self.msgr.verbose(_("Update metadata, spatial and temporal extent from "
-                       "all registered maps of <%s>") % (self.get_id()))
+        self.msgr.verbose(_("Update metadata, spatial and temporal extent from"
+                            " all registered maps of <%s>") % (self.get_id()))
 
         # Nothing to do if the map register is not present
         if not self.get_map_register():
@@ -2230,7 +2270,7 @@ class AbstractSpaceTimeDataset(AbstractDataset):
         sql_path = get_sql_template_path()
         stds_register_table = self.get_map_register()
 
-        #We create a transaction
+        # We create a transaction
         sql_script = ""
 
         # Update the spatial and temporal extent from registered maps
@@ -2249,7 +2289,9 @@ class AbstractSpaceTimeDataset(AbstractDataset):
 
         # Update type specific metadata
         sql = open(os.path.join(sql_path, "update_" +
-            self.get_type() + "_metadata_template.sql"), 'r').read()
+                                          self.get_type() +
+                                          "_metadata_template.sql"),
+                   'r').read()
         sql = sql.replace("SPACETIME_REGISTER_TABLE", stds_register_table)
         sql = sql.replace("SPACETIME_ID", self.base.get_id())
 
@@ -2278,21 +2320,23 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                         (SELECT id FROM SPACETIME_REGISTER_TABLE);"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
                     None).get_type())
-                sql = sql.replace("SPACETIME_REGISTER_TABLE", stds_register_table)
+                sql = sql.replace("SPACETIME_REGISTER_TABLE",
+                                  stds_register_table)
             else:
                 sql = """SELECT max(start_time) FROM GRASS_MAP_relative_time
                          WHERE GRASS_MAP_relative_time.id IN
                         (SELECT id FROM SPACETIME_REGISTER_TABLE);"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
                     None).get_type())
-                sql = sql.replace("SPACETIME_REGISTER_TABLE", stds_register_table)
+                sql = sql.replace("SPACETIME_REGISTER_TABLE",
+                                  stds_register_table)
 
             dbif.execute(sql,  mapset=self.base.mapset)
             row = dbif.fetchone(mapset=self.base.mapset)
 
             if row is not None:
                 # This seems to be a bug in sqlite3 Python driver
-                if dbif.get_dbmi().__name__== "sqlite3":
+                if dbif.get_dbmi().__name__ == "sqlite3":
                     tstring = row[0]
                     # Convert the unicode string into the datetime format
                     if self.is_time_absolute():
@@ -2321,7 +2365,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                ) WHERE id = 'SPACETIME_ID';"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
                     None).get_type())
-                sql = sql.replace("SPACETIME_REGISTER_TABLE", stds_register_table)
+                sql = sql.replace("SPACETIME_REGISTER_TABLE",
+                                  stds_register_table)
                 sql = sql.replace("SPACETIME_ID", self.base.get_id())
                 sql = sql.replace("STDS", self.get_type())
             elif self.is_time_relative():
@@ -2332,7 +2377,8 @@ class AbstractSpaceTimeDataset(AbstractDataset):
                ) WHERE id = 'SPACETIME_ID';"""
                 sql = sql.replace("GRASS_MAP", self.get_new_map_instance(
                     None).get_type())
-                sql = sql.replace("SPACETIME_REGISTER_TABLE", stds_register_table)
+                sql = sql.replace("SPACETIME_REGISTER_TABLE",
+                                  stds_register_table)
                 sql = sql.replace("SPACETIME_ID", self.base.get_id())
                 sql = sql.replace("STDS", self.get_type())
 

+ 47 - 40
lib/python/temporal/aggregation.py

@@ -126,13 +126,14 @@ def aggregate_raster_maps(inputs, base, start, end, count, method,
 
     # Check if new map is in the temporal database
     if new_map.is_in_db(dbif):
-        if gscript.overwrite() == True:
+        if gscript.overwrite() is True:
             # Remove the existing temporal database entry
             new_map.delete(dbif)
             new_map = RasterDataset(map_id)
         else:
-            msgr.error(_("Raster map <%(name)s> is already in temporal database, " \
-                         "use overwrite flag to overwrite"%({"name":new_map.get_name()})))
+            msgr.error(_("Raster map <%(name)s> is already in temporal "
+                         "database, use overwrite flag to overwrite" %
+                       ({"name": new_map.get_name()})))
             return
 
     msgr.verbose(_("Computing aggregation of maps between %(st)s - %(end)s" % {
@@ -147,16 +148,15 @@ def aggregate_raster_maps(inputs, base, start, end, count, method,
         file.write(string)
 
     file.close()
-    
     # Run r.series
-    if len(inputs) > 1000 :
+    if len(inputs) > 1000:
         ret = gscript.run_command("r.series", flags="z", file=filename,
-                               output=output, overwrite=gscript.overwrite(),
-                               method=method)
+                                  output=output, overwrite=gscript.overwrite(),
+                                  method=method)
     else:
         ret = gscript.run_command("r.series", file=filename,
-                               output=output, overwrite=gscript.overwrite(),
-                               method=method)
+                                  output=output, overwrite=gscript.overwrite(),
+                                  method=method)
 
     if ret != 0:
         dbif.close()
@@ -166,41 +166,47 @@ def aggregate_raster_maps(inputs, base, start, end, count, method,
     new_map.load()
 
     # In case of a null map continue, do not register null maps
-    if new_map.metadata.get_min() is None and new_map.metadata.get_max() is None:
+    if new_map.metadata.get_min() is None and \
+       new_map.metadata.get_max() is None:
         if not register_null:
-            gscript.run_command("g.remove", flags='f', type='rast', pattern=output)
+            gscript.run_command("g.remove", flags='f', type='rast',
+                                pattern=output)
             return None
 
     return new_map
 
 ##############################################################################
 
-def aggregate_by_topology(granularity_list,  granularity,  map_list,  topo_list,  basename,  time_suffix,
-                          offset=0,  method="average",  nprocs=1,  spatial=None,  dbif=None, 
-                          overwrite=False):
+
+def aggregate_by_topology(granularity_list, granularity, map_list, topo_list,
+                          basename, time_suffix, offset=0, method="average",
+                          nprocs=1, spatial=None, dbif=None, overwrite=False):
     """Aggregate a list of raster input maps with r.series
 
-       :param granularity_list: A list of AbstractMapDataset objects. 
-                               The temporal extents of the objects are used
-                               to build the spatio-temporal topology with the map list objects
+       :param granularity_list: A list of AbstractMapDataset objects.
+                                The temporal extents of the objects are used
+                                to build the spatio-temporal topology with the
+                                map list objects
        :param granularity: The granularity of the granularity list
-       :param map_list: A list of RasterDataset objects that contain the raster 
-                       maps that should be aggregated
-       :param topo_list: A list of strings of topological relations that are 
-                        used to select the raster maps for aggregation
+       :param map_list: A list of RasterDataset objects that contain the raster
+                        maps that should be aggregated
+       :param topo_list: A list of strings of topological relations that are
+                         used to select the raster maps for aggregation
        :param basename: The basename of the new generated raster maps
-       :param time_suffix: Use the granularity truncated start time of the 
-                          actual granule to create the suffix for the basename
-       :param offset: Use a numerical offset for suffix generation (overwritten by time_suffix)
+       :param time_suffix: Use the granularity truncated start time of the
+                           actual granule to create the suffix for the basename
+       :param offset: Use a numerical offset for suffix generation
+                      (overwritten by time_suffix)
        :param method: The aggregation method of r.series (average,min,max, ...)
        :param nprocs: The number of processes used for parallel computation
-       :param spatial: This indicates if the spatial topology is created as well:
-                      spatial can be None (no spatial topology), "2D" using west, east,
-                      south, north or "3D" using west, east, south, north, bottom, top
+       :param spatial: This indicates if the spatial topology is created as
+                       well: spatial can be None (no spatial topology), "2D"
+                       using west, east, south, north or "3D" using west,
+                       east, south, north, bottom, top
        :param dbif: The database interface to be used
        :param overwrite: Overwrite existing raster maps
-       :return: A list of RasterDataset objects that contain the new map names and
-               the temporal extent for map registration
+       :return: A list of RasterDataset objects that contain the new map names
+                and the temporal extent for map registration
     """
     import grass.pygrass.modules as pymod
     import copy
@@ -260,26 +266,27 @@ def aggregate_by_topology(granularity_list,  granularity,  map_list,  topo_list,
                 aggregation_list.append(map_layer.get_name())
 
         if aggregation_list:
-            msgr.verbose(_("Aggregating %(len)i raster maps from %(start)s to %(end)s") \
-                           %({"len":len(aggregation_list),
-                           "start":str(granule.temporal_extent.get_start_time()), 
-                           "end":str(granule.temporal_extent.get_end_time())}))
+            msgr.verbose(_("Aggregating %(len)i raster maps from %(start)s to"
+                           " %(end)s")  %({"len": len(aggregation_list),
+                           "start": str(granule.temporal_extent.get_start_time()),
+                           "end": str(granule.temporal_extent.get_end_time())}))
 
             if granule.is_time_absolute() is True and time_suffix is True:
-                suffix = create_suffix_from_datetime(granule.temporal_extent.get_start_time(), 
+                suffix = create_suffix_from_datetime(granule.temporal_extent.get_start_time(),
                                                      granularity)
             else:
                 suffix = gscript.get_num_suffix(count + int(offset),
                                                 len(granularity_list) + int(offset))
-            output_name = "%s_%s"%(basename,  suffix)
+            output_name = "%s_%s" % (basename, suffix)
 
-            map_layer = RasterDataset("%s@%s"%(output_name,
-                                               get_current_mapset()))
+            map_layer = RasterDataset("%s@%s" % (output_name,
+                                                 get_current_mapset()))
             map_layer.set_temporal_extent(granule.get_temporal_extent())
 
             if map_layer.map_exists() is True and overwrite is False:
-                msgr.fatal(_("Unable to perform aggregation. Output raster map <%(name)s> "\
-                             "exists and overwrite flag was not set"%({"name":output_name})))
+                msgr.fatal(_("Unable to perform aggregation. Output raster "
+                             "map <%(name)s> exists and overwrite flag was "
+                             "not set" % ({"name": output_name})))
 
             output_list.append(map_layer)
 
@@ -294,7 +301,7 @@ def aggregate_by_topology(granularity_list,  granularity,  map_list,  topo_list,
 
                 mod = copy.deepcopy(r_series)
                 mod(file=filename, output=output_name)
-                if len(aggregation_list) > 1000 :
+                if len(aggregation_list) > 1000:
                     mod(flags="z")
                 process_queue.put(mod)
             else:
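
Both branches above call r.series with a file of input map names and switch to
the -z flag when more than 1000 maps are aggregated, so that not all inputs
are opened at once. A stripped-down sketch of that call pattern outside the
temporal framework; the input and output map names are placeholders.

.. code-block:: python

    import grass.script as gscript

    inputs = ["map_001", "map_002", "map_003"]   # placeholder raster names
    output = "aggregated"

    # Write one input map per line, as aggregate_raster_maps() does above
    filename = gscript.tempfile()
    with open(filename, "w") as f:
        for name in inputs:
            f.write("%s\n" % name)

    # Use -z only for very long map lists to avoid opening all inputs at once
    flags = "z" if len(inputs) > 1000 else ""
    gscript.run_command("r.series", flags=flags, file=filename,
                        output=output, method="average",
                        overwrite=gscript.overwrite())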

+ 72 - 49
lib/python/temporal/base.py

@@ -25,11 +25,12 @@ for details.
 :author: Soeren Gebbert
 """
 
-from datetime import datetime, date, time, timedelta
+from datetime import datetime
 from core import *
 
 ###############################################################################
 
+
 class DictSQLSerializer(object):
     def __init__(self):
         self.D = {}
@@ -43,7 +44,7 @@ class DictSQLSerializer(object):
             Usage:
 
             .. code-block:: python
-            
+
                 >>> init()
                 >>> t = DictSQLSerializer()
                 >>> t.D["id"] = "soil@PERMANENT"
@@ -68,7 +69,6 @@ class DictSQLSerializer(object):
 
         """
 
-
         sql = ""
         args = []
 
@@ -190,6 +190,7 @@ class DictSQLSerializer(object):
 
 ###############################################################################
 
+
 class SQLDatabaseInterface(DictSQLSerializer):
     """This class represents the SQL database interface
 
@@ -197,7 +198,7 @@ class SQLDatabaseInterface(DictSQLSerializer):
        structure of this class in the temporal database are implemented.
        This is the base class for raster, raster3d, vector and
        space time datasets data management classes:
-       
+
        - Identification information (base)
        - Spatial extent
        - Temporal extent
@@ -298,7 +299,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
 
            :param dbif: The database interface to be used,
                         if None a temporary connection will be established
-           :return: True if this object is present in the temporal database, False otherwise
+           :return: True if this object is present in the temporal database,
+                    False otherwise
         """
 
         sql = self.get_is_in_db_statement()
@@ -337,7 +339,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
 
-        return dbif.mogrify_sql_statement(self.get_select_statement(), mapset=self.mapset)
+        return dbif.mogrify_sql_statement(self.get_select_statement(),
+                                          mapset=self.mapset)
 
     def select(self, dbif=None):
         """Select the content from the temporal database and store it
@@ -374,7 +377,7 @@ class SQLDatabaseInterface(DictSQLSerializer):
             self.deserialize(row)
         else:
             self.msgr.fatal(_("Object <%s> not found in the temporal database")
-                       % self.get_id())
+                            % self.get_id())
 
         return True
 
@@ -394,7 +397,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
 
-        return dbif.mogrify_sql_statement(self.get_insert_statement(), mapset=self.mapset)
+        return dbif.mogrify_sql_statement(self.get_insert_statement(),
+                                          mapset=self.mapset)
 
     def insert(self, dbif=None):
         """Serialize the content of this object and store it in the temporal
@@ -425,10 +429,10 @@ class SQLDatabaseInterface(DictSQLSerializer):
            """
         if ident:
             return self.serialize("UPDATE", self.get_table_name(),
-                              "WHERE id = \'" + str(ident) + "\'")
+                                  "WHERE id = \'" + str(ident) + "\'")
         else:
             return self.serialize("UPDATE", self.get_table_name(),
-                              "WHERE id = \'" + str(self.ident) + "\'")
+                                  "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_update_statement_mogrified(self, dbif=None, ident=None):
         """Return the update statement as mogrified string
@@ -441,7 +445,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
 
-        return dbif.mogrify_sql_statement(self.get_update_statement(ident), mapset=self.mapset)
+        return dbif.mogrify_sql_statement(self.get_update_statement(ident),
+                                          mapset=self.mapset)
 
     def update(self, dbif=None, ident=None):
         """Serialize the content of this object and update it in the temporal
@@ -477,10 +482,10 @@ class SQLDatabaseInterface(DictSQLSerializer):
            """
         if ident:
             return self.serialize("UPDATE ALL", self.get_table_name(),
-                              "WHERE id = \'" + str(ident) + "\'")
+                                  "WHERE id = \'" + str(ident) + "\'")
         else:
             return self.serialize("UPDATE ALL", self.get_table_name(),
-                              "WHERE id = \'" + str(self.ident) + "\'")
+                                  "WHERE id = \'" + str(self.ident) + "\'")
 
     def get_update_all_statement_mogrified(self, dbif=None, ident=None):
         """Return the update all statement as mogrified string
@@ -493,7 +498,8 @@ class SQLDatabaseInterface(DictSQLSerializer):
         if not dbif:
             dbif = SQLDatabaseInterfaceConnection()
 
-        return dbif.mogrify_sql_statement(self.get_update_all_statement(ident), mapset=self.mapset)
+        return dbif.mogrify_sql_statement(self.get_update_all_statement(ident),
+                                          mapset=self.mapset)
 
     def update_all(self, dbif=None, ident=None):
         """Serialize the content of this object, including None objects,
@@ -520,6 +526,7 @@ class SQLDatabaseInterface(DictSQLSerializer):
 
 ###############################################################################
 
+
 class DatasetBase(SQLDatabaseInterface):
     """This is the base class for all maps and spacetime datasets storing
         basic identification information
@@ -561,7 +568,7 @@ class DatasetBase(SQLDatabaseInterface):
     """
 
     def __init__(self, table=None, ident=None, name=None, mapset=None,
-                 creator=None, ctime=None,ttype=None):
+                 creator=None, ctime=None, ttype=None):
         """Constructor
 
             :param table: The name of the temporal database table
@@ -575,9 +582,9 @@ class DatasetBase(SQLDatabaseInterface):
             :param creator: The name of the creator
             :param ctime: The creation datetime object
             :param ttype: The temporal type
-            
-                - "absolute" Identifier for absolute time
-                - "relative" Identifier for relative time
+
+                              - "absolute" Identifier for absolute time
+                              - "relative" Identifier for relative time
         """
 
         SQLDatabaseInterface.__init__(self, table, ident)
@@ -616,7 +623,6 @@ class DatasetBase(SQLDatabaseInterface):
                 self.set_layer(layer)
             self.set_name(name)
 
-
     def set_name(self, name):
         """Set the name of the dataset
 
@@ -789,7 +795,7 @@ class RasterBase(DatasetBase):
     def __init__(self, ident=None, name=None, mapset=None, creator=None,
                  creation_time=None, temporal_type=None):
         DatasetBase.__init__(self, "raster_base", ident, name, mapset,
-                              creator, creation_time, temporal_type)
+                             creator, creation_time, temporal_type)
 
 
 class Raster3DBase(DatasetBase):
@@ -797,8 +803,8 @@ class Raster3DBase(DatasetBase):
     def __init__(self, ident=None, name=None, mapset=None, creator=None,
                  creation_time=None, temporal_type=None,):
         DatasetBase.__init__(self, "raster3d_base", ident, name,
-                              mapset, creator, creation_time,
-                              temporal_type)
+                             mapset, creator, creation_time,
+                             temporal_type)
 
 
 class VectorBase(DatasetBase):
@@ -806,7 +812,7 @@ class VectorBase(DatasetBase):
     def __init__(self, ident=None, name=None, mapset=None, layer=None,
                  creator=None, creation_time=None, temporal_type=None):
         DatasetBase.__init__(self, "vector_base", ident, name, mapset,
-                              creator, creation_time, temporal_type)
+                             creator, creation_time, temporal_type)
 
         self.set_id(ident)
         if ident is not None and name is None and mapset is None:
@@ -862,7 +868,7 @@ class STDSBase(DatasetBase):
                  semantic_type=None, creator=None, ctime=None,
                  ttype=None, mtime=None):
         DatasetBase.__init__(self, table, ident, name, mapset, creator,
-                              ctime, ttype)
+                             ctime, ttype)
 
         self.set_semantic_type(semantic_type)
         self.set_mtime(mtime)
@@ -870,29 +876,35 @@ class STDSBase(DatasetBase):
     def set_semantic_type(self, semantic_type):
         """Set the semantic type of the space time dataset"""
         self.D["semantic_type"] = semantic_type
-        
+
     def set_mtime(self, mtime=None):
-       """Set the modification time of the space time dataset, if nothing set the current time is used"""
-       if mtime == None:
+        """Set the modification time of the space time dataset, if nothing set
+           the current time is used
+        """
+        if mtime is None:
             self.D["modification_time"] = datetime.now()
-       else:
+        else:
             self.D["modification_time"] = mtime
 
     def get_semantic_type(self):
         """Get the semantic type of the space time dataset
-           :return: None if not found"""
+           :return: None if not found
+        """
         if "semantic_type" in self.D:
             return self.D["semantic_type"]
         else:
             return None
-            
+
     def get_mtime(self):
-       """Get the modification time of the space time dataset, datatype is datetime
-          :return: None if not found"""
-       if self.D.has_key("modification_time"):
-           return self.D["modification_time"]
-       else:
-           return None
+        """Get the modification time of the space time dataset, datatype is
+           datetime
+
+           :return: None if not found
+        """
+        if self.D.has_key("modification_time"):
+            return self.D["modification_time"]
+        else:
+            return None
 
     semantic_type = property(fget=get_semantic_type, fset=set_semantic_type)
 
@@ -919,8 +931,8 @@ class STRDSBase(STDSBase):
                  semantic_type=None, creator=None, ctime=None,
                  ttype=None):
         STDSBase.__init__(self, "strds_base", ident, name, mapset,
-                           semantic_type, creator, ctime,
-                           ttype)
+                          semantic_type, creator, ctime,
+                          ttype)
 
 
 class STR3DSBase(STDSBase):
@@ -929,8 +941,8 @@ class STR3DSBase(STDSBase):
                  semantic_type=None, creator=None, ctime=None,
                  ttype=None):
         STDSBase.__init__(self, "str3ds_base", ident, name, mapset,
-                           semantic_type, creator, ctime,
-                           ttype)
+                          semantic_type, creator, ctime,
+                          ttype)
 
 
 class STVDSBase(STDSBase):
@@ -939,11 +951,12 @@ class STVDSBase(STDSBase):
                  semantic_type=None, creator=None, ctime=None,
                  ttype=None):
         STDSBase.__init__(self, "stvds_base", ident, name, mapset,
-                           semantic_type, creator, ctime,
-                           ttype)
+                          semantic_type, creator, ctime,
+                          ttype)
 
 ###############################################################################
 
+
 class AbstractSTDSRegister(SQLDatabaseInterface):
     """This is the base class for all maps to store the space time datasets
        as comma separated string in which they are registered
@@ -991,8 +1004,9 @@ class AbstractSTDSRegister(SQLDatabaseInterface):
     def set_registered_stds(self, registered_stds):
         """Get the comma separated list of space time datasets ids
            in which this map is registered
-           :param registered_stds A comma separated list of space time dataset ids
-                                  in which this map is registered
+
+           :param registered_stds: A comma separated list of space time
+                                   dataset ids in which this map is registered
         """
         self.D["registered_stds"] = registered_stds
 
@@ -1009,7 +1023,9 @@ class AbstractSTDSRegister(SQLDatabaseInterface):
     def get_registered_stds(self):
         """Get the comma separated list of space time datasets ids
            in which this map is registered
-           :return: None if not found"""
+
+           :return: None if not found
+        """
         if "registered_stds" in self.D:
             return self.D["registered_stds"]
         else:
@@ -1017,24 +1033,31 @@ class AbstractSTDSRegister(SQLDatabaseInterface):
 
     # Properties of this class
     id = property(fget=get_id, fset=set_id)
-    registered_stds = property(fget=get_registered_stds, fset=set_registered_stds)
+    registered_stds = property(fget=get_registered_stds,
+                               fset=set_registered_stds)
 
 ###############################################################################
 
+
 class RasterSTDSRegister(AbstractSTDSRegister):
     """Time stamped raster map base information class"""
     def __init__(self, ident=None, registered_stds=None):
-        AbstractSTDSRegister.__init__(self, "raster_stds_register", ident, registered_stds)
+        AbstractSTDSRegister.__init__(self, "raster_stds_register", ident,
+                                      registered_stds)
+
 
 class Raster3DSTDSRegister(AbstractSTDSRegister):
     """Time stamped 3D raster map base information class"""
     def __init__(self, ident=None, registered_stds=None):
-        AbstractSTDSRegister.__init__(self, "raster3d_stds_register", ident, registered_stds)
+        AbstractSTDSRegister.__init__(self, "raster3d_stds_register", ident,
+                                      registered_stds)
+
 
 class VectorSTDSRegister(AbstractSTDSRegister):
     """Time stamped vector map base information class"""
     def __init__(self, ident=None, registered_stds=None):
-        AbstractSTDSRegister.__init__(self, "vector_stds_register", ident, registered_stds)
+        AbstractSTDSRegister.__init__(self, "vector_stds_register", ident,
+                                      registered_stds)
 
 ###############################################################################
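
The register classes above store, for each time stamped map, the space time datasets it is registered in as a single comma separated string (registered_stds). The following sketch only illustrates that storage convention; the helper names and dataset ids are made up and are not part of the library.

def add_stds_to_register(registered_stds, stds_id):
    """Append an STDS id to a comma separated register string."""
    if not registered_stds:
        return stds_id
    ids = registered_stds.split(",")
    if stds_id not in ids:
        ids.append(stds_id)
    return ",".join(ids)

def remove_stds_from_register(registered_stds, stds_id):
    """Drop an STDS id; return None when the register becomes empty."""
    ids = [i for i in (registered_stds or "").split(",") if i and i != stds_id]
    return ",".join(ids) if ids else None

print(add_stds_to_register(None, "tempmean@climate"))
# tempmean@climate
print(add_stds_to_register("tempmean@climate", "precip@climate"))
# tempmean@climate,precip@climate
print(remove_stds_from_register("tempmean@climate", "tempmean@climate"))
# None
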
 

+ 127 - 95
lib/python/temporal/c_libraries_interface.py

@@ -12,31 +12,30 @@ for details.
 """
 
 import sys
-from multiprocessing import Process, Lock, Pipe,  Queue
+from multiprocessing import Process, Lock, Pipe
 import logging
 from ctypes import *
 from core import *
-import core as corefunc
 import grass.lib.gis as libgis
 import grass.lib.raster as libraster
 import grass.lib.vector as libvector
 import grass.lib.date as libdate
 import grass.lib.raster3d as libraster3d
 import grass.lib.temporal as libtgis
-import signal, os
 
 ###############################################################################
 
+
 class RPCDefs(object):
     # Function identifier and index
-    STOP=0
-    HAS_TIMESTAMP=1
-    WRITE_TIMESTAMP=2
-    READ_TIMESTAMP=3
-    REMOVE_TIMESTAMP=4
-    READ_MAP_INFO=5
-    MAP_EXISTS=6
-    READ_MAP_INFO=7
+    STOP = 0
+    HAS_TIMESTAMP = 1
+    WRITE_TIMESTAMP = 2
+    READ_TIMESTAMP = 3
+    REMOVE_TIMESTAMP = 4
+    READ_MAP_INFO = 5
+    MAP_EXISTS = 6
+    READ_MAP_INFO = 7
     AVAILABLE_MAPSETS = 8
     GET_DRIVER_NAME = 9
     GET_DATABASE_NAME = 10
@@ -45,77 +44,83 @@ class RPCDefs(object):
     G_GISDBASE = 13
     G_FATAL_ERROR = 14
 
-    TYPE_RASTER=0
-    TYPE_RASTER3D=1
-    TYPE_VECTOR=2
+    TYPE_RASTER = 0
+    TYPE_RASTER3D = 1
+    TYPE_VECTOR = 2
 
 ###############################################################################
 
+
 def _fatal_error(lock, conn, data):
     """Calls G_fatal_error()"""
     libgis.G_fatal_error("Fatal Error in C library server")
 
+
 def _get_mapset(lock, conn, data):
     """Return the current mapset
-    
+
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
        :param data: The mapset as list entry 1 [function_id]
-       
+
        :returns: Name of the current mapset
-    """    
+    """
     mapset = libgis.G_mapset()
-    conn.send(mapset) 
-    
+    conn.send(mapset)
+
+
 def _get_location(lock, conn, data):
     """Return the current location
-    
+
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
        :param data: The mapset as list entry 1 [function_id]
-       
+
        :returns: Name of the location
-    """    
+    """
     location = libgis.G_location()
-    conn.send(location) 
-   
+    conn.send(location)
+
+
 def _get_gisdbase(lock, conn, data):
     """Return the current gisdatabase
-    
+
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
        :param data: The mapset as list entry 1 [function_id]
-       
+
        :returns: Name of the gisdatabase
-    """    
+    """
     gisdbase = libgis.G_gisdbase()
-    conn.send(gisdbase) 
+    conn.send(gisdbase)
+
 
 def _get_driver_name(lock, conn, data):
     """Return the temporal database driver of a specific mapset
-    
+
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
        :param data: The mapset as list entry 1 [function_id, mapset]
-       
+
        :returns: Name of the driver or None if no temporal database present
     """
     mapset = data[1]
     if not mapset:
         mapset = libgis.G_mapset()
-    
+
     drstring = libtgis.tgis_get_mapset_driver_name(mapset)
-    conn.send(drstring) 
+    conn.send(drstring)
 
 ###############################################################################
 
+
 def _get_database_name(lock, conn, data):
     """Return the temporal database name of a specific mapset
-    
+
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
        :param data: The mapset as list entry 1 [function_id, mapset]
-       
+
        :returns: Name of the database or None if no temporal database present
     """
     mapset = data[1]
@@ -129,20 +134,21 @@ def _get_database_name(lock, conn, data):
         dbstring = dbstring.replace("$GISDBASE", libgis.G_gisdbase())
         dbstring = dbstring.replace("$LOCATION_NAME", libgis.G_location())
         dbstring = dbstring.replace("$MAPSET", libgis.G_mapset())
-    conn.send(dbstring) 
+    conn.send(dbstring)
 
 ###############################################################################
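
The function above expands the GRASS placeholders in the t.connect database string. A short sketch of the same substitution with invented paths; at run time the real values come from libgis:

# Illustration only: the gisdbase, location and mapset names are made up.
dbstring = "$GISDBASE/$LOCATION_NAME/$MAPSET/tgis/sqlite.db"
dbstring = (dbstring.replace("$GISDBASE", "/data/grassdata")
                    .replace("$LOCATION_NAME", "nc_spm_08")
                    .replace("$MAPSET", "climate"))
print(dbstring)  # /data/grassdata/nc_spm_08/climate/tgis/sqlite.db
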
 
+
 def _available_mapsets(lock, conn, data):
     """Return all available mapsets the user can access as a list of strings
-    
+
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
        :param data: The list of data entries [function_id]
-       
+
        :returns: Names of available mapsets as list of strings
     """
-    
+
     mapsets = libgis.G_get_available_mapsets()
 
     count = 0
@@ -156,10 +162,10 @@ def _available_mapsets(lock, conn, data):
             while mapset[c] != "\x00":
                 char_list += mapset[c]
                 c += 1
-                
-        mapset_list.append(char_list) 
-        
-    # We need to sort the mapset list, but the first one should be 
+
+        mapset_list.append(char_list)
+
+    # We need to sort the mapset list, but the first one should be
     # the current mapset
     current_mapset = libgis.G_mapset()
     mapset_list.remove(current_mapset)
@@ -168,7 +174,8 @@ def _available_mapsets(lock, conn, data):
     mapset_list.append(current_mapset)
     mapset_list.reverse()
 
-    conn.send(mapset_list) 
+    conn.send(mapset_list)
+
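
The comment above describes the intended ordering of the result: the current mapset first, the remaining mapsets afterwards. A hedged pure-Python sketch of that intent, without the C string handling:

def order_mapsets(mapsets, current_mapset):
    """Put the current mapset first and keep the other mapsets sorted."""
    others = sorted(m for m in mapsets if m != current_mapset)
    return [current_mapset] + others

print(order_mapsets(["user1", "PERMANENT", "climate"], "climate"))
# ['climate', 'PERMANENT', 'user1']
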
 
 def _has_timestamp(lock, conn, data):
     """Check if the file based GRASS timestamp is present and send
@@ -176,13 +183,14 @@ def _has_timestamp(lock, conn, data):
 
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
-       :param data: The list of data entries [function_id, maptype, name, mapset, layer]
+       :param data: The list of data entries [function_id, maptype, name,
+                    mapset, layer]
 
     """
     maptype = data[1]
     name = data[2]
     mapset = data[3]
-    layer= data[4]
+    layer = data[4]
     check = False
     if maptype == RPCDefs.TYPE_RASTER:
         if libgis.G_has_raster_timestamp(name, mapset) == 1:
@@ -197,17 +205,20 @@ def _has_timestamp(lock, conn, data):
 
 ###############################################################################
 
+
 def _read_timestamp(lock, conn, data):
     """Read the file based GRASS timestamp and send
        the result using the provided pipe.
 
-       The tuple to be send via pipe: (return value of G_read_*_timestamp, timestamps).
+       The tuple to be sent via pipe: (return value of G_read_*_timestamp,
+       timestamps).
 
        Please have a look at the documentation of G_read_raster_timestamp,
        G_read_vector_timestamp and G_read_raster3d_timestamp for the return
        values description.
 
        The timestamps to be send are tuples of values:
+
            - relative time (start, end, unit), start and end are of type
              integer, unit is of type string.
            - absolute time (start, end), start and end are of type datetime
@@ -216,13 +227,14 @@ def _read_timestamp(lock, conn, data):
 
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send the result
-       :param data: The list of data entries [function_id, maptype, name, mapset, layer]
+       :param data: The list of data entries [function_id, maptype, name,
+                    mapset, layer]
 
     """
     maptype = data[1]
     name = data[2]
     mapset = data[3]
-    layer= data[4]
+    layer = data[4]
     check = False
     ts = libgis.TimeStamp()
     if maptype == RPCDefs.TYPE_RASTER:
@@ -237,6 +249,7 @@ def _read_timestamp(lock, conn, data):
 
 ###############################################################################
 
+
 def _write_timestamp(lock, conn, data):
     """Write the file based GRASS timestamp
        the return values of the called C-functions using the provided pipe.
@@ -249,19 +262,20 @@ def _write_timestamp(lock, conn, data):
 
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
-       :param data: The list of data entries [function_id, maptype, name, mapset, layer, timestring]
+       :param data: The list of data entries [function_id, maptype, name,
+                    mapset, layer, timestring]
     """
     maptype = data[1]
     name = data[2]
     mapset = data[3]
-    layer= data[4]
+    layer = data[4]
     timestring = data[5]
     check = -3
     ts = libgis.TimeStamp()
     check = libgis.G_scan_timestamp(byref(ts), timestring)
 
     if check != 1:
-        logging.error("Unable to convert the timestamp: "+ timestring)
+        logging.error("Unable to convert the timestamp: " + timestring)
         return -2
 
     if maptype == RPCDefs.TYPE_RASTER:
@@ -275,6 +289,7 @@ def _write_timestamp(lock, conn, data):
 
 ###############################################################################
 
+
 def _remove_timestamp(lock, conn, data):
     """Remove the file based GRASS timestamp
        the return values of the called C-functions using the provided pipe.
@@ -287,13 +302,14 @@ def _remove_timestamp(lock, conn, data):
 
        :param lock: A multiprocessing.Lock instance
        :param conn: A multiprocessing.Pipe instance used to send True or False
-       :param data: The list of data entries [function_id, maptype, name, mapset, layer]
+       :param data: The list of data entries [function_id, maptype, name,
+                    mapset, layer]
 
     """
     maptype = data[1]
     name = data[2]
     mapset = data[3]
-    layer= data[4]
+    layer = data[4]
     check = False
     if maptype == RPCDefs.TYPE_RASTER:
         check = libgis.G_remove_raster_timestamp(name, mapset)
@@ -306,6 +322,7 @@ def _remove_timestamp(lock, conn, data):
 
 ###############################################################################
 
+
 def _map_exists(lock, conn, data):
     """Check if a map exists in the spatial database
 
@@ -322,11 +339,11 @@ def _map_exists(lock, conn, data):
     mapset = data[3]
     check = False
     if maptype == RPCDefs.TYPE_RASTER:
-         mapset = libgis.G_find_raster(name, mapset)
+        mapset = libgis.G_find_raster(name, mapset)
     elif maptype == RPCDefs.TYPE_VECTOR:
-         mapset = libgis.G_find_vector(name, mapset)
+        mapset = libgis.G_find_vector(name, mapset)
     elif maptype == RPCDefs.TYPE_RASTER3D:
-         mapset = libgis.G_find_raster3d(name, mapset)
+        mapset = libgis.G_find_raster3d(name, mapset)
 
     if mapset:
         check = True
@@ -335,6 +352,7 @@ def _map_exists(lock, conn, data):
 
 ###############################################################################
 
+
 def _read_map_info(lock, conn, data):
     """Read map specific metadata from the spatial database using C-library
        functions
@@ -347,16 +365,17 @@ def _read_map_info(lock, conn, data):
     name = data[2]
     mapset = data[3]
     if maptype == RPCDefs.TYPE_RASTER:
-         kvp = _read_raster_info(name, mapset)
+        kvp = _read_raster_info(name, mapset)
     elif maptype == RPCDefs.TYPE_VECTOR:
-         kvp = _read_vector_info(name, mapset)
+        kvp = _read_vector_info(name, mapset)
     elif maptype == RPCDefs.TYPE_RASTER3D:
-         kvp = _read_raster3d_info(name, mapset)
+        kvp = _read_raster3d_info(name, mapset)
 
     conn.send(kvp)
 
 ###############################################################################
 
+
 def _read_raster_info(name, mapset):
     """Read the raster map info from the file system and store the content
        into a dictionary
@@ -366,7 +385,8 @@ def _read_raster_info(name, mapset):
 
        :param name: The name of the map
        :param mapset: The mapset of the map
-       :returns: The key value pairs of the map specific metadata, or None in case of an error
+       :returns: The key value pairs of the map specific metadata, or None in
+                 case of an error
     """
 
     kvp = {}
@@ -436,6 +456,7 @@ def _read_raster_info(name, mapset):
 
 ###############################################################################
 
+
 def _read_raster3d_info(name, mapset):
     """Read the 3D raster map info from the file system and store the content
        into a dictionary
@@ -445,7 +466,8 @@ def _read_raster3d_info(name, mapset):
 
        :param name: The name of the map
        :param mapset: The mapset of the map
-       :returns: The key value pairs of the map specific metadata, or None in case of an error
+       :returns: The key value pairs of the map specific metadata, or None in
+                 case of an error
     """
 
     kvp = {}
@@ -496,7 +518,7 @@ def _read_raster3d_info(name, mapset):
     ret = libraster3d.Rast3d_range_load(g3map)
     if not ret:
         logging.error(_("Unable to load range of 3D raster map <%s>" %
-                     (name)))
+                      (name)))
         return None
     libraster3d.Rast3d_range_min_max(g3map, byref(min), byref(max))
 
@@ -517,6 +539,7 @@ def _read_raster3d_info(name, mapset):
 
 ###############################################################################
 
+
 def _read_vector_info(name, mapset):
     """Read the vector map info from the file system and store the content
        into a dictionary
@@ -526,7 +549,8 @@ def _read_vector_info(name, mapset):
 
        :param name: The name of the map
        :param mapset: The mapset of the map
-       :returns: The key value pairs of the map specific metadata, or None in case of an error
+       :returns: The key value pairs of the map specific metadata, or None in
+                 case of an error
     """
 
     kvp = {}
@@ -550,7 +574,7 @@ def _read_vector_info(name, mapset):
         with_topo = False
         if libvector.Vect_open_old2(byref(Map), name, mapset, "1") < 1:
             logging.error(_("Unable to open vector map <%s>" %
-                         (libvector.Vect_get_full_name(byref(Map)))))
+                          (libvector.Vect_get_full_name(byref(Map)))))
             return None
 
     # Release the vector spatial index memory when closed
@@ -617,11 +641,13 @@ def _read_vector_info(name, mapset):
 
 ###############################################################################
 
+
 def _convert_timestamp_from_grass(ts):
     """Convert a GRASS file based timestamp into the temporal framework
        format datetime or integer.
 
-       A tuple of two datetime objects (start, end) is returned in case of absolute time.
+       A tuple of two datetime objects (start, end) is returned in case of
+       absolute time.
        In case of relative time a tuple with start time, end time and the
        relative unit (start, end, unit) will be returned.
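
The two tuple layouts described above differ in length, which is enough to tell them apart on the Python side. A small illustrative sketch, independent of the C interface:

from datetime import datetime

def format_timestamp(ts):
    """Format an absolute (start, end) or relative (start, end, unit) tuple."""
    if len(ts) == 3:                      # relative time, integer start/end
        start, end, unit = ts
        return "%s .. %s %s" % (start, end, unit)
    start, end = ts                       # absolute time, end may be None
    return "%s .. %s" % (start.isoformat(), end.isoformat() if end else None)

print(format_timestamp((datetime(2001, 1, 1), datetime(2001, 2, 1))))
# 2001-01-01T00:00:00 .. 2001-02-01T00:00:00
print(format_timestamp((0, 7, "days")))
# 0 .. 7 days
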
 
@@ -639,7 +665,7 @@ def _convert_timestamp_from_grass(ts):
                             byref(dt1),
                             byref(dt2),
                             byref(count))
-    
+
     if dt1.mode == libdate.DATETIME_ABSOLUTE:
         pdt1 = None
         pdt2 = None
@@ -695,6 +721,7 @@ def _convert_timestamp_from_grass(ts):
 
 ###############################################################################
 
+
 def _stop(lock, conn, data):
     libgis.G_debug(1, "Stop C-interface server")
     conn.close()
@@ -706,13 +733,14 @@ def _stop(lock, conn, data):
 server_connection = None
 server_lock = None
 
+
 def c_library_server(lock, conn):
     """The GRASS C-libraries server function designed to be a target for
        multiprocessing.Process
 
        :param lock: A multiprocessing.Lock
        :param conn: A multiprocessing.Pipe
-    """   
+    """
     # Crerate the function array
     functions = [0]*15
     functions[RPCDefs.STOP] = _stop
@@ -741,6 +769,7 @@ def c_library_server(lock, conn):
         functions[data[0]](lock, conn, data)
         lock.release()
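
The server loop above looks up the requested function by the id stored in data[0] and calls it while holding the lock. Below is a minimal sketch of the same Process/Pipe/function-table pattern with invented worker functions; it mirrors the layout only and does not call any GRASS C functions:

from multiprocessing import Lock, Pipe, Process

STOP, ECHO, ADD = 0, 1, 2

def _stop(lock, conn, data):
    conn.close()

def _echo(lock, conn, data):
    conn.send(data[1])

def _add(lock, conn, data):
    conn.send(data[1] + data[2])

def toy_server(lock, conn):
    # Dispatch table indexed by the function id sent as first list entry
    functions = {STOP: _stop, ECHO: _echo, ADD: _add}
    while True:
        data = conn.recv()
        lock.acquire()
        functions[data[0]](lock, conn, data)
        lock.release()
        if data[0] == STOP:
            return

if __name__ == "__main__":
    client_conn, server_conn = Pipe(True)
    lock = Lock()
    server = Process(target=toy_server, args=(lock, server_conn))
    server.daemon = True
    server.start()
    client_conn.send([ECHO, "hello"])
    print(client_conn.recv())   # hello
    client_conn.send([ADD, 2, 3])
    print(client_conn.recv())   # 5
    client_conn.send([STOP])
    server.join(5)
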
 
+
 class CLibrariesInterface(object):
     """Fast and exit-safe interface to GRASS C-libraries functions
 
@@ -758,7 +787,7 @@ class CLibrariesInterface(object):
        Usage:
 
        .. code-block:: python
-           
+
            >>> import grass.script as gscript
            >>> import grass.temporal as tgis
            >>> gscript.use_temp_region()
@@ -784,7 +813,7 @@ class CLibrariesInterface(object):
            >>> mapsets = ciface.available_mapsets()
            >>> mapsets[0] == tgis.get_current_mapset()
            True
-           
+
            # Raster map
            >>> ciface = tgis.CLibrariesInterface()
            >>> check = ciface.raster_map_exists("test", tgis.get_current_mapset())
@@ -857,12 +886,12 @@ class CLibrariesInterface(object):
            1
            >>> ciface.has_vector_timestamp("test", tgis.get_current_mapset())
            True
-           
+
            >>> ciface.get_driver_name()
            'sqlite'
            >>> ciface.get_database_name().split("/")[-1]
            'sqlite.db'
-           
+
            >>> mapset = ciface.get_mapset()
            >>> location = ciface.get_location()
            >>> gisdbase = ciface.get_gisdbase()
@@ -881,7 +910,7 @@ class CLibrariesInterface(object):
         self.client_conn, self.server_conn = Pipe(True)
         self.lock = Lock()
         self.server = Process(target=c_library_server, args=(self.lock,
-                                                          self.server_conn))
+                                                             self.server_conn))
         self.server.daemon = True
         self.server.start()
 
@@ -913,7 +942,8 @@ class CLibrariesInterface(object):
 
            :param name: The name of the map
            :param mapset: The mapset of the map
-           :returns: The key value pairs of the map specific metadata, or None in case of an error
+           :returns: The key value pairs of the map specific metadata,
+                     or None in case of an error
         """
         self._check_restart_server()
         self.client_conn.send([RPCDefs.READ_MAP_INFO, RPCDefs.TYPE_RASTER,
@@ -1006,7 +1036,8 @@ class CLibrariesInterface(object):
 
            :param name: The name of the map
            :param mapset: The mapset of the map
-           :returns: The key value pairs of the map specific metadata, or None in case of an error
+           :returns: The key value pairs of the map specific metadata,
+                     or None in case of an error
         """
         self._check_restart_server()
         self.client_conn.send([RPCDefs.READ_MAP_INFO, RPCDefs.TYPE_RASTER3D,
@@ -1099,7 +1130,8 @@ class CLibrariesInterface(object):
 
            :param name: The name of the map
            :param mapset: The mapset of the map
-           :returns: The key value pairs of the map specific metadata, or None in case of an error
+           :returns: The key value pairs of the map specific metadata,
+                     or None in case of an error
         """
         self._check_restart_server()
         self.client_conn.send([RPCDefs.READ_MAP_INFO, RPCDefs.TYPE_VECTOR,
@@ -1180,18 +1212,18 @@ class CLibrariesInterface(object):
 
     def available_mapsets(self):
         """Return all available mapsets the user can access as a list of strings
-           
+
            :returns: Names of available mapsets as list of strings
         """
         self._check_restart_server()
         self.client_conn.send([RPCDefs.AVAILABLE_MAPSETS, ])
         return self.client_conn.recv()
-        
+
     def get_driver_name(self, mapset=None):
         """Return the temporal database driver of a specific mapset
-        
+
            :param mapset: Name of the mapset
-           
+
            :returns: Name of the driver or None if no temporal database present
         """
         self._check_restart_server()
@@ -1200,47 +1232,47 @@ class CLibrariesInterface(object):
 
     def get_database_name(self, mapset=None):
         """Return the temporal database name of a specific mapset
-        
+
            :param mapset: Name of the mapset
-           
+
            :returns: Name of the database or None if no temporal database present
         """
         self._check_restart_server()
         self.client_conn.send([RPCDefs.GET_DATABASE_NAME, mapset])
         return self.client_conn.recv()
-    
+
     def get_mapset(self):
         """Return the current mapset
-                   
+
            :returns: Name of the current mapset
         """
         self._check_restart_server()
-        self.client_conn.send([RPCDefs.G_MAPSET,])
+        self.client_conn.send([RPCDefs.G_MAPSET, ])
         return self.client_conn.recv()
-        
+
     def get_location(self):
         """Return the location
-                   
+
            :returns: Name of the location
         """
         self._check_restart_server()
-        self.client_conn.send([RPCDefs.G_LOCATION,])
+        self.client_conn.send([RPCDefs.G_LOCATION, ])
         return self.client_conn.recv()
-        
+
     def get_gisdbase(self):
         """Return the gisdatabase
-                   
+
            :returns: Name of the gisdatabase
         """
         self._check_restart_server()
-        self.client_conn.send([RPCDefs.G_GISDBASE,])
+        self.client_conn.send([RPCDefs.G_GISDBASE, ])
         return self.client_conn.recv()
-        
+
     def fatal_error(self, mapset=None):
         """Return the temporal database name of a specific mapset
-        
+
            :param mapset: Name of the mapset
-           
+
            :returns: Name of the database or None if no temporal database present
         """
         self._check_restart_server()
@@ -1248,11 +1280,11 @@ class CLibrariesInterface(object):
 
     def stop(self):
         """Stop the messenger server and close the pipe
-        
+
            This method should be called at exit using the package atexit
         """
         if self.server is not None and self.server.is_alive():
-            self.client_conn.send([0,])
+            self.client_conn.send([0, ])
             self.server.join(5)
             self.server.terminate()
         if self.client_conn is not None:

+ 246 - 170
lib/python/temporal/core.py

@@ -28,9 +28,7 @@ for details.
 
 :author: Soeren Gebbert
 """
-import sys, traceback
 import os
-import locale
 # i18N
 import gettext
 gettext.install('grasslibs', os.path.join(os.getenv("GISBASE"), 'locale'))
@@ -55,8 +53,9 @@ import atexit
 
 ###############################################################################
 
-# Profiling function provided by the temporal framework
+
 def profile_function(func):
+    """Profiling function provided by the temporal framework"""
     do_profiling = os.getenv("GRASS_TGIS_PROFILE")
 
     if do_profiling is "True" or do_profiling is "1":
@@ -77,6 +76,8 @@ def profile_function(func):
 # of the temporal GIS
 # It can either be "sqlite" or "pg"
 tgis_backend = None
+
+
 def get_tgis_backend():
     """Return the temporal GIS backend as string
 
@@ -88,6 +89,8 @@ def get_tgis_backend():
 # Global variable that defines the database string
 # of the temporal GIS
 tgis_database = None
+
+
 def get_tgis_database():
     """Return the temporal database string specified with t.connect
     """
@@ -97,16 +100,17 @@ def get_tgis_database():
 # The version of the temporal framework
 # this value must be an integer larger than 0
 # Increase this value in case of backward incompatible changes in the TGIS API
-tgis_version=2
-# The version of the temporal database since framework and database version can differ
-# this value must be an integer larger than 0
+tgis_version = 2
+# The version of the temporal database since framework and database version
+# can differ this value must be an integer larger than 0
 # Increase this value in case of backward incompatible changes
 # temporal database SQL layout
-tgis_db_version=2
+tgis_db_version = 2
 
 # We need to know the parameter style of the database backend
 tgis_dbmi_paramstyle = None
 
+
 def get_tgis_dbmi_paramstyle():
     """Return the temporal database backend parameter style
 
@@ -123,6 +127,7 @@ current_gisdbase = None
 
 ###############################################################################
 
+
 def get_current_mapset():
     """Return the current mapset
 
@@ -135,6 +140,7 @@ def get_current_mapset():
 
 ###############################################################################
 
+
 def get_current_location():
     """Return the current location
 
@@ -147,6 +153,7 @@ def get_current_location():
 
 ###############################################################################
 
+
 def get_current_gisdbase():
     """Return the current gis database (gisdbase)
 
@@ -159,48 +166,60 @@ def get_current_gisdbase():
 
 ###############################################################################
 
-# If this global variable is set True, then maps can only be registered in space time datasets
-# with the same mapset. In addition, only maps in the current mapset can be inserted, updated or deleted from
-# the temporal database.
+# If this global variable is set True, then maps can only be registered in
+# space time datasets with the same mapset. In addition, only maps in the
+# current mapset can be inserted, updated or deleted from the temporal database.
 # Overwrite this global variable by: g.gisenv set="TGIS_DISABLE_MAPSET_CHECK=True"
-# ATTENTION: Be aware to face corrupted temporal database in case this global variable is set to False.
-#            This feature is highly experimental and violates the grass permission guidance.
+# ATTENTION: Be aware to face corrupted temporal database in case this global
+#            variable is set to False. This feature is highly
+#            experimental and violates the grass permission guidance.
 enable_mapset_check = True
-# If this global variable is set True, the timestamps of maps will be written as textfiles
-# for each map that will be inserted or updated in the temporal database using the C-library
-# timestamp interface.
+# If this global variable is set True, the timestamps of maps will be written
+# as textfiles for each map that will be inserted or updated in the temporal
+# database using the C-library timestamp interface.
 # Overwrite this global variable by: g.gisenv set="TGIS_DISABLE_TIMESTAMP_WRITE=True"
-# ATTENTION: Be aware to face corrupted temporal database in case this global variable is set to False.
-#            This feature is highly experimental and violates the grass permission guidance.
+# ATTENTION: Be aware to face corrupted temporal database in case this global
+#            variable is set to False. This feature is highly
+#            experimental and violates the grass permission guidance.
 enable_timestamp_write = True
 
+
 def get_enable_mapset_check():
-    """Return True if the mapsets should be checked while insert, update, delete requests
-       and space time dataset registration.
+    """Return True if the mapsets should be checked while insert, update,
+       delete requests and space time dataset registration.
 
-       If this global variable is set True, then maps can only be registered in space time datasets
-       with the same mapset. In addition, only maps in the current mapset can be inserted, updated or deleted from
-       the temporal database.
+       If this global variable is set True, then maps can only be registered
+       in space time datasets with the same mapset. In addition, only maps in
+       the current mapset can be inserted, updated or deleted from the temporal
+       database.
        Overwrite this global variable by: g.gisenv set="TGIS_DISABLE_MAPSET_CHECK=True"
 
-       ATTENTION: Be aware to face corrupted temporal database in case this global variable is set to False.
-                  This feature is highly experimental and violates the grass permission guidance.
+       .. warning::
+
+           Be aware to face corrupted temporal database in case this
+           global variable is set to False. This feature is highly
+           experimental and violates the grass permission guidance.
+
     """
     global enable_mapset_check
     return enable_mapset_check
 
+
 def get_enable_timestamp_write():
-    """Return True if the map timestamps should be written to the spatial database metadata as well.
+    """Return True if the map timestamps should be written to the spatial
+       database metadata as well.
 
-       If this global variable is set True, the timestamps of maps will be written as textfiles
-       for each map that will be inserted or updated in the temporal database using the C-library
-       timestamp interface.
+       If this global variable is set True, the timestamps of maps will be
+       written as textfiles for each map that will be inserted or updated in
+       the temporal database using the C-library timestamp interface.
        Overwrite this global variable by: g.gisenv set="TGIS_DISABLE_TIMESTAMP_WRITE=True"
 
-       ATTENTION: Be aware that C-libraries can not access timestamp informations if they are not
-                  written as spatial database metadata, hence modules that make use of timestamps
-                  using the C-library interface will not work with maps that were created without
-                  writing the timestamps.
+       .. warning::
+
+           Be aware that C-libraries can not access timestamp informations if
+           they are not written as spatial database metadata, hence modules
+           that make use of timestamps using the C-library interface will not
+           work with maps that were created without writing the timestamps.
     """
     global enable_timestamp_write
     return enable_timestamp_write
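
Both getters above are meant to be queried by scripts after tgis.init(); a short usage sketch inside an existing GRASS session (the warning texts are made up):

import grass.script as gscript
import grass.temporal as tgis

tgis.init()
if not tgis.get_enable_mapset_check():
    gscript.warning("TGIS mapset checks are disabled, use with care")
if not tgis.get_enable_timestamp_write():
    gscript.warning("Map timestamps will not be written via the C-library")
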
@@ -209,18 +228,20 @@ def get_enable_timestamp_write():
 
 # The global variable that stores the PyGRASS Messenger object that
 # provides a fast and exit safe interface to the C-library message functions
-message_interface=None
+message_interface = None
+
 
 def _init_tgis_message_interface(raise_on_error=False):
     """Initiate the global mesage interface
 
-       :param raise_on_error: If True raise a FatalError exception in case of a fatal error,
-                             call sys.exit(1) otherwise
+       :param raise_on_error: If True raise a FatalError exception in case of
+                              a fatal error, call sys.exit(1) otherwise
     """
     global message_interface
     from grass.pygrass import messages
     message_interface = messages.get_msgr(raise_on_error=raise_on_error)
 
+
 def get_tgis_message_interface():
     """Return the temporal GIS message interface which is of type
        grass.pyhrass.message.Messenger()
@@ -236,7 +257,8 @@ def get_tgis_message_interface():
 # The global variable that stores the C-library interface object that
 # provides a fast and exit safe interface to the C-library libgis,
 # libraster, libraster3d and libvector functions
-c_library_interface=None
+c_library_interface = None
+
 
 def _init_tgis_c_library_interface():
     """Set the global C-library interface variable that
@@ -246,6 +268,7 @@ def _init_tgis_c_library_interface():
     global c_library_interface
     c_library_interface = CLibrariesInterface()
 
+
 def get_tgis_c_library_interface():
     """Return the C-library interface that
        provides a fast and exit safe interface to the C-library libgis,
@@ -260,6 +283,7 @@ def get_tgis_c_library_interface():
 # in case a fatal error occurs using the messenger interface
 raise_on_error = False
 
+
 def set_raise_on_error(raise_exp=True):
     """Define behavior on fatal error, invoked using the tgis messenger
     interface (msgr.fatal())
@@ -270,7 +294,7 @@ def set_raise_on_error(raise_exp=True):
                       sys.exit(1) when using the tgis messenger interface
 
     .. code-block:: python
-    
+
         >>> import grass.temporal as tgis
         >>> tgis.init()
         >>> ignore = tgis.set_raise_on_error(False)
@@ -316,6 +340,7 @@ def get_raise_on_error():
 
 ###############################################################################
 
+
 def get_tgis_version():
     """Get the version number of the temporal framework
        :returns: The version number of the temporal framework as string
@@ -325,6 +350,7 @@ def get_tgis_version():
 
 ###############################################################################
 
+
 def get_tgis_db_version():
     """Get the version number of the temporal framework
        :returns: The version number of the temporal framework as string
@@ -334,9 +360,10 @@ def get_tgis_db_version():
 
 ###############################################################################
 
+
 def get_tgis_metadata(dbif=None):
-    """Return the tgis metadata table as a list of rows (dicts)
-               or None if not present
+    """Return the tgis metadata table as a list of rows (dicts) or None if not
+       present
 
        :param dbif: The database interface to be used
        :returns: The selected rows with key/value columns or None
@@ -363,6 +390,7 @@ def get_tgis_metadata(dbif=None):
 # with substituted GRASS variables gisdbase, location and mapset
 tgis_database_string = None
 
+
 def get_tgis_database_string():
     """Return the preprocessed temporal database string
 
@@ -375,6 +403,7 @@ def get_tgis_database_string():
 
 ###############################################################################
 
+
 def get_sql_template_path():
     base = os.getenv("GISBASE")
     base_etc = os.path.join(base, "etc")
@@ -382,6 +411,7 @@ def get_sql_template_path():
 
 ###############################################################################
 
+
 def stop_subprocesses():
     """Stop the messenger and C-interface subprocesses
        that are started by tgis.init()
@@ -396,65 +426,73 @@ def stop_subprocesses():
 # We register this function to be called at exit
 atexit.register(stop_subprocesses)
 
+
 def get_available_temporal_mapsets():
     """Return a list of of mapset names with temporal database driver and names
         that are accessable from the current mapset.
-        
-        :returns: A dictionary, mapset names are keys, the tuple (driver, database) are the values 
+
+        :returns: A dictionary, mapset names are keys, the tuple (driver,
+                  database) are the values
     """
     global c_library_interface
-    
+
     mapsets = c_library_interface.available_mapsets()
-    
+
     tgis_mapsets = {}
-    
+
     for mapset in mapsets:
         driver = c_library_interface.get_driver_name(mapset)
         database = c_library_interface.get_database_name(mapset)
-        
+
         if driver and database:
             tgis_mapsets[mapset] = (driver,  database)
-            
+
     return tgis_mapsets
-    
 
 ###############################################################################
 
+
 def init(raise_fatal_error=False):
-    """This function set the correct database backend from GRASS environmental variables
-       and creates the grass temporal database structure for raster,
+    """This function set the correct database backend from GRASS environmental
+       variables and creates the grass temporal database structure for raster,
        vector and raster3d maps as well as for the space-time datasets strds,
        str3ds and stvds in case it does not exists.
 
-       Several global variables are initiated and the messenger and C-library interface
-       subprocesses are spawned.
+       Several global variables are initiated and the messenger and C-library
+       interface subprocesses are spawned.
+
+       Re-run this function in case the following GRASS variables change while
+       the process runs:
 
-       Re-run this function in case the following GRASS variables change while the process runs:
-       
        - MAPSET
        - LOCATION_NAME
        - GISDBASE
        - TGIS_DISABLE_MAPSET_CHECK
        - TGIS_DISABLE_TIMESTAMP_WRITE
 
-       Re-run this function if the following t.connect variables change while the process runs:
-       
+       Re-run this function if the following t.connect variables change while
+       the process runs:
+
        - temporal GIS driver (set by t.connect driver=)
        - temporal GIS database (set by t.connect database=)
 
        The following environmental variables are checked:
-       
+
         - GRASS_TGIS_PROFILE (True, False, 1, 0)
         - GRASS_TGIS_RAISE_ON_ERROR (True, False, 1, 0)
 
-        ATTENTION: This functions must be called before any spatio-temporal processing
-                   can be started
+        .. warning::
 
-        :param raise_fatal_error: Set this True to assure that the init() function
-                                 does not kill a persistent process like the GUI.
-                                 If set True a grass.pygrass.messages.FatalError
-                                 exception will be raised in case a fatal error occurs
-                                 in the init process, otherwise sys.exit(1) will be called.
+            This function must be called before any spatio-temporal processing
+            can be started
+
+        :param raise_fatal_error: Set this True to assure that the init()
+                                  function does not kill a persistent process
+                                  like the GUI. If set True a
+                                  grass.pygrass.messages.FatalError
+                                  exception will be raised in case a fatal
+                                  error occurs in the init process, otherwise
+                                  sys.exit(1) will be called.
     """
     # We need to set the correct database backend and several global variables
     # from the GRASS mapset specific environment variables of g.gisenv and t.connect
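
As the docstring above states, init() has to run once per process before any other spatio-temporal call, and again whenever the listed GRASS or t.connect settings change. A minimal usage sketch, assuming an active GRASS session:

import grass.temporal as tgis

tgis.init()                                    # set backend, spawn interfaces
print(tgis.get_tgis_backend())                 # 'sqlite' or 'pg'
print(tgis.get_current_mapset())
print(tgis.get_available_temporal_mapsets())   # {mapset: (driver, database)}
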
@@ -482,7 +520,8 @@ def init(raise_fatal_error=False):
     current_gisdbase = grassenv["GISDBASE"]
 
     # Check environment variable GRASS_TGIS_RAISE_ON_ERROR
-    if os.getenv("GRASS_TGIS_RAISE_ON_ERROR") == "True" or os.getenv("GRASS_TGIS_RAISE_ON_ERROR") == "1":
+    if os.getenv("GRASS_TGIS_RAISE_ON_ERROR") == "True" or \
+       os.getenv("GRASS_TGIS_RAISE_ON_ERROR") == "1":
         raise_on_error = True
 
     # Check if the script library raises on error,
@@ -496,19 +535,21 @@ def init(raise_fatal_error=False):
     _init_tgis_c_library_interface()
     msgr = get_tgis_message_interface()
     msgr.debug(1, "Initiate the temporal database")
-    
+
     ciface = get_tgis_c_library_interface()
     driver_string = ciface.get_driver_name()
     database_string = ciface.get_database_name()
 
     # Set the mapset check and the timestamp write
     if grassenv.has_key("TGIS_DISABLE_MAPSET_CHECK"):
-        if grassenv["TGIS_DISABLE_MAPSET_CHECK"] == "True" or grassenv["TGIS_DISABLE_MAPSET_CHECK"] == "1":
+        if grassenv["TGIS_DISABLE_MAPSET_CHECK"] == "True" or \
+           grassenv["TGIS_DISABLE_MAPSET_CHECK"] == "1":
             enable_mapset_check = False
             msgr.warning("TGIS_DISABLE_MAPSET_CHECK is True")
 
     if grassenv.has_key("TGIS_DISABLE_TIMESTAMP_WRITE"):
-        if grassenv["TGIS_DISABLE_TIMESTAMP_WRITE"] == "True" or grassenv["TGIS_DISABLE_TIMESTAMP_WRITE"] == "1":
+        if grassenv["TGIS_DISABLE_TIMESTAMP_WRITE"] == "True" or \
+           grassenv["TGIS_DISABLE_TIMESTAMP_WRITE"] == "1":
             enable_timestamp_write = False
             msgr.warning("TGIS_DISABLE_TIMESTAMP_WRITE is True")
 
@@ -518,7 +559,8 @@ def init(raise_fatal_error=False):
             try:
                 import sqlite3
             except ImportError:
-                msgr.error("Unable to locate the sqlite SQL Python interface module sqlite3.")
+                msgr.error("Unable to locate the sqlite SQL Python interface"
+                           " module sqlite3.")
                 raise
             dbmi = sqlite3
         elif driver_string == "pg":
@@ -526,12 +568,14 @@ def init(raise_fatal_error=False):
             try:
                 import psycopg2
             except ImportError:
-                msgr.error("Unable to locate the Postgresql SQL Python interface module psycopg2.")
+                msgr.error("Unable to locate the Postgresql SQL Python "
+                           "interface module psycopg2.")
                 raise
             dbmi = psycopg2
         else:
-            msgr.fatal(_("Unable to initialize the temporal DBMI interface. Please use "
-                         "t.connect to specify the driver and the database string"))
+            msgr.fatal(_("Unable to initialize the temporal DBMI interface. "
+                         "Please use t.connect to specify the driver and the"
+                         " database string"))
     else:
         # Set the default sqlite3 connection in case nothing was defined
         gscript.run_command("t.connect", flags="d")
@@ -554,7 +598,8 @@ def init(raise_fatal_error=False):
         if os.path.exists(tgis_database_string):
             dbif.connect()
             # Check for raster_base table
-            dbif.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='raster_base';")
+            dbif.execute("SELECT name FROM sqlite_master WHERE type='table' "
+                         "AND name='raster_base';")
             name = dbif.fetchone()
             if name and name[0] == "raster_base":
                 db_exists = True
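
The sqlite branch above decides whether the temporal database already exists by probing sqlite_master for the raster_base table. The same query against a throw-away in-memory database, as an illustration only:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE raster_base (id TEXT)")
cur = conn.execute("SELECT name FROM sqlite_master WHERE type='table' "
                   "AND name='raster_base';")
name = cur.fetchone()
print(bool(name and name[0] == "raster_base"))   # True
conn.close()
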
@@ -564,22 +609,27 @@ def init(raise_fatal_error=False):
         dbif.connect()
         # Check for raster_base table
         dbif.execute("SELECT EXISTS(SELECT * FROM information_schema.tables "
-                   "WHERE table_name=%s)", ('raster_base',))
+                     "WHERE table_name=%s)", ('raster_base',))
         if dbif.fetchone()[0]:
             db_exists = True
 
-    backup_howto = "The format of your actual temporal database is not supported any more.\n"\
-                   "Solution: You need to export it by restoring the GRASS GIS version used for creating this DB. "\
-                   "   From there, create a backup of your temporal database to avoid the loss of your temporal data.\n"\
-                   "Notes: Use t.rast.export and t.vect.export to make a backup of your existing space time datasets."\
-                   "To safe the timestamps of your existing maps and space time datasets, use t.rast.list, "\
-                   "t.vect.list and t.rast3d.list. "\
-                   "You can register the existing time stamped maps easily if you export columns=id,start_time,end_time "\
-                   "into text files and use t.register to register them again in new created space time datasets (t.create). "\
-                   "After the backup remove the existing temporal database, a new one will be created automatically.\n"
-
-
-    if db_exists == True:
+    backup_howto = "The format of your actual temporal database is not " \
+                   "supported any more.\nSolution: You need to export it by " \
+                   "restoring the GRASS GIS version used for creating this DB"\
+                   ". From there, create a backup of your temporal database "\
+                   "to avoid the loss of your temporal data.\nNotes: Use " \
+                   "t.rast.export and t.vect.export to make a backup of your" \
+                   " existing space time datasets.To safe the timestamps of" \
+                   " your existing maps and space time datasets, use " \
+                   "t.rast.list, t.vect.list and t.rast3d.list. "\
+                   "You can register the existing time stamped maps easily if"\
+                   " you export columns=id,start_time,end_time into text "\
+                   "files and use t.register to register them again in new" \
+                   " created space time datasets (t.create). After the backup"\
+                   " remove the existing temporal database, a new one will be"\
+                   " created automatically.\n"
+
+    if db_exists is True:
         # Check the version of the temporal database
         dbif.close()
         dbif.connect()
@@ -587,34 +637,41 @@ def init(raise_fatal_error=False):
         dbif.close()
         if metadata is None:
             msgr.fatal(_("Unable to receive temporal database metadata.\n"
-                         "Current temporal database info:%(info)s")%({"info":get_database_info_string()}))
+                         "Current temporal database info:%(info)s") % (
+                       {"info": get_database_info_string()}))
         for entry in metadata:
             if "tgis_version" in entry and entry[1] != str(get_tgis_version()):
-                msgr.fatal(_("Unsupported temporal database: version mismatch.\n %(backup)s"
-                             "Supported temporal API version is: %(api)i.\n"
-                             "Please update your GRASS GIS installation.\n"
-                             "Current temporal database info:%(info)s")%({"backup":backup_howto, "api":get_tgis_version(),
-                                                                          "info":get_database_info_string()}))
+                msgr.fatal(_("Unsupported temporal database: version mismatch."
+                             "\n %(backup)s Supported temporal API version is:"
+                             " %(api)i.\nPlease update your GRASS GIS "
+                             "installation.\nCurrent temporal database info:"
+                             "%(info)s") % ({"backup": backup_howto,
+                                             "api": get_tgis_version(),
+                                             "info": get_database_info_string()}))
             if "tgis_db_version" in entry and entry[1] != str(get_tgis_db_version()):
-                msgr.fatal(_("Unsupported temporal database: version mismatch.\n %(backup)s"
-                             "Supported temporal database version is: %(tdb)i\n"
-                             "Current temporal database info:%(info)s")%({"backup":backup_howto,"tdb":get_tgis_version(),
-                                                                          "info":get_database_info_string()}))
+                msgr.fatal(_("Unsupported temporal database: version mismatch."
+                             "\n %(backup)sSupported temporal database version"
+                             " is: %(tdb)i\nCurrent temporal database info:"
+                             "%(info)s") % ({"backup": backup_howto,
+                                             "tdb": get_tgis_version(),
+                                             "info": get_database_info_string()}))
         return
 
     create_temporal_database(dbif)
 
 ###############################################################################
 
+
 def get_database_info_string():
     dbif = SQLDatabaseInterfaceConnection()
 
-    info  = "\nDBMI interface:..... " + str(dbif.get_dbmi().__name__)
-    info += "\nTemporal database:.. " + str( get_tgis_database_string())
+    info = "\nDBMI interface:..... " + str(dbif.get_dbmi().__name__)
+    info += "\nTemporal database:.. " + str(get_tgis_database_string())
     return info
 
 ###############################################################################
 
+
 def create_temporal_database(dbif):
     """This function will create the temporal database
 
@@ -638,10 +695,10 @@ def create_temporal_database(dbif):
         get_sql_template_path(), "raster_metadata_table.sql"), 'r').read()
     raster3d_metadata_sql = open(os.path.join(template_path,
                                               "raster3d_metadata_table.sql"),
-                                              'r').read()
+                                 'r').read()
     vector_metadata_sql = open(os.path.join(template_path,
                                             "vector_metadata_table.sql"),
-                                            'r').read()
+                               'r').read()
     raster_views_sql = open(os.path.join(template_path, "raster_views.sql"),
                             'r').read()
     raster3d_views_sql = open(os.path.join(template_path,
@@ -651,16 +708,16 @@ def create_temporal_database(dbif):
 
     stds_tables_template_sql = open(os.path.join(template_path,
                                                  "stds_tables_template.sql"),
-                                                 'r').read()
+                                    'r').read()
     strds_metadata_sql = open(os.path.join(template_path,
                                            "strds_metadata_table.sql"),
-                                           'r').read()
+                              'r').read()
     str3ds_metadata_sql = open(os.path.join(template_path,
                                             "str3ds_metadata_table.sql"),
-                                            'r').read()
+                               'r').read()
     stvds_metadata_sql = open(os.path.join(template_path,
                                            "stvds_metadata_table.sql"),
-                                           'r').read()
+                              'r').read()
     strds_views_sql = open(os.path.join(template_path, "strds_views.sql"),
                            'r').read()
     str3ds_views_sql = open(os.path.join(template_path, "str3ds_views.sql"),
@@ -690,22 +747,24 @@ def create_temporal_database(dbif):
                 os.makedirs(tgis_dir)
             except Exception as e:
                 msgr.fatal(_("Unable to create SQLite temporal database\n"
-                                     "Exception: %s\nPlease use t.connect to set a "
-                                     "read- and writable temporal database path"%(e)))
-                
+                             "Exception: %s\nPlease use t.connect to set a "
+                             "read- and writable temporal database path" % (e)))
+
         # Set up the trigger that takes care of
         # the correct deletion of entries across the different tables
         delete_trigger_sql = open(os.path.join(template_path,
                                                "sqlite3_delete_trigger.sql"),
-                                               'r').read()
-        indexes_sql = open(os.path.join(template_path, "sqlite3_indexes.sql"), 'r').read()
+                                  'r').read()
+        indexes_sql = open(os.path.join(template_path, "sqlite3_indexes.sql"),
+                           'r').read()
     else:
         # Set up the trigger that takes care of
         # the correct deletion of entries across the different tables
         delete_trigger_sql = open(os.path.join(template_path,
-                                            "postgresql_delete_trigger.sql"),
-                                            'r').read()
-        indexes_sql = open(os.path.join(template_path, "postgresql_indexes.sql"), 'r').read()
+                                               "postgresql_delete_trigger.sql"),
+                                  'r').read()
+        indexes_sql = open(os.path.join(template_path,
+                                        "postgresql_indexes.sql"), 'r').read()
 
     # Connect now to the database
     if not dbif.connected:
@@ -751,6 +810,7 @@ def create_temporal_database(dbif):
 
 ###############################################################################
 
+
 def _create_tgis_metadata_table(content, dbif=None):
     """!Create the temporal gis metadata table which stores all metadata
        information about the temporal database.
@@ -765,7 +825,7 @@ def _create_tgis_metadata_table(content, dbif=None):
 
     for key in content.keys():
         statement = "INSERT INTO tgis_metadata (key, value) VALUES " + \
-                     "(\'%s\' , \'%s\');\n"%(str(key), str(content[key]))
+                    "(\'%s\' , \'%s\');\n" % (str(key), str(content[key]))
         dbif.execute_transaction(statement)
 
     if connected:
@@ -773,21 +833,22 @@ def _create_tgis_metadata_table(content, dbif=None):
 
 ###############################################################################
 
+
 class SQLDatabaseInterfaceConnection(object):
     def __init__(self):
         self.tgis_mapsets = get_available_temporal_mapsets()
         self.current_mapset = get_current_mapset()
         self.connections = {}
         self.connected = False
-        
+
         self.unique_connections = {}
-        
+
         for mapset in self.tgis_mapsets.keys():
             driver,  dbstring = self.tgis_mapsets[mapset]
-            
+
             if dbstring not in self.unique_connections.keys():
                 self.unique_connections[dbstring] = DBConnection(driver)
-            
+
             self.connections[mapset] = self.unique_connections[dbstring]
 
         self.msgr = get_tgis_message_interface()
@@ -817,21 +878,21 @@ class SQLDatabaseInterfaceConnection(object):
             conn = self.connections[mapset]
             if conn.is_connected() is False:
                 conn .connect(dbstring)
-                
+
         self.connected = True
-        
+
     def is_connected(self):
         return self.connected
 
     def close(self):
         """Close the DBMI connection
 
-           There may be several temporal databases in a location, hence 
-           close all temporal databases that have been opened. 
+           There may be several temporal databases in a location, hence
+           close all temporal databases that have been opened.
         """
         for key in self.unique_connections.keys():
             self.unique_connections[key] .close()
-        
+
         self.connected = False
 
     def mogrify_sql_statement(self, content, mapset=None):
@@ -842,15 +903,16 @@ class SQLDatabaseInterfaceConnection(object):
                            place holder (?), the second entry is the argument
                            list that should substitute the place holder.
            :param mapset: The mapset of the abstract dataset or temporal
-                          database location, if None the current mapset 
+                          database location, if None the current mapset
                           will be used
         """
         if mapset is None:
             mapset = self.current_mapset
-            
+
         if mapset not in self.tgis_mapsets.keys():
-            self.msgr.fatal(_("Unable to mogrify sql statement. There is no temporal database "
-                                       "connection defined for mapset <%(mapset)s>" % {"mapset":mapset}))
+            self.msgr.fatal(_("Unable to mogrify sql statement. There is no "
+                              "temporal database connection defined for "
+                              "mapset <%(mapset)s>" % {"mapset": mapset}))
 
         return self.connections[mapset].mogrify_sql_statement(content)
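
mogrify_sql_statement() and execute() operate on (statement, arguments) pairs whose placeholder style depends on the backend. A sketch of such a pair with the sqlite3 qmark style; the table content and values are made up:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tgis_metadata (key TEXT, value TEXT)")
content = ("INSERT INTO tgis_metadata (key, value) VALUES (?, ?)",
           ("tgis_db_version", "2"))
conn.execute(*content)
row = conn.execute("SELECT value FROM tgis_metadata WHERE key = ?",
                   ("tgis_db_version",)).fetchone()
print(row[0])   # 2
conn.close()
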
 
@@ -859,35 +921,38 @@ class SQLDatabaseInterfaceConnection(object):
 
            :param table_name: The name of the table to be checked for existence
            :param mapset: The mapset of the abstract dataset or temporal
-                          database location, if None the current mapset 
+                          database location, if None the current mapset
                           will be used
            :returns: True if the table exists, False otherwise
-           
+
            TODO:
-           There may be several temporal databases in a location, hence 
+           There may be several temporal databases in a location, hence
            the mapset is used to query the correct temporal database.
         """
         if mapset is None:
             mapset = self.current_mapset
 
         if mapset not in self.tgis_mapsets.keys():
-            self.msgr.fatal(_("Unable to check table. There is no temporal database "
-                                       "connection defined for mapset <%(mapset)s>" % {"mapset":mapset}))
+            self.msgr.fatal(_("Unable to check table. There is no temporal "
+                              "database connection defined for mapset "
+                              "<%(mapset)s>" % {"mapset": mapset}))
 
         return self.connections[mapset].check_table(table_name)
 
     def execute(self, statement, args=None, mapset=None):
-        """""
-           :param mapset: The mapset of the abstract dataset or temporal
-                          database location, if None the current mapset 
-                          will be used
+        """
+
+        :param mapset: The mapset of the abstract dataset or temporal
+                       database location, if None the current mapset
+                       will be used
         """
         if mapset is None:
             mapset = self.current_mapset
-            
+
         if mapset not in self.tgis_mapsets.keys():
-            self.msgr.fatal(_("Unable to execute sql statement. There is no temporal database "
-                                       "connection defined for mapset <%(mapset)s>" % {"mapset":mapset}))
+            self.msgr.fatal(_("Unable to execute sql statement. There is no "
+                              "temporal database connection defined for "
+                              "mapset <%(mapset)s>" % {"mapset": mapset}))
 
         return self.connections[mapset].execute(statement, args)
 
@@ -896,8 +961,9 @@ class SQLDatabaseInterfaceConnection(object):
             mapset = self.current_mapset
 
         if mapset not in self.tgis_mapsets.keys():
-            self.msgr.fatal(_("Unable to fetch one. There is no temporal database "
-                                       "connection defined for mapset <%(mapset)s>" % {"mapset":mapset}))
+            self.msgr.fatal(_("Unable to fetch one. There is no temporal "
+                              "database connection defined for mapset "
+                              "<%(mapset)s>" % {"mapset": mapset}))
 
         return self.connections[mapset].fetchone()
 
@@ -906,8 +972,9 @@ class SQLDatabaseInterfaceConnection(object):
             mapset = self.current_mapset
 
         if mapset not in self.tgis_mapsets.keys():
-            self.msgr.fatal(_("Unable to fetch all. There is no temporal database "
-                                       "connection defined for mapset <%(mapset)s>" % {"mapset":mapset}))
+            self.msgr.fatal(_("Unable to fetch all. There is no temporal "
+                              "database connection defined for mapset "
+                              "<%(mapset)s>" % {"mapset": mapset}))
 
         return self.connections[mapset].fetchall()
 
@@ -923,24 +990,27 @@ class SQLDatabaseInterfaceConnection(object):
             mapset = self.current_mapset
 
         if mapset not in self.tgis_mapsets.keys():
-            self.msgr.fatal(_("Unable to execute transaction. There is no temporal database "
-                                       "connection defined for mapset <%(mapset)s>" % {"mapset":mapset}))
+            self.msgr.fatal(_("Unable to execute transaction. There is no "
+                              "temporal database connection defined for "
+                              "mapset <%(mapset)s>" % {"mapset": mapset}))
 
         return self.connections[mapset].execute_transaction(statement)
- 
+
 ###############################################################################
 
+
 class DBConnection(object):
     """This class represents the database interface connection
        and provides access to the chosen backend modules.
 
        The following DBMS are supported:
-       
+
          - sqlite via the sqlite3 standard library
          - postgresql via psycopg2
 
     """
-    def __init__(self ,  backend=None):
+
+    def __init__(self, backend=None):
         self.connected = False
         if backend is None:
             global tgis_backend
@@ -960,7 +1030,7 @@ class DBConnection(object):
     def __del__(self):
         if self.connected is True:
             self.close()
-            
+
     def is_connected(self):
         return self.connected
 
@@ -988,7 +1058,7 @@ class DBConnection(object):
         try:
             if self.dbmi.__name__ == "sqlite3":
                 self.connection = self.dbmi.connect(dbstring,
-                        detect_types = self.dbmi.PARSE_DECLTYPES | self.dbmi.PARSE_COLNAMES)
+                        detect_types=self.dbmi.PARSE_DECLTYPES | self.dbmi.PARSE_COLNAMES)
                 self.connection.row_factory = self.dbmi.Row
                 self.connection.isolation_level = None
                 self.cursor = self.connection.cursor()
@@ -998,19 +1068,20 @@ class DBConnection(object):
                 self.connection = self.dbmi.connect(dbstring)
                 #self.connection.set_isolation_level(dbmi.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
                 self.cursor = self.connection.cursor(
-                    cursor_factory = self.dbmi.extras.DictCursor)
+                    cursor_factory=self.dbmi.extras.DictCursor)
             self.connected = True
         except Exception as e:
             self.msgr.fatal(_("Unable to connect to %(db)s database: "
-                               "%(string)s\nException: \"%(ex)s\"\nPlease use t.connect to set a "
-                               "read- and writable temporal database backend")%({"db":self.dbmi.__name__, 
-                                                                                 "string":tgis_database_string, 
-                                                                                 "ex":e,}))
+                              "%(string)s\nException: \"%(ex)s\"\nPlease use"
+                              " t.connect to set a read- and writable "
+                              "temporal database backend") % (
+                            {"db": self.dbmi.__name__,
+                             "string": tgis_database_string, "ex": e, }))
 
     def close(self):
         """Close the DBMI connection
            TODO:
-           There may be several temporal databases in a location, hence 
+           There may be several temporal databases in a location, hence
            close all temporal databases that have been opened. Use a dictionary
            to manage different connections.
         """
@@ -1020,7 +1091,7 @@ class DBConnection(object):
 
     def mogrify_sql_statement(self, content):
         """Return the SQL statement and arguments as executable SQL string
-        
+
            TODO:
            Use the mapset argument to identify the correct database driver
 
@@ -1029,19 +1100,19 @@ class DBConnection(object):
                            place holder (?), the second entry is the argument
                            list that should substitute the place holder.
            :param mapset: The mapset of the abstract dataset or temporal
-                          database location, if None the current mapset 
+                          database location, if None the current mapset
                           will be used
 
            Usage:
 
            .. code-block:: python
-           
+
                >>> init()
                >>> dbif = SQLDatabaseInterfaceConnection()
                >>> dbif.mogrify_sql_statement(["SELECT ctime FROM raster_base WHERE id = ?",
                ... ["soil@PERMANENT",]])
                "SELECT ctime FROM raster_base WHERE id = 'soil@PERMANENT'"
-           
+
         """
         sql = content[0]
         args = content[1]
@@ -1106,12 +1177,12 @@ class DBConnection(object):
 
            :param table_name: The name of the table to be checked for existence
            :param mapset: The mapset of the abstract dataset or temporal
-                          database location, if None the current mapset 
+                          database location, if None the current mapset
                           will be used
            :returns: True if the table exists, False otherwise
-           
+
            TODO:
-           There may be several temporal databases in a location, hence 
+           There may be several temporal databases in a location, hence
            the mapset is used to query the correct temporal database.
         """
         table_exists = False
@@ -1123,14 +1194,15 @@ class DBConnection(object):
         # Check if the database already exists
         if self.dbmi.__name__ == "sqlite3":
 
-            self.cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='%s';"%table_name)
+            self.cursor.execute("SELECT name FROM sqlite_master WHERE "
+                                "type='table' AND name='%s';" % table_name)
             name = self.cursor.fetchone()
             if name and name[0] == table_name:
                 table_exists = True
         else:
             # Check for raster_base table
             self.cursor.execute("SELECT EXISTS(SELECT * FROM information_schema.tables "
-                    "WHERE table_name=%s)", ('%s'%table_name,))
+                                "WHERE table_name=%s)", ('%s' % table_name,))
             if self.cursor.fetchone()[0]:
                 table_exists = True
 
@@ -1138,7 +1210,7 @@ class DBConnection(object):
             self.close()
 
         return table_exists
-    
+
     def execute(self, statement, args=None):
         """Execute a SQL statement
 
@@ -1156,12 +1228,13 @@ class DBConnection(object):
         except:
             if connected:
                 self.close()
-            self.msgr.error(_("Unable to execute :\n %(sql)s" % {"sql":statement}))
+            self.msgr.error(_("Unable to execute :\n %(sql)s" %
+                            {"sql": statement}))
             raise
 
         if connected:
             self.close()
-        
+
     def fetchone(self):
         if self.connected:
             return self.cursor.fetchone()
@@ -1199,7 +1272,8 @@ class DBConnection(object):
         except:
             if connected:
                 self.close()
-            self.msgr.error(_("Unable to execute transaction:\n %(sql)s" % {"sql":statement}))
+            self.msgr.error(_("Unable to execute transaction:\n %(sql)s" %
+                            {"sql": statement}))
             raise
 
         if connected:
@@ -1207,17 +1281,19 @@ class DBConnection(object):
 
 ###############################################################################
 
+
 def init_dbif(dbif):
     """This method checks if the database interface connection exists,
         if not a new one will be created, connected and True will be returned.
-        If the database interface exists but is connected, the connection will be established.
+        If the database interface exists but is not connected, the connection
+        will be established.
 
         :returns: the tuple (dbif, True|False)
 
         Usage code sample:
-  
+
         .. code-block:: python
-        
+
             dbif, connect = tgis.init_dbif(None)
 
             sql = dbif.mogrify_sql_statement(["SELECT * FROM raster_base WHERE ? = ?"],

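A minimal usage sketch of the connect/close pattern these classes support;
the queried table, column and map id follow the doctest above, everything
else is illustrative:

.. code-block:: python

    import grass.temporal as tgis

    tgis.init()
    dbif, connect = tgis.init_dbif(None)  # reuse or create a connection

    sql = dbif.mogrify_sql_statement(
        ["SELECT ctime FROM raster_base WHERE id = ?", ["soil@PERMANENT"]])
    dbif.execute(sql)
    row = dbif.fetchone()

    if connect:  # close only if init_dbif() opened the connection
        dbif.close()
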
+ 74 - 62
lib/python/temporal/datetime_math.py

@@ -8,7 +8,7 @@ for details.
 
 :authors: Soeren Gebbert
 """
-from datetime import datetime, date, time, timedelta
+from datetime import datetime, timedelta
 from core import *
 import copy
 
@@ -23,6 +23,7 @@ SECOND_AS_DAY = 1.1574074074074073e-05
 
 ###############################################################################
 
+
 def relative_time_to_time_delta(value):
     """Convert the double value representing days
        into a timedelta object.
@@ -69,6 +70,7 @@ def time_delta_to_relative_time_seconds(delta):
 
 ###############################################################################
 
+
 def decrement_datetime_by_string(mydate, increment, mult=1):
 
     """Return a new datetime object decremented with the provided
@@ -123,11 +125,12 @@ def decrement_datetime_by_string(mydate, increment, mult=1):
 
        :param mydate: A datetime object to incremented
        :param increment: A string providing increment information:
-                  The string may include comma separated values of type seconds,
-                  minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00
-                  with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
-                  will result in the datetime 2003-02-18 12:05:00
+                         The string may include comma separated values of type
+                         seconds, minutes, hours, days, weeks, months and years
+                         Example: Increment the datetime 2001-01-01 00:00:00
+                         with "60 seconds, 4 minutes, 12 hours, 10 days,
+                         1 weeks, 5 months, 1 years" will result in the
+                         datetime 2003-02-18 12:05:00
        :param mult: A multiplier, default is 1
        :return: The new datetime object or none in case of an error
     """
@@ -135,6 +138,7 @@ def decrement_datetime_by_string(mydate, increment, mult=1):
 
 ###############################################################################
 
+
 def increment_datetime_by_string(mydate, increment, mult=1):
     """Return a new datetime object incremented with the provided
        relative dates specified as string.
@@ -193,11 +197,12 @@ def increment_datetime_by_string(mydate, increment, mult=1):
 
        :param mydate: A datetime object to incremented
        :param increment: A string providing increment information:
-                  The string may include comma separated values of type seconds,
-                  minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00
-                  with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
-                  will result in the datetime 2003-02-18 12:05:00
+                         The string may include comma separated values of type
+                         seconds, minutes, hours, days, weeks, months and years
+                         Example: Increment the datetime 2001-01-01 00:00:00
+                         with "60 seconds, 4 minutes, 12 hours, 10 days,
+                         1 weeks, 5 months, 1 years" will result in the
+                         datetime 2003-02-18 12:05:00
        :param mult: A multiplier, default is 1
        :return: The new datetime object or none in case of an error
     """
@@ -205,6 +210,7 @@ def increment_datetime_by_string(mydate, increment, mult=1):
 
 ###############################################################################
 
+
 def modify_datetime_by_string(mydate, increment, mult=1, sign=1):
     """Return a new datetime object incremented with the provided
        relative dates specified as string.
@@ -213,17 +219,18 @@ def modify_datetime_by_string(mydate, increment, mult=1, sign=1):
 
        :param mydate: A datetime object to incremented
        :param increment: A string providing increment information:
-                  The string may include comma separated values of type seconds,
-                  minutes, hours, days, weeks, months and years
-                  Example: Increment the datetime 2001-01-01 00:00:00
-                  with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
-                  will result in the datetime 2003-02-18 12:05:00
+                         The string may include comma separated values of type
+                         seconds, minutes, hours, days, weeks, months and years
+                         Example: Increment the datetime 2001-01-01 00:00:00
+                         with "60 seconds, 4 minutes, 12 hours, 10 days,
+                         1 weeks, 5 months, 1 years" will result in the
+                         datetime 2003-02-18 12:05:00
        :param mult: A multiplier, default is 1
-       :param sign: Choose 1 for positive sign (incrementing) or -1 for negative
-                   sign (decrementing).
+       :param sign: Choose 1 for positive sign (incrementing) or -1 for
+                    negative sign (decrementing).
        :return: The new datetime object or none in case of an error
     """
-    sign  = int(sign)
+    sign = int(sign)
     if sign != 1 and sign != -1:
         return None
 
@@ -266,14 +273,16 @@ def modify_datetime_by_string(mydate, increment, mult=1, sign=1):
                 msgr.error(_("Wrong increment format: %s") % (increment))
                 return None
 
-        return modify_datetime(mydate, years, months, weeks, days, hours, minutes, seconds)
+        return modify_datetime(mydate, years, months, weeks, days, hours,
+                               minutes, seconds)
 
     return mydate
 
 ###############################################################################
 
+
 def modify_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0,
-                       minutes=0, seconds=0):
+                    minutes=0, seconds=0):
     """Return a new datetime object incremented with the provided
        relative dates and times"""
 
@@ -329,7 +338,8 @@ def modify_datetime(mydate, years=0, months=0, weeks=0, days=0, hours=0,
             residual_months += 12
 
         try:
-            dt1 = dt1.replace(year=year - years_to_remove, month=residual_months)
+            dt1 = dt1.replace(year=year - years_to_remove,
+                              month=residual_months)
         except:
             raise
 
@@ -474,7 +484,7 @@ def adjust_datetime_to_granularity(mydate, granularity):
 
         dt = copy.copy(mydate)
         return dt.replace(year=years, month=months, day=days,
-                            hour=hours, minute=minutes, second=seconds)
+                          hour=hours, minute=minutes, second=seconds)
 
 ###############################################################################
 
@@ -607,8 +617,8 @@ def compute_datetime_delta(start, end):
             >>> compute_datetime_delta(start, end)
             {'hour': 0, 'month': 12, 'second': 31622405, 'max_days': 366, 'year': 1, 'day': 0, 'minute': 0}
 
-
-       :return: A dictionary with year, month, day, hour, minute and second as keys()
+       :return: A dictionary with year, month, day, hour, minute and second as
+                keys()
     """
     comp = {}
 
@@ -624,7 +634,7 @@ def compute_datetime_delta(start, end):
     # Count full months
     if start.month == 1 and end.month == 1:
         comp["month"] = 0
-    elif   start.day == 1 and end.day == 1:
+    elif start.day == 1 and end.day == 1:
         d = end.month - start.month
         if d < 0:
             d = d + 12 * comp["year"]
@@ -633,7 +643,7 @@ def compute_datetime_delta(start, end):
         comp["month"] = d
 
     # Count full days
-    if  start.day == 1 and end.day == 1:
+    if start.day == 1 and end.day == 1:
         comp["day"] = 0
     else:
         comp["day"] = day_diff
@@ -698,7 +708,7 @@ def check_datetime_string(time_string):
     """Check if  a string can be converted into a datetime object
 
         Supported ISO string formats are:
-        
+
         - YYYY-mm-dd
         - YYYY-mm-dd HH:MM:SS
 
@@ -709,23 +719,23 @@ def check_datetime_string(time_string):
     """
 
     global has_dateutil
-        
+
     if has_dateutil:
-        # First check if there is only a single number, which specifies relative time.
-        # dateutil will interprete a single number as a valid time string, so we have
-        # to catch this case beforehand
+        # First check if there is only a single number, which specifies
+        # relative time. dateutil will interpret a single number as a valid
+        # time string, so we have to catch this case beforehand
         try:
             value = int(time_string)
             return _("Time string seems to specify relative time")
         except ValueError:
             pass
-        
+
         try:
             time_object = parser.parse(time_string)
         except Exception as inst:
             time_object = str(inst)
         return time_object
-        
+
     # BC is not supported
     if time_string.find("bc") > 0:
         return _("Dates Before Christ (BC) are not supported")
@@ -740,9 +750,9 @@ def check_datetime_string(time_string):
         time_format = "%Y-%m-%d"
 
     try:
-        return  datetime.strptime(time_string, time_format)
+        return datetime.strptime(time_string, time_format)
     except:
-        return _("Unable to parse time string: %s"%time_string)
+        return _("Unable to parse time string: %s" % time_string)
 
 ###############################################################################
 
@@ -751,17 +761,17 @@ def string_to_datetime(time_string):
     """Convert a string into a datetime object
 
         In case dateutil is not installed the supported ISO string formats are:
-        
+
         - YYYY-mm-dd
         - YYYY-mm-dd HH:MM:SS
         - Time zones are not supported
-        
+
         If dateutil is installed, all string formats of the dateutil module
         are supported, as well as time zones
 
         :param time_string: The time string to convert
-        :return: datetime: object or None in case the string 
-                         could not be converted
+        :return: datetime object or None in case the string
+                 could not be converted
     """
 
     if not isinstance(time_string, str):
@@ -780,9 +790,9 @@ def string_to_datetime(time_string):
 
 def datetime_to_grass_datetime_string(dt):
     """Convert a python datetime object into a GRASS datetime string
-    
+
     .. code-block:: python
-    
+
         >>> import grass.temporal as tgis
         >>> import dateutil.parser as parser
         >>> dt = parser.parse("2011-01-01 10:00:00 +01:30")
@@ -797,7 +807,7 @@ def datetime_to_grass_datetime_string(dt):
         >>> dt = parser.parse("2011-01-01 10:00:00 -01:30")
         >>> tgis.datetime_to_grass_datetime_string(dt)
         '01 jan 2011 10:00:00 -0090'
-    
+
     """
     # GRASS datetime month names
     month_names = ["", "jan", "feb", "mar", "apr", "may", "jun",
@@ -805,16 +815,18 @@ def datetime_to_grass_datetime_string(dt):
 
     # Check for time zone info in the datetime object
     if dt.tzinfo is not None:
-        
+
         tz = dt.tzinfo.utcoffset(0)
         if tz.seconds > 86400 / 2:
             tz = (tz.seconds - 86400) / 60
         else:
             tz = tz.seconds/60
-            
+
         string = "%.2i %s %.2i %.2i:%.2i:%.2i %+.4i" % (dt.day,
-                 month_names[dt.month], dt.year,
-                 dt.hour, dt.minute, dt.second, tz)
+                                                        month_names[dt.month],
+                                                        dt.year, dt.hour,
+                                                        dt.minute, dt.second,
+                                                        tz)
     else:
         string = "%.2i %s %.4i %.2i:%.2i:%.2i" % (dt.day, month_names[
             dt.month], dt.year, dt.hour, dt.minute, dt.second)
@@ -822,32 +834,32 @@ def datetime_to_grass_datetime_string(dt):
     return string
 
 ###############################################################################
-suffix_units = {"years" : "%Y", 
-                         "year" : "%Y",  
-                         "months" : "%Y_%m", 
-                         "month" : "%Y_%m", 
-                         "weeks" : "%Y_%m_%d",  
-                         "week" : "%Y_%m_%d",  
-                         "days" : "%Y_%m_%d",  
-                         "day" : "%Y_%m_%d",  
-                         "hours" : "%Y_%m_%d_%H",  
-                         "hour" : "%Y_%m_%d_%H",  
-                         "minutes" : "%Y_%m_%d_%H_%M",
-                         "minute" : "%Y_%m_%d_%H_%M",} 
+suffix_units = {"years": "%Y",
+                "year": "%Y",
+                "months": "%Y_%m",
+                "month": "%Y_%m",
+                "weeks": "%Y_%m_%d",
+                "week": "%Y_%m_%d",
+                "days": "%Y_%m_%d",
+                "day": "%Y_%m_%d",
+                "hours": "%Y_%m_%d_%H",
+                "hour": "%Y_%m_%d_%H",
+                "minutes": "%Y_%m_%d_%H_%M",
+                "minute": "%Y_%m_%d_%H_%M"}
 
 
 def create_suffix_from_datetime(start_time, granularity):
     """Create a datetime string based on a datetime object and a provided
        granularity that can be used as suffix for map names.
-       
+
+       datetime=2001-01-01 00:00:00, granularity="1 month" returns "2001_01"
-       
+
        :param start_time: The datetime object
        :param granularity: The granularity for example "1 month" or "100 seconds"
        :return: A string
     """
     global suffix_units
-    return start_time.strftime(suffix_units[granularity.split(' ')[1]]) 
+    return start_time.strftime(suffix_units[granularity.split(' ')[1]])
 
 if __name__ == "__main__":
     import doctest

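A short sketch of the increment and suffix helpers cleaned up above; the
values and expected results are assumptions for illustration only:

.. code-block:: python

    from datetime import datetime
    import grass.temporal as tgis

    tgis.init()
    start = datetime(2001, 1, 1)
    # "1 months, 2 days" should yield 2001-02-03 00:00:00
    tgis.increment_datetime_by_string(start, "1 months, 2 days")
    # a "1 month" granularity maps to the suffix "2001_01"
    tgis.create_suffix_from_datetime(start, "1 month")
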
+ 20 - 21
lib/python/temporal/extract.py

@@ -59,8 +59,7 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
 
     sp = open_old_stds(input, type, dbif)
     # Check the new stds
-    new_sp = check_new_stds(output, type, dbif,
-                                          gscript.overwrite())
+    new_sp = check_new_stds(output, type, dbif, gscript.overwrite())
     if type == "vector":
         rows = sp.get_registered_maps(
             "id,name,mapset,layer", where, "start_time", dbif)
@@ -86,7 +85,8 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
                     msgr.percent(count, num_rows, 1)
 
                 map_name = "{base}_{suffix}".format(base=base,
-                                                    suffix=get_num_suffix(count, num_rows))
+                                                    suffix=get_num_suffix(count,
+                                                                          num_rows))
 
                 # We need to modify the r(3).mapcalc expression
                 if type != "vector":
@@ -98,7 +98,8 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
                     # We need to build the id
                     map_id = AbstractMapDataset.build_id(map_name, mapset)
                 else:
-                    map_id = AbstractMapDataset.build_id(map_name, mapset, row["layer"])
+                    map_id = AbstractMapDataset.build_id(map_name, mapset,
+                                                         row["layer"])
 
                 new_map = sp.get_new_map_instance(map_id)
 
@@ -130,16 +131,16 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
                                  % expression)
                     if row["layer"]:
                         proc_list.append(Process(target=run_vector_extraction,
-                                                 args=(row["name"] + "@" + \
-                                                       row["mapset"],
-                                                 map_name, row["layer"],
-                                                 vtype, expression)))
+                                                 args=(row["name"] + "@" +
+                                                       row["mapset"], map_name,
+                                                       row["layer"], vtype,
+                                                       expression)))
                     else:
                         proc_list.append(Process(target=run_vector_extraction,
-                                                 args=(row["name"] + "@" + \
-                                                       row["mapset"],
-                                                 map_name, layer, vtype,
-                                                 expression)))
+                                                 args=(row["name"] + "@" +
+                                                       row["mapset"], map_name,
+                                                       layer, vtype,
+                                                       expression)))
 
                 proc_list[proc_count].start()
                 proc_count += 1
@@ -165,11 +166,9 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
         msgr.percent(0, num_rows, 1)
 
         temporal_type, semantic_type, title, description = sp.get_initial_values()
-        new_sp = open_new_stds(output, type,
-                                             sp.get_temporal_type(),
-                                             title, description,
-                                             semantic_type, dbif,
-                                             gscript.overwrite())
+        new_sp = open_new_stds(output, type, sp.get_temporal_type(), title,
+                               description, semantic_type, dbif,
+                               gscript.overwrite())
 
         # collect empty maps to remove them
         empty_maps = []
@@ -251,17 +250,17 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
 def run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
     exit(gscript.run_command("r.mapcalc", expression=expr,
-                            overwrite=gscript.overwrite(), quiet=True))
+                             overwrite=gscript.overwrite(), quiet=True))
 
 
 def run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
     exit(gscript.run_command("r3.mapcalc", expression=expr,
-                            overwrite=gscript.overwrite(), quiet=True))
+                             overwrite=gscript.overwrite(), quiet=True))
 
 
 def run_vector_extraction(input, output, layer, type, where):
     """Helper function to run r.mapcalc in parallel"""
     exit(gscript.run_command("v.extract", input=input, output=output,
-                            layer=layer, type=type, where=where,
-                            overwrite=gscript.overwrite(), quiet=True))
+                             layer=layer, type=type, where=where,
+                             overwrite=gscript.overwrite(), quiet=True))

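A hedged sketch of calling ``extract_dataset()`` directly, roughly what
t.rast.extract does; the dataset names and the expression are illustrative:

.. code-block:: python

    import grass.temporal as tgis

    tgis.init()
    # the input STRDS name inside the expression is replaced per map
    tgis.extract_dataset(input="tempmean_monthly", output="warm_months",
                         type="raster",
                         where="start_time >= '2010-06-01'",
                         expression="if(tempmean_monthly > 20.0, "
                                    "tempmean_monthly, null())",
                         base="warm", nprocs=2)
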
+ 0 - 1
lib/python/temporal/factory.py

@@ -48,4 +48,3 @@ def dataset_factory(type, id):
         return None
 
     return sp
-

+ 9 - 8
lib/python/temporal/gui_support.py

@@ -16,19 +16,19 @@ import grass.script as gscript
 
 ###############################################################################
 
-def tlist_grouped(type, group_type = False, dbif=None):
+
+def tlist_grouped(type, group_type=False, dbif=None):
     """List of temporal elements grouped by mapsets.
 
-    Returns a dictionary where the keys are mapset 
+    Returns a dictionary where the keys are mapset
     names and the values are lists of space time datasets in that
     mapset. Example:
 
     .. code-block:: python
-    
+
         >>> tgis.tlist_grouped('strds')['PERMANENT']
         ['precipitation', 'temperature']
 
-    
     :param type: element type (strds, str3ds, stvds)
     :param group_type: TBD
 
@@ -36,7 +36,7 @@ def tlist_grouped(type, group_type = False, dbif=None):
     """
     result = {}
     dbif, connected = init_dbif(dbif)
-    
+
     mapset = None
     if type == 'stds':
         types = ['strds', 'str3ds', 'stvds']
@@ -65,7 +65,7 @@ def tlist_grouped(type, group_type = False, dbif=None):
             if group_type:
                 if type in result[mapset]:
                     result[mapset][type].append(name)
-                else:        
+                else:
                     result[mapset][type] = [name, ]
             else:
                 result[mapset].append(name)
@@ -77,9 +77,10 @@ def tlist_grouped(type, group_type = False, dbif=None):
 
 ###############################################################################
 
+
 def tlist(type, dbif=None):
     """Return a list of space time datasets of absolute and relative time
-     
+
     :param type: element type (strds, str3ds, stvds)
 
     :return: a list of space time dataset ids
@@ -87,7 +88,7 @@ def tlist(type, dbif=None):
     id = None
     sp = dataset_factory(type, id)
     dbif, connected = init_dbif(dbif)
-    
+
     mapsets = get_available_temporal_mapsets()
 
     output = []

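With ``group_type=True`` the per-mapset value becomes a dictionary keyed by
element type, as the loop above shows; a small sketch (output depends on the
current location):

.. code-block:: python

    import grass.temporal as tgis

    tgis.init()
    grouped = tgis.tlist_grouped('stds', group_type=True)
    # e.g. {'PERMANENT': {'strds': ['precipitation', 'temperature']}}
    ids = tgis.tlist('strds')  # flat list of space time dataset ids
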
+ 44 - 32
lib/python/temporal/list_stds.py

@@ -24,28 +24,33 @@ from open_stds import *
 
 ###############################################################################
 
-def get_dataset_list(type,  temporal_type,  columns=None,  where=None,  order=None):
-    """ Return a list of time stamped maps or space time datasets of a specific temporal type
-         that are registred in the temporal database
-    
-         This method returns a dictionary, the keys are the available mapsets, 
-         the values are the rows from the SQL database query.
-
-        :param type: The type of the datasets (strds, str3ds, stvds, rast, rast3d, vect)
-        :param temporal_type: The temporal type of the datasets (absolute, relative)
+
+def get_dataset_list(type, temporal_type, columns=None, where=None,
+                     order=None):
+    """ Return a list of time stamped maps or space time datasets of a specific
+        temporal type that are registered in the temporal database
+
+        This method returns a dictionary, the keys are the available mapsets,
+        the values are the rows from the SQL database query.
+
+        :param type: The type of the datasets (strds, str3ds, stvds, rast,
+                     rast3d, vect)
+        :param temporal_type: The temporal type of the datasets (absolute,
+                              relative)
         :param columns: A comma separated list of columns that will be selected
         :param where: A where statement for selected listing without "WHERE"
         :param order: A comma separated list of columns to order the
-                               datasets by category
-                      
-        :return: A dictionary with the rows of the SQL query for each available mapset
-        
+                      datasets by category
+
+        :return: A dictionary with the rows of the SQL query for each
+                 available mapset
+
         .. code-block:: python
-        
+
             >>> import grass.temporal as tgis
             >>> tgis.init()
             >>> name = "list_stds_test"
-            >>> sp = tgis.open_new_stds(name=name, type="strds", 
+            >>> sp = tgis.open_new_stds(name=name, type="strds",
             ... temporaltype="absolute", title="title", descr="descr", semantic="mean", dbif=None, overwrite=True)
             >>> mapset = tgis.get_current_mapset()
             >>> stds_list = tgis.get_dataset_list("strds", "absolute", columns="name")
@@ -68,13 +73,13 @@ def get_dataset_list(type,  temporal_type,  columns=None,  where=None,  order=No
 
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
-    
+
     mapsets = get_available_temporal_mapsets()
-    
+
     result = {}
-    
+
     for mapset in mapsets.keys():
-        
+
         if temporal_type == "absolute":
             table = sp.get_type() + "_view_abs_time"
         else:
@@ -87,24 +92,26 @@ def get_dataset_list(type,  temporal_type,  columns=None,  where=None,  order=No
 
         if where:
             sql += " WHERE " + where
-            sql += " AND mapset = '%s'"%(mapset)
+            sql += " AND mapset = '%s'" % (mapset)
         else:
-            sql += " WHERE mapset = '%s'"%(mapset)
+            sql += " WHERE mapset = '%s'" % (mapset)
 
         if order:
             sql += " ORDER BY " + order
 
         dbif.execute(sql, mapset=mapset)
         rows = dbif.fetchall(mapset=mapset)
-        
+
         if rows:
             result[mapset] = rows
-        
+
     return result
-        
+
 ###############################################################################
 
-def list_maps_of_stds(type, input, columns, order, where, separator, method, no_header=False, gran=None):
+
+def list_maps_of_stds(type, input, columns, order, where, separator,
+                      method, no_header=False, gran=None):
     """ List the maps of a space time dataset using diffetent methods
 
         :param type: The type of the maps raster, raster3d or vector
@@ -117,7 +124,7 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, no_
         :param separator: The field separator character between the columns
         :param method: String identifier to select a method out of cols,
                        comma, delta or deltagaps
-                       
+
             - "cols" Print preselected columns specified by columns
             - "comma" Print the map ids ("name@mapset") as comma separated string
             - "delta" Print the map ids ("name@mapset") with start time,
@@ -127,10 +134,11 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, no_
               Gaps can be simply identified as the id is "None"
             - "gran" List map using the granularity of the space time dataset,
               columns are identical to deltagaps
-                      
+
         :param no_header: Suppress the printing of column names
-        :param gran: The user defined granule to be used if method=gran is set, in case gran=None the
-            granule of the space time dataset is used
+        :param gran: The user defined granule to be used if method=gran is
+                     set, in case gran=None the granule of the space time
+                     dataset is used
     """
 
     dbif, connected = init_dbif(None)
@@ -148,12 +156,16 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, no_
         else:
             columns = "id,name,mapset,start_time,end_time"
         if method == "deltagaps":
-            maps = sp.get_registered_maps_as_objects_with_gaps(where=where, dbif=dbif)
+            maps = sp.get_registered_maps_as_objects_with_gaps(where=where,
+                                                               dbif=dbif)
         elif method == "delta":
-            maps = sp.get_registered_maps_as_objects(where=where, order="start_time", dbif=dbif)
+            maps = sp.get_registered_maps_as_objects(where=where,
+                                                     order="start_time",
+                                                     dbif=dbif)
         elif method == "gran":
             if gran is not None and gran != "":
-                maps = sp.get_registered_maps_as_objects_by_granularity(gran=gran, dbif=dbif)
+                maps = sp.get_registered_maps_as_objects_by_granularity(gran=gran,
+                                                                        dbif=dbif)
             else:
                 maps = sp.get_registered_maps_as_objects_by_granularity(dbif=dbif)
 

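Building on the doctest above, a sketch of the ``where`` and ``order``
arguments in the new keyword layout (the name pattern is illustrative):

.. code-block:: python

    import grass.temporal as tgis

    tgis.init()
    stds = tgis.get_dataset_list("strds", "absolute",
                                 columns="name,start_time",
                                 where="name LIKE 'precip%'",
                                 order="start_time DESC")
    # one dictionary entry per mapset that has matching datasets
    names = [row["name"] for rows in stds.values() for row in rows]
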
+ 29 - 32
lib/python/temporal/mapcalc.py

@@ -28,7 +28,7 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
        the r.mapcalc operators:
 
        Supported operators for relative and absolute time are:
-       
+
        - td() - the time delta of the current interval in days
                 and fractions of days or the unit in case of relative time
        - start_time() - The start time of the interval from the begin of
@@ -39,7 +39,7 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
                       unit in case of relative time
 
        Supported operators for absolute time:
-       
+
        - start_doy() - Day of year (doy) from the start time [1 - 366]
        - start_dow() - Day of week (dow) from the start time [1 - 7],
                        the start of the week is monday == 1
@@ -97,8 +97,7 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
         sp = open_old_stds(input, type, dbif)
         input_list.append(copy.copy(sp))
 
-    new_sp = check_new_stds(output, type, dbif,
-                                         gscript.overwrite())
+    new_sp = check_new_stds(output, type, dbif, gscript.overwrite())
 
     # Sample all inputs by the first input and create a sample matrix
     if spatial:
@@ -282,10 +281,8 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
 
         temporal_type, semantic_type, title, description = first_input.get_initial_values()
 
-        new_sp = open_new_stds(output, type,
-                                         temporal_type, title, description,
-                                         semantic_type, dbif,
-                                         gscript.overwrite())
+        new_sp = open_new_stds(output, type, temporal_type, title, description,
+                               semantic_type, dbif, gscript.overwrite())
         count = 0
 
         # collect empty maps to remove them
@@ -344,7 +341,7 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
 def _run_mapcalc2d(expr):
     """Helper function to run r.mapcalc in parallel"""
     exit(gscript.run_command("r.mapcalc", expression=expr,
-                            overwrite=gscript.overwrite(), quiet=True))
+                             overwrite=gscript.overwrite(), quiet=True))
 
 ###############################################################################
 
@@ -352,7 +349,7 @@ def _run_mapcalc2d(expr):
 def _run_mapcalc3d(expr):
     """Helper function to run r3.mapcalc in parallel"""
     exit(gscript.run_command("r3.mapcalc", expression=expr,
-                            overwrite=gscript.overwrite(), quiet=True))
+                             overwrite=gscript.overwrite(), quiet=True))
 
 ###############################################################################
 
@@ -362,7 +359,7 @@ def _operator_parser(expr, first, current):
        the temporal operators with numerical values.
 
        Supported operators for relative and absolute time are:
-       
+
        - td() - the time delta of the current interval in days
          and fractions of days or the unit in case of relative time
        - start_time() - The start time of the interval from the begin of the
@@ -373,7 +370,7 @@ def _operator_parser(expr, first, current):
                       unit in case of relative time
 
        Supported operators for absolute time:
-       
+
        - start_doy() - Day of year (doy) from the start time [1 - 366]
        - start_dow() - Day of week (dow) from the start time [1 - 7],
                        the start of the week is monday == 1
@@ -431,55 +428,55 @@ def _parse_start_operators(expr, is_time_absolute, current):
 
     if expr.find("start_year()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_year()", str(start.year))
 
     if expr.find("start_month()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_month()", str(start.month))
 
     if expr.find("start_week()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_week()", str(start.isocalendar()[1]))
 
     if expr.find("start_day()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_day()", str(start.day))
 
     if expr.find("start_hour()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_hour()", str(start.hour))
 
     if expr.find("start_minute()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_minute()", str(start.minute))
 
     if expr.find("start_second()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_second()", str(start.second))
 
     if expr.find("start_dow()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         expr = expr.replace("start_dow()", str(start.isoweekday()))
 
     if expr.find("start_doy()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("start_*")))
         year = datetime(start.year, 1, 1)
         delta = start - year
@@ -514,7 +511,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_year()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_year()", "null()")
@@ -523,7 +520,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_month()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_month()", "null()")
@@ -532,7 +529,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_week()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_week()", "null()")
@@ -541,7 +538,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_day()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_day()", "null()")
@@ -550,7 +547,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_hour()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_hour()", "null()")
@@ -559,7 +556,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_minute()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_minute()", "null()")
@@ -568,7 +565,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_second()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_second()", "null()")
@@ -577,7 +574,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_dow()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_dow()", "null()")
@@ -586,7 +583,7 @@ def _parse_end_operators(expr, is_time_absolute, current):
 
     if expr.find("end_doy()") >= 0:
         if not is_time_absolute:
-            msgr.fatal(_("The temporal operators <%s> support only absolute "\
+            msgr.fatal(_("The temporal operators <%s> support only absolute "
                          "time." % ("end_*")))
         if not end:
             expr = expr.replace("end_doy()", "null()")
@@ -613,11 +610,11 @@ def _parse_td_operator(expr, is_time_absolute, first, current):
         td = "null()"
         if is_time_absolute:
             start, end = current.get_absolute_time()
-            if end != None:
+            if end is not None:
                 td = time_delta_to_relative_time(end - start)
         else:
             start, end, unit = current.get_relative_time()
-            if end != None:
+            if end is not None:
                 td = end - start
         expr = expr.replace("td()", str(td))
 

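For context, an expression using the operators listed above could look like
the sketch below; each operator is replaced by a number computed from the
current map's time stamp before r.mapcalc runs (dataset name illustrative):

.. code-block:: python

    # daily mean from interval sums, restricted to the summer months
    expression = ("if(start_month() >= 6 && start_month() <= 8, "
                  "precipitation_sum / td(), null())")
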
+ 42 - 35
lib/python/temporal/metadata.py

@@ -74,8 +74,8 @@ class RasterMetadataBase(SQLDatabaseInterface):
 
     """
     def __init__(self, table=None, ident=None, datatype=None, cols=None,
-		rows=None, number_of_cells=None, nsres=None, ewres=None,
-		min=None, max=None):
+                 rows=None, number_of_cells=None, nsres=None, ewres=None,
+                 min=None, max=None):
 
         SQLDatabaseInterface.__init__(self, table, ident)
 
@@ -224,7 +224,8 @@ class RasterMetadataBase(SQLDatabaseInterface):
     datatype = property(fget=get_datatype, fset=set_datatype)
     cols = property(fget=get_cols, fset=set_cols)
     rows = property(fget=get_rows, fset=set_rows)
-    number_of_cells = property(fget=get_number_of_cells, fset=set_number_of_cells)
+    number_of_cells = property(fget=get_number_of_cells,
+                               fset=set_number_of_cells)
     nsres = property(fget=get_nsres, fset=set_nsres)
     ewres = property(fget=get_ewres, fset=set_ewres)
     min = property(fget=get_min, fset=set_min)
@@ -313,12 +314,12 @@ class RasterMetadata(RasterMetadataBase):
 
     """
     def __init__(self, ident=None, datatype=None,
-		 cols=None, rows=None, number_of_cells=None, nsres=None,
-		 ewres=None, min=None, max=None):
+                 cols=None, rows=None, number_of_cells=None, nsres=None,
+                 ewres=None, min=None, max=None):
 
         RasterMetadataBase.__init__(self, "raster_metadata", ident, datatype,
-                                      cols, rows, number_of_cells, nsres,
-                                      ewres, min, max)
+                                    cols, rows, number_of_cells, nsres,
+                                    ewres, min, max)
 
     def print_info(self):
         """Print information about this class in human readable style"""
@@ -399,12 +400,12 @@ class Raster3DMetadata(RasterMetadataBase):
 
     """
     def __init__(self, ident=None, datatype=None,
-		 cols=None, rows=None, depths=None, number_of_cells=None,
-		 nsres=None, ewres=None, tbres=None, min=None, max=None):
+                 cols=None, rows=None, depths=None, number_of_cells=None,
+                 nsres=None, ewres=None, tbres=None, min=None, max=None):
 
         RasterMetadataBase.__init__(self, "raster3d_metadata", ident,
-				datatype, cols, rows, number_of_cells, nsres,
-				ewres, min, max)
+                                    datatype, cols, rows, number_of_cells,
+                                    nsres, ewres, min, max)
 
         self.set_tbres(tbres)
         self.set_depths(depths)
@@ -536,12 +537,13 @@ class VectorMetadata(SQLDatabaseInterface):
             volumes=12
 
     """
-    def __init__(
-        self, ident=None, is_3d=False,
-        number_of_points=None, number_of_lines=None, number_of_boundaries=None,
-        number_of_centroids=None, number_of_faces=None, number_of_kernels=None,
-        number_of_primitives=None, number_of_nodes=None, number_of_areas=None,
-        number_of_islands=None, number_of_holes=None, number_of_volumes=None):
+    def __init__(self, ident=None, is_3d=False, number_of_points=None,
+                 number_of_lines=None, number_of_boundaries=None,
+                 number_of_centroids=None, number_of_faces=None,
+                 number_of_kernels=None, number_of_primitives=None,
+                 number_of_nodes=None, number_of_areas=None,
+                 number_of_islands=None, number_of_holes=None,
+                 number_of_volumes=None):
 
         SQLDatabaseInterface.__init__(self, "vector_metadata", ident)
 
@@ -731,8 +733,8 @@ class VectorMetadata(SQLDatabaseInterface):
             return None
 
     # Set the properties
-    id  = property(fget=get_id, fset=set_id)
-    is_3d  = property(fget=get_3d_info, fset=set_3d_info)
+    id = property(fget=get_id, fset=set_id)
+    is_3d = property(fget=get_3d_info, fset=set_3d_info)
     number_of_points = property(fget=get_number_of_points,
                                 fset=set_number_of_points)
     number_of_lines = property(fget=get_number_of_lines,
@@ -826,7 +828,8 @@ class STDSMetadataBase(SQLDatabaseInterface):
             number_of_maps=None
 
     """
-    def __init__(self, table=None, ident=None, title=None, description=None, command=None):
+    def __init__(self, table=None, ident=None, title=None, description=None,
+                 command=None):
 
         SQLDatabaseInterface.__init__(self, table, ident)
 
@@ -897,10 +900,10 @@ class STDSMetadataBase(SQLDatabaseInterface):
         else:
             return None
 
-    id  = property(fget=get_id, fset=set_id)
-    title  = property(fget=get_title, fset=set_title)
-    description  = property(fget=get_description, fset=set_description)
-    number_of_maps  = property(fget=get_number_of_maps)
+    id = property(fget=get_id, fset=set_id)
+    title = property(fget=get_title, fset=set_title)
+    description = property(fget=get_description, fset=set_description)
+    number_of_maps = property(fget=get_number_of_maps)
 
     def print_info(self):
         """Print information about this class in human readable style"""
@@ -967,7 +970,7 @@ class STDSRasterMetadataBase(STDSMetadataBase):
         Usage:
 
         .. code-block:: python
-        
+
             >>> init()
             >>> meta = STDSRasterMetadataBase(ident="soils@PERMANENT",
             ... title="Soils", description="Soils 1950 - 2010")
@@ -1016,7 +1019,8 @@ class STDSRasterMetadataBase(STDSMetadataBase):
             max_max=None
 
     """
-    def __init__(self, table=None, ident=None, title=None, description=None, aggregation_type=None):
+    def __init__(self, table=None, ident=None, title=None, description=None,
+                 aggregation_type=None):
 
         STDSMetadataBase.__init__(self, table, ident, title, description)
 
@@ -1043,7 +1047,7 @@ class STDSRasterMetadataBase(STDSMetadataBase):
             return self.D["aggregation_type"]
         else:
             return None
-            
+
     def get_max_min(self):
         """Get the minimal maximum of all registered maps,
            this value is set in the database
@@ -1132,7 +1136,7 @@ class STDSRasterMetadataBase(STDSMetadataBase):
     min_max = property(fget=get_min_max)
     max_min = property(fget=get_max_min)
     max_max = property(fget=get_max_max)
-    aggregation_type = property(fset=set_aggregation_type, 
+    aggregation_type = property(fset=set_aggregation_type,
                                 fget=get_aggregation_type)
 
     def print_info(self):
@@ -1232,7 +1236,8 @@ class STRDSMetadata(STDSRasterMetadataBase):
             raster_register=None
 
     """
-    def __init__(self, ident=None, raster_register=None, title=None, description=None):
+    def __init__(self, ident=None, raster_register=None, title=None,
+                 description=None):
 
         STDSRasterMetadataBase.__init__(
             self, "strds_metadata", ident, title, description)
@@ -1343,7 +1348,8 @@ class STR3DSMetadata(STDSRasterMetadataBase):
             raster3d_register=None
 
         """
-    def __init__(self, ident=None, raster3d_register=None, title=None, description=None):
+    def __init__(self, ident=None, raster3d_register=None, title=None,
+                 description=None):
 
         STDSRasterMetadataBase.__init__(
             self, "str3ds_metadata", ident, title, description)
@@ -1385,7 +1391,7 @@ class STR3DSMetadata(STDSRasterMetadataBase):
             return None
 
     raster3d_register = property(fget=get_raster3d_register,
-                               fset=set_raster3d_register)
+                                 fset=set_raster3d_register)
     tbres_min = property(fget=get_tbres_min)
     tbres_max = property(fget=get_tbres_max)
 
@@ -1409,6 +1415,7 @@ class STR3DSMetadata(STDSRasterMetadataBase):
 
 ###############################################################################
 
+
 class STVDSMetadata(STDSMetadataBase):
     """This is the space time vector dataset metadata class
 
@@ -1485,8 +1492,8 @@ class STVDSMetadata(STDSMetadataBase):
             volumes=None
 
     """
-    def __init__(
-        self, ident=None, vector_register=None, title=None, description=None):
+    def __init__(self, ident=None, vector_register=None, title=None,
+                 description=None):
 
         STDSMetadataBase.__init__(
             self, "stvds_metadata", ident, title, description)
@@ -1638,8 +1645,8 @@ class STVDSMetadata(STDSMetadataBase):
             return None
 
     # Set the properties
-    vector_register  = property(fget=get_vector_register,
-                                fset=set_vector_register)
+    vector_register = property(fget=get_vector_register,
+                               fset=set_vector_register)
     number_of_points = property(fget=get_number_of_points)
     number_of_lines = property(fget=get_number_of_lines)
     number_of_boundaries = property(fget=get_number_of_boundaries)
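
For reference, a minimal sketch of how the STRDS metadata class above is typically instantiated, following the doctest style used elsewhere in this file; the identifier, title and description are purely illustrative:

    import grass.temporal as tgis

    tgis.init()
    # illustrative identifier; any existing or planned STRDS id works here
    meta = tgis.STRDSMetadata(ident="soils@PERMANENT", title="Soils",
                              description="Soils 1950 - 2010")
    meta.print_info()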

+ 23 - 17
lib/python/temporal/open_stds.py

@@ -22,18 +22,20 @@ from factory import *
 
 ###############################################################################
 
+
 def open_old_stds(name, type, dbif=None):
     """This function opens an existing space time dataset and return the
        created and intialized object of the specified type.
 
-       This function will call exit() or raise a grass.pygrass.messages.FatalError in case the type is wrong,
+       This function will call exit() or raise a
+       grass.pygrass.messages.FatalError in case the type is wrong,
        or the space time dataset was not found.
 
        :param name: The name of the space time dataset, if the name does not
                     contain the mapset (name@mapset) then the current mapset
                    will be used to identify the space time dataset
       :param type: The type of the space time dataset (strds, str3ds, stvds,
-                                                       raster, vector, raster3d)
+                    raster, vector, raster3d)
        :param dbif: The optional database interface to be used
 
     """
@@ -60,8 +62,8 @@ def open_old_stds(name, type, dbif=None):
     if not sp.is_in_db(dbif):
         dbif.close()
         msgr.fatal(_("Space time %(sp)s dataset <%(name)s> no found") %
-                     {'sp': sp.get_new_map_instance(None).get_type(),
-                      'name': name})
+                   {'sp': sp.get_new_map_instance(None).get_type(),
+                    'name': name})
 
     # Read content from temporal database
     sp.select(dbif)
@@ -72,12 +74,13 @@ def open_old_stds(name, type, dbif=None):
 
 ###############################################################################
 
+
 def check_new_stds(name, type, dbif=None, overwrite=False):
     """Check if a new space time dataset of a specific type can be created
 
        :param name: The name of the new space time dataset
-       :param type: The type of the new space time dataset (strd, str3ds, stvds,
-                                                      raster, vector, raster3d)
+       :param type: The type of the new space time dataset (strds, str3ds,
+                    stvds, raster, vector, raster3d)
        :param dbif: The temporal database interface to be used
        :param overwrite: Flag to allow overwriting
 
@@ -87,7 +90,7 @@ def check_new_stds(name, type, dbif=None, overwrite=False):
        This function will raise a FatalError in case of an error.
     """
 
-    #Get the current mapset to create the id of the space time dataset
+    # Get the current mapset to create the id of the space time dataset
 
     mapset = get_current_mapset()
     msgr = get_tgis_message_interface()
@@ -115,9 +118,9 @@ def check_new_stds(name, type, dbif=None, overwrite=False):
 
     if sp.is_in_db(dbif) and overwrite is False:
         msgr.fatal(_("Space time %(sp)s dataset <%(name)s> is already in the"
-                      " database. Use the overwrite flag.") % {
-                      'sp': sp.get_new_map_instance(None).get_type(),
-                      'name': name})
+                     " database. Use the overwrite flag.") % {
+                   'sp': sp.get_new_map_instance(None).get_type(),
+                   'name': name})
     if connected:
         dbif.close()
 
@@ -125,13 +128,14 @@ def check_new_stds(name, type, dbif=None, overwrite=False):
 
 ###############################################################################
 
+
 def open_new_stds(name, type, temporaltype, title, descr, semantic,
-                              dbif=None, overwrite=False):
+                  dbif=None, overwrite=False):
     """Create a new space time dataset of a specific type
 
        :param name: The name of the new space time dataset
-       :param type: The type of the new space time dataset (strd, str3ds, stvds,
-                                                      raster, vector, raster3d)
+       :param type: The type of the new space time dataset (strds, str3ds,
+                    stvds, raster, vector, raster3d)
        :param temporaltype: The temporal type (relative or absolute)
        :param title: The title
        :param descr: The dataset description
@@ -145,19 +149,19 @@ def open_new_stds(name, type, temporaltype, title, descr, semantic,
     """
     dbif, connected = init_dbif(dbif)
     msgr = get_tgis_message_interface()
-    sp =  check_new_stds(name, type, dbif, overwrite)
+    sp = check_new_stds(name, type, dbif, overwrite)
 
     if sp.is_in_db(dbif):
         msgr.warning(_("Overwriting space time %(sp)s dataset <%(name)s> and "
                        "unregistering all maps") % {
-                       'sp': sp.get_new_map_instance(None).get_type(),
-                       'name': name})
+                     'sp': sp.get_new_map_instance(None).get_type(),
+                     'name': name})
         id = sp.get_id()
         sp.delete(dbif)
         sp = sp.get_new_instance(id)
 
     msgr.verbose(_("Creating a new space time %s dataset") %
-                   sp.get_new_map_instance(None).get_type())
+                 sp.get_new_map_instance(None).get_type())
 
     sp.set_initial_values(temporal_type=temporaltype, semantic_type=semantic,
                           title=title, description=descr)
@@ -171,6 +175,7 @@ def open_new_stds(name, type, temporaltype, title, descr, semantic,
 
 ############################################################################
 
+
 def check_new_map_dataset(name, layer=None, type="raster",
                           overwrite=False, dbif=None):
     """Check if a new map dataset of a specific type can be created in
@@ -208,6 +213,7 @@ def check_new_map_dataset(name, layer=None, type="raster",
 
 ############################################################################
 
+
 def open_new_map_dataset(name, layer=None, type="raster",
                          temporal_extent=None, overwrite=False,
                          dbif=None):
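
For reference, a minimal usage sketch of open_old_stds() and open_new_stds() as defined above; the dataset names are hypothetical and a running GRASS session with an initialized temporal database is assumed:

    import grass.temporal as tgis

    tgis.init()
    # open an existing space time raster dataset in the current mapset
    strds = tgis.open_old_stds("temperature_monthly", "strds")
    # create (or overwrite) a new, empty STRDS with absolute time
    new_strds = tgis.open_new_stds("temperature_mean", "strds",
                                   temporaltype="absolute",
                                   title="Mean temperature",
                                   descr="Monthly mean temperature",
                                   semantic="mean", overwrite=True)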

+ 42 - 39
lib/python/temporal/register.py

@@ -36,8 +36,9 @@ def register_maps_in_space_time_dataset(
        registered maps.
 
        :param type: The type of the maps (rast, rast3d or vect)
-       :param name: The name of the space time dataset. Maps will be registered in the
-                   temporal database if the name was set to None
+       :param name: The name of the space time dataset. Maps will be
+                    registered in the temporal database if the name was set
+                    to None
        :param maps: A comma separated list of map names
        :param file: Input file, one map per line with start and optional
                    end time
@@ -56,8 +57,8 @@ def register_maps_in_space_time_dataset(
        :param interval: If True, time intervals are created in case the start
                        time and an increment is provided
        :param fs: Field separator used in input file
-       :param update_cmd:_list If is True, the command that was invoking this process
-                              will be written to the process history
+       :param update_cmd_list: If True, the command that invoked this
+                               process will be written to the process history
     """
     start_time_in_file = False
     end_time_in_file = False
@@ -75,10 +76,12 @@ def register_maps_in_space_time_dataset(
         msgr.fatal(_("%s= and %s= are mutually exclusive") % ("maps", "file"))
 
     if end and increment:
-        msgr.fatal(_("%s= and %s= are mutually exclusive") % ("end", "increment"))
+        msgr.fatal(_("%s= and %s= are mutually exclusive") % ("end",
+                                                              "increment"))
 
     if end and not start:
-        msgr.fatal(_("Please specify %s= and %s=") % ("start_time", "end_time"))
+        msgr.fatal(_("Please specify %s= and %s=") % ("start_time",
+                                                      "end_time"))
 
     if not maps and not file:
         msgr.fatal(_("Please specify %s= or %s=") % ("maps", "file"))
@@ -94,9 +97,8 @@ def register_maps_in_space_time_dataset(
             dbif.close()
             msgr.fatal(_("Space time %(sp)s dataset <%(name)s> with relative"
                          " time found, but no relative unit set for %(sp)s "
-                         "maps") % {
-                         'sp': sp.get_new_map_instance(None).get_type(),
-                         'name': name})
+                         "maps") % {'name': name,
+                       'sp': sp.get_new_map_instance(None).get_type()})
 
     maplist = []
 
@@ -166,12 +168,11 @@ def register_maps_in_space_time_dataset(
 
         # Get a new instance of the map type
         map = dataset_factory(type, maplist[count]["id"])
-        
+
         if map.map_exists() is not True:
             msgr.fatal(_("Unable to update %(t)s map <%(id)s>. "
-                            "The map does not exist.") %
-                            {'t': map.get_type(),
-                            'id': map.get_map_id()})
+                         "The map does not exist.") % {'t': map.get_type(),
+                                                       'id': map.get_map_id()})
 
         # Use the time data from file
         if "start" in maplist[count]:
@@ -190,20 +191,22 @@ def register_maps_in_space_time_dataset(
                     msgr.fatal(_("Unable to register %(t)s map <%(id)s> with "
                                  "layer %(l)s. The map has timestamp and "
                                  "the start time is not set.") % {
-                                 't': map.get_type(), 'id': map.get_map_id(),
-                                 'l': map.get_layer()})
+                               't': map.get_type(), 'id': map.get_map_id(),
+                               'l': map.get_layer()})
                 else:
                     msgr.fatal(_("Unable to register %(t)s map <%(id)s>. The"
                                  " map has no timestamp and the start time "
                                  "is not set.") % {'t': map.get_type(),
                                                    'id': map.get_map_id()})
-            if start != "" and start != None:
+            if start != "" and start is not None:
                 # We need to check if the time is absolute and the unit was specified
                 time_object = check_datetime_string(start)
                 if isinstance(time_object, datetime) and unit:
-                    msgr.fatal(_("%(u)s= can only be set for relative time") % {'u': "unit"})
+                    msgr.fatal(_("%(u)s= can only be set for relative time") %
+                               {'u': "unit"})
                 if not isinstance(time_object, datetime) and not unit:
-                    msgr.fatal(_("%(u)s= must be set in case of relative time stamps") % {'u': "unit"})
+                    msgr.fatal(_("%(u)s= must be set in case of relative time"
+                                 " stamps") % {'u': "unit"})
 
                 if unit:
                     map.set_time_to_relative()
@@ -224,8 +227,8 @@ def register_maps_in_space_time_dataset(
                 else:
                     msgr.warning(_("Map is already registered in temporal "
                                    "database. Unable to update %(t)s map "
-                                   "<%(id)s>. Overwrite flag is not set.") % {
-                                   't': map.get_type(), 'id': map.get_map_id()})
+                                   "<%(id)s>. Overwrite flag is not set.") %
+                                 {'t': map.get_type(), 'id': map.get_map_id()})
 
                 # Simple registration is allowed
                 if name:
@@ -249,13 +252,13 @@ def register_maps_in_space_time_dataset(
                         msgr.fatal(_("Unable to update %(t)s map <%(id)s> "
                                      "with layer %(l)s. The temporal types "
                                      "are different.") % {'t': map.get_type(),
-                                                        'id': map.get_map_id(),
-                                                        'l': map.get_layer()})
+                                                          'id': map.get_map_id(),
+                                                          'l': map.get_layer()})
                     else:
                         msgr.fatal(_("Unable to update %(t)s map <%(id)s>. "
                                      "The temporal types are different.") %
-                                     {'t': map.get_type(),
-                                      'id': map.get_map_id()})
+                                   {'t': map.get_type(),
+                                    'id': map.get_map_id()})
 
         # Load the data from the grass file database
         map.load()
@@ -331,7 +334,7 @@ def register_maps_in_space_time_dataset(
             ds.select(dbif)
             ds.update_from_registered_maps(dbif)
 
-    if connected == True:
+    if connected is True:
         dbif.close()
 
     msgr.percent(num_maps, num_maps, 1)
@@ -392,12 +395,12 @@ def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None,
 
         if map.get_layer():
             msgr.debug(1, _("Set absolute valid time for map <%(id)s> with "
-                           "layer %(layer)s to %(start)s - %(end)s") %
-                         {'id': map.get_map_id(), 'layer': map.get_layer(),
-                          'start': str(start_time), 'end': str(end_time)})
+                            "layer %(layer)s to %(start)s - %(end)s") %
+                       {'id': map.get_map_id(), 'layer': map.get_layer(),
+                        'start': str(start_time), 'end': str(end_time)})
         else:
-            msgr.debug(1, _("Set absolute valid time for map <%s> to %s - %s") %
-                         (map.get_map_id(), str(start_time), str(end_time)))
+            msgr.debug(1, _("Set absolute valid time for map <%s> to %s - %s")
+                       % (map.get_map_id(), str(start_time), str(end_time)))
 
         map.set_absolute_time(start_time, end_time)
     else:
@@ -413,14 +416,14 @@ def assign_valid_time_to_map(ttype, map, start, end, unit, increment=None,
                 end_time = start_time + int(increment)
 
         if map.get_layer():
-            msgr.debug(1, _("Set relative valid time for map <%s> with layer %s "
-                           "to %i - %s with unit %s") %
-                         (map.get_map_id(), map.get_layer(), start_time,
-                          str(end_time), unit))
+            msgr.debug(1, _("Set relative valid time for map <%s> with layer"
+                            " %s to %i - %s with unit %s") %
+                       (map.get_map_id(), map.get_layer(), start_time,
+                       str(end_time), unit))
         else:
             msgr.debug(1, _("Set relative valid time for map <%s> to %i - %s "
                             "with unit %s") % (map.get_map_id(), start_time,
-                            str(end_time), unit))
+                                               str(end_time), unit))
 
         map.set_relative_time(start_time, end_time, unit)
 
@@ -431,14 +434,14 @@ def register_map_object_list(type,  map_list, output_stds,
                              delete_empty, unit, dbif=None):
     """Register a list of AbstractMapDataset objects in the temporal database
        and optional in a space time dataset.
-       
+
        :param type: The type of the map layer (rast, rast3d, vect)
        :param map_list: List of AbstractMapDataset objects
        :param output_stds: The output stds
        :param delete_empty: Set True to delete empty map layer found in the map_list
        :param unit: The temporal unit of the space time dataset
        :param dbif: The database interface to be used
-       
+
     """
     import grass.pygrass.modules as pymod
     import copy
@@ -456,7 +459,7 @@ def register_map_object_list(type,  map_list, output_stds,
 
         if delete_empty:
             if map_layer.metadata.get_min() is None and \
-                map_layer.metadata.get_max() is None:
+               map_layer.metadata.get_max() is None:
                 empty_maps.append(map_layer)
                 continue
 
@@ -482,7 +485,7 @@ def register_map_object_list(type,  map_list, output_stds,
     # Remove empty maps
     if len(empty_maps) > 0:
         for map in empty_maps:
-            if  map.is_in_db(dbif):
+            if map.is_in_db(dbif):
                 map.delete(dbif)
             mod = copy.deepcopy(g_remove)
             mod(type='rast', pattern=map.get_name())
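
For reference, a minimal usage sketch of register_maps_in_space_time_dataset() with the keyword arguments documented above; the map and dataset names are hypothetical and the maps as well as the target STRDS are assumed to exist already:

    import grass.temporal as tgis

    tgis.init()
    # register two raster maps as monthly intervals starting 2000-01-01
    tgis.register_maps_in_space_time_dataset(
        type="raster", name="temperature_monthly",
        maps="temp_jan,temp_feb",
        start="2000-01-01", increment="1 months", interval=True)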

+ 16 - 11
lib/python/temporal/sampling.py

@@ -19,6 +19,7 @@ for details.
 
 from factory import *
 
+
 def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header,
                                  separator, method, spatial=False,
                                  print_only=True):
@@ -34,19 +35,22 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header,
 
         Attention: Do not use the comma as separator for printing
 
-        :param intype:  Type of the input space time dataset (strds, stvds or str3ds)
-        :param sampletype: Type of the sample space time datasets (strds, stvds or str3ds)
-        :param inputs: Name or comma separated names of space time datasets or a list of map names
+        :param intype: Type of the input space time dataset (strds, stvds or
+                       str3ds)
+        :param sampletype: Type of the sample space time datasets (strds,
+                           stvds or str3ds)
+        :param inputs: Name or comma separated names of space time datasets or
+                       a list of map names
         :param sampler: Name of a space time dataset used for temporal sampling
         :param header: Set True to print column names
         :param separator: The field separator character between the columns
         :param method: The method to be used for temporal sampling
-                       (start,during,contain,overlap,equal) as comma separated string
-                       or as a list of methods
+                       (start,during,contain,overlap,equal) as comma separated
+                       string or as a list of methods
         :param spatial: Perform spatial overlapping check
-        :param print_only: If set True (default) then the result of the sampling will be
-                    printed to stdout, if set to False the resulting map matrix
-                    will be returned.
+        :param print_only: If set True (default) then the result of the
+                           sampling will be printed to stdout, if set to False
+                           the resulting map matrix will be returned.
 
         :return: The map matrix or None if nothing found
     """
@@ -83,11 +87,12 @@ def sample_stds_by_stds_topology(intype, sampletype, inputs, sampler, header,
     dbif.connect()
 
     for st in sts:
-        if st.is_in_db(dbif) == False:
-            msgr.fatal(_("Dataset <%s> not found in temporal database") % (st.get_id()))
+        if st.is_in_db(dbif) is False:
+            msgr.fatal(_("Dataset <%s> not found in temporal database")
+                       % (st.get_id()))
         st.select(dbif)
 
-    if sst.is_in_db(dbif) == False:
+    if sst.is_in_db(dbif) is False:
         msgr.fatal(_("Dataset <%s> not found in temporal database") % (sid))
 
     sst.select(dbif)
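
For reference, a minimal usage sketch of sample_stds_by_stds_topology() using the parameters documented above; the dataset names are hypothetical and both datasets are assumed to be registered in the temporal database:

    import grass.temporal as tgis

    tgis.init()
    # sample one STRDS against another and return the map matrix
    matrix = tgis.sample_stds_by_stds_topology(
        intype="strds", sampletype="strds",
        inputs="temperature_monthly", sampler="climate_periods",
        header=False, separator="|", method="during",
        spatial=False, print_only=False)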

+ 73 - 62
lib/python/temporal/space_time_datasets.py

@@ -9,12 +9,12 @@ for details.
 :authors: Soeren Gebbert
 """
 import getpass
-import logging
 from abstract_map_dataset import *
 from abstract_space_time_dataset import *
 
 ###############################################################################
 
+
 class RasterDataset(AbstractMapDataset):
     """Raster dataset class
 
@@ -115,7 +115,7 @@ class RasterDataset(AbstractMapDataset):
            :return: True if this class is a space time dataset, False otherwise
         """
         return False
-        
+
     def get_type(self):
         return 'raster'
 
@@ -214,11 +214,11 @@ class RasterDataset(AbstractMapDataset):
             return False
 
         check, dates = self.ciface.read_raster_timestamp(self.get_name(),
-                                                      self.get_mapset(),)
+                                                         self.get_mapset(),)
 
         if check < 1:
             self.msgr.error(_("Unable to read timestamp file "
-                         "for raster map <%s>" % (self.get_map_id())))
+                              "for raster map <%s>" % (self.get_map_id())))
             return False
 
         if len(dates) == 2:
@@ -242,12 +242,12 @@ class RasterDataset(AbstractMapDataset):
 
         if check == -1:
             self.msgr.error(_("Unable to create timestamp file "
-                         "for raster map <%s>" % (self.get_map_id())))
+                              "for raster map <%s>" % (self.get_map_id())))
             return False
 
         if check == -2:
-            self.msgr.error(_("Invalid datetime in timestamp for raster map <%s>" %
-                         (self.get_map_id())))
+            self.msgr.error(_("Invalid datetime in timestamp for raster map "
+                              "<%s>" % (self.get_map_id())))
             return False
 
         if check == -3:
@@ -268,8 +268,8 @@ class RasterDataset(AbstractMapDataset):
                                                     self.get_mapset())
 
         if check == -1:
-            self.msgr.error(_("Unable to remove timestamp for raster map <%s>" %
-                         (self.get_name())))
+            self.msgr.error(_("Unable to remove timestamp for raster map <%s>"
+                            % (self.get_name())))
             return False
 
         return True
@@ -284,12 +284,13 @@ class RasterDataset(AbstractMapDataset):
 
     def load(self):
         """Load all info from an existing raster map into the internal structure
-            
-            This method checks first if the map exists, in case it exists
-            the metadata of the map is put into this object and True is returned.
-            
-            :return: True is the map exists and the metadata was filled successfully
-                          and getting the data was successfull, False otherwise
+
+           This method first checks if the map exists; if it does, the
+           metadata of the map is put into this object and True is returned.
+
+           :return: True if the map exists and the metadata was filled
+                    successfully and the data could be read, False otherwise
         """
 
         if self.map_exists() is not True:
@@ -323,13 +324,14 @@ class RasterDataset(AbstractMapDataset):
             self.metadata.set_cols(cols)
             self.metadata.set_rows(rows)
             self.metadata.set_number_of_cells(ncells)
-            
+
             return True
 
         return False
 
 ###############################################################################
 
+
 class Raster3DDataset(AbstractMapDataset):
     """Raster3d dataset class
 
@@ -431,7 +433,7 @@ class Raster3DDataset(AbstractMapDataset):
            :return: True if this class is a space time dataset, False otherwise
         """
         return False
-        
+
     def get_type(self):
         return "raster3d"
 
@@ -494,8 +496,9 @@ class Raster3DDataset(AbstractMapDataset):
             return self.spatial_extent.disjoint_union_2d(dataset.spatial_extent)
 
     def get_np_array(self):
-        """Return this 3D raster map as memmap numpy style array to access the 3D raster
-           values in numpy style without loading the whole map in the RAM.
+        """Return this 3D raster map as memmap numpy style array to access the
+           3D raster values in numpy style without loading the whole map in
+           the RAM.
 
            In case this 3D raster map exists in the grass spatial database,
            the map will be exported using r3.out.bin to a temporary location
@@ -530,7 +533,7 @@ class Raster3DDataset(AbstractMapDataset):
            :return: True if success, False on error
         """
         return self.ciface.has_raster3d_timestamp(self.get_name(),
-                                                self.get_mapset())
+                                                  self.get_mapset())
 
     def read_timestamp_from_grass(self):
         """Read the timestamp of this map from the map metadata
@@ -545,11 +548,11 @@ class Raster3DDataset(AbstractMapDataset):
             return False
 
         check, dates = self.ciface.read_raster3d_timestamp(self.get_name(),
-                                                      self.get_mapset(),)
+                                                           self.get_mapset(),)
 
         if check < 1:
             self.msgr.error(_("Unable to read timestamp file "
-                         "for 3D raster map <%s>" % (self.get_map_id())))
+                              "for 3D raster map <%s>" % (self.get_map_id())))
             return False
 
         if len(dates) == 2:
@@ -573,12 +576,12 @@ class Raster3DDataset(AbstractMapDataset):
 
         if check == -1:
             self.msgr.error(_("Unable to create timestamp file "
-                         "for 3D raster map <%s>" % (self.get_map_id())))
+                              "for 3D raster map <%s>" % (self.get_map_id())))
             return False
 
         if check == -2:
-            self.msgr.error(_("Invalid datetime in timestamp for 3D raster map <%s>" %
-                         (self.get_map_id())))
+            self.msgr.error(_("Invalid datetime in timestamp for 3D raster "
+                              "map <%s>" % (self.get_map_id())))
             return False
 
         if check == -3:
@@ -596,8 +599,8 @@ class Raster3DDataset(AbstractMapDataset):
                                                       self.get_mapset())
 
         if check == -1:
-            self.msgr.error(_("Unable to remove timestamp for raster map <%s>" %
-                         (self.get_name())))
+            self.msgr.error(_("Unable to remove timestamp for raster map "
+                              "<%s>" % (self.get_name())))
             return False
 
         return True
@@ -612,12 +615,13 @@ class Raster3DDataset(AbstractMapDataset):
 
     def load(self):
         """Load all info from an existing 3d raster map into the internal structure
-            
-            This method checks first if the map exists, in case it exists
-            the metadata of the map is put into this object and True is returned.
-            
-            :return: True is the map exists and the metadata was filled successfully
-                         and getting the data was successfull, False otherwise
+
+           This method first checks if the map exists; if it does, the
+           metadata of the map is put into this object and True is returned.
+
+           :return: True if the map exists and the metadata was filled
+                    successfully and the data could be read, False otherwise
         """
 
         if self.map_exists() is not True:
@@ -628,12 +632,15 @@ class Raster3DDataset(AbstractMapDataset):
 
         # Fill spatial extent
         kvp = self.ciface.read_raster3d_info(self.get_name(),
-                                           self.get_mapset())
+                                             self.get_mapset())
 
         if kvp:
-            self.set_spatial_extent_from_values(north=kvp["north"], south=kvp["south"],
-                                    east=kvp["east"], west=kvp["west"],
-                                    top=kvp["top"], bottom=kvp["bottom"])
+            self.set_spatial_extent_from_values(north=kvp["north"],
+                                                south=kvp["south"],
+                                                east=kvp["east"],
+                                                west=kvp["west"],
+                                                top=kvp["top"],
+                                                bottom=kvp["bottom"])
 
             # Fill metadata
             self.metadata.set_nsres(kvp["nsres"])
@@ -653,13 +660,14 @@ class Raster3DDataset(AbstractMapDataset):
             self.metadata.set_rows(rows)
             self.metadata.set_depths(depths)
             self.metadata.set_number_of_cells(ncells)
-            
+
             return True
 
         return False
 
 ###############################################################################
 
+
 class VectorDataset(AbstractMapDataset):
     """Vector dataset class
 
@@ -753,7 +761,7 @@ class VectorDataset(AbstractMapDataset):
            :return: True if this class is a space time dataset, False otherwise
         """
         return False
-        
+
     def get_type(self):
         return "vector"
 
@@ -822,7 +830,6 @@ class VectorDataset(AbstractMapDataset):
                                                 self.get_mapset(),
                                                 self.get_layer())
 
-
     def read_timestamp_from_grass(self):
         """Read the timestamp of this map from the map metadata
            in the grass file system based spatial database and
@@ -834,11 +841,11 @@ class VectorDataset(AbstractMapDataset):
             return False
 
         check, dates = self.ciface.read_vector_timestamp(self.get_name(),
-                                                      self.get_mapset(),)
+                                                         self.get_mapset(),)
 
         if check < 1:
             self.msgr.error(_("Unable to read timestamp file "
-                         "for vector map <%s>" % (self.get_map_id())))
+                              "for vector map <%s>" % (self.get_map_id())))
             return False
 
         if len(dates) == 2:
@@ -861,12 +868,12 @@ class VectorDataset(AbstractMapDataset):
 
         if check == -1:
             self.msgr.error(_("Unable to create timestamp file "
-                         "for vector map <%s>" % (self.get_map_id())))
+                              "for vector map <%s>" % (self.get_map_id())))
             return False
 
         if check == -2:
-            self.msgr.error(_("Invalid datetime in timestamp for vector map <%s>" %
-                         (self.get_map_id())))
+            self.msgr.error(_("Invalid datetime in timestamp for vector "
+                              "map <%s>" % (self.get_map_id())))
             return False
 
         return True
@@ -881,8 +888,8 @@ class VectorDataset(AbstractMapDataset):
                                                     self.get_mapset())
 
         if check == -1:
-            self.msgr.error(_("Unable to remove timestamp for vector map <%s>" %
-                         (self.get_name())))
+            self.msgr.error(_("Unable to remove timestamp for vector "
+                              "map <%s>" % (self.get_name())))
             return False
 
         return True
@@ -895,22 +902,21 @@ class VectorDataset(AbstractMapDataset):
         return self.ciface.vector_map_exists(self.get_name(),
                                              self.get_mapset())
 
-
     def load(self):
 
         """Load all info from an existing vector map into the internal structure
-            
-            This method checks first if the map exists, in case it exists
-            the metadata of the map is put into this object and True is returned.
-            
-            :return: True is the map exists and the metadata was filled successfully
-                          and getting the data was successfull, False otherwise
+
+           This method first checks if the map exists; if it does, the
+           metadata of the map is put into this object and True is returned.
+
+           :return: True if the map exists and the metadata was filled
+                    successfully and the data could be read, False otherwise
         """
 
         if self.map_exists() is not True:
             return False
 
-
         # Fill base information
         self.base.set_creator(str(getpass.getuser()))
 
@@ -921,9 +927,12 @@ class VectorDataset(AbstractMapDataset):
 
         if kvp:
             # Fill spatial extent
-            self.set_spatial_extent_from_values(north=kvp["north"], south=kvp["south"],
-                                    east=kvp["east"], west=kvp["west"],
-                                    top=kvp["top"], bottom=kvp["bottom"])
+            self.set_spatial_extent_from_values(north=kvp["north"],
+                                                south=kvp["south"],
+                                                east=kvp["east"],
+                                                west=kvp["west"],
+                                                top=kvp["top"],
+                                                bottom=kvp["bottom"])
 
             # Fill metadata
             self.metadata.set_3d_info(kvp["map3d"])
@@ -939,13 +948,14 @@ class VectorDataset(AbstractMapDataset):
             self.metadata.set_number_of_islands(kvp["islands"])
             self.metadata.set_number_of_holes(kvp["holes"])
             self.metadata.set_number_of_volumes(kvp["volumes"])
-            
+
             return True
 
         return False
 
 ###############################################################################
 
+
 class SpaceTimeRasterDataset(AbstractSpaceTimeDataset):
     """Space time raster dataset class
     """
@@ -958,7 +968,7 @@ class SpaceTimeRasterDataset(AbstractSpaceTimeDataset):
            :return: True if this class is a space time dataset, False otherwise
         """
         return True
-        
+
     def get_type(self):
         return "strds"
 
@@ -1025,6 +1035,7 @@ class SpaceTimeRasterDataset(AbstractSpaceTimeDataset):
 
 ###############################################################################
 
+
 class SpaceTimeRaster3DDataset(AbstractSpaceTimeDataset):
     """Space time raster3d dataset class
     """
@@ -1038,7 +1049,7 @@ class SpaceTimeRaster3DDataset(AbstractSpaceTimeDataset):
            :return: True if this class is a space time dataset, False otherwise
         """
         return True
-        
+
     def get_type(self):
         return "str3ds"
 
@@ -1137,7 +1148,7 @@ class SpaceTimeVectorDataset(AbstractSpaceTimeDataset):
            :return: True if this class is a space time dataset, False otherwise
         """
         return True
-        
+
     def get_type(self):
         return "stvds"
 

+ 38 - 28
lib/python/temporal/spatial_extent.py

@@ -86,7 +86,8 @@ class SpatialExtent(SQLDatabaseInterface):
 
         SQLDatabaseInterface.__init__(self, table, ident)
         self.set_id(ident)
-        self.set_spatial_extent_from_values(north, south, east, west, top, bottom)
+        self.set_spatial_extent_from_values(north, south, east, west, top,
+                                            bottom)
         self.set_projection(proj)
 
     def overlapping_2d(self, extent):
@@ -115,7 +116,7 @@ class SpatialExtent(SQLDatabaseInterface):
 
         if self.get_projection() != extent.get_projection():
             self.msgr.error(_("Projections are different. Unable to compute "
-                         "overlapping_2d for spatial extents"))
+                              "overlapping_2d for spatial extents"))
             return False
 
         N = extent.get_north()
@@ -233,7 +234,7 @@ class SpatialExtent(SQLDatabaseInterface):
             nS = eS
 
         new = SpatialExtent(north=nN, south=nS, east=nE, west=nW,
-                             top=0, bottom=0, proj=self.get_projection())
+                            top=0, bottom=0, proj=self.get_projection())
         return new
 
     def intersect(self, extent):
@@ -385,7 +386,7 @@ class SpatialExtent(SQLDatabaseInterface):
             nS = eS
 
         new = SpatialExtent(north=nN, south=nS, east=nE, west=nW,
-                             top=0, bottom=0, proj=self.get_projection())
+                            top=0, bottom=0, proj=self.get_projection())
         return new
 
     def union(self, extent):
@@ -507,9 +508,9 @@ class SpatialExtent(SQLDatabaseInterface):
     def is_in_2d(self, extent):
         """Return True if this extent (A) is located in the provided spatial
         extent (B) in two dimensions.
-        
+
         ::
-        
+
              _____
             |A _  |
             | |_| |
@@ -521,7 +522,7 @@ class SpatialExtent(SQLDatabaseInterface):
         """
         if self.get_projection() != extent.get_projection():
             self.msgr.error(_("Projections are different. Unable to compute "
-                         "is_in_2d for spatial extents"))
+                              "is_in_2d for spatial extents"))
             return False
 
         eN = extent.get_north()
@@ -653,7 +654,7 @@ class SpatialExtent(SQLDatabaseInterface):
         """
         if self.get_projection() != extent.get_projection():
             self.msgr.error(_("Projections are different. Unable to compute "
-                         "equivalent_2d for spatial extents"))
+                              "equivalent_2d for spatial extents"))
             return False
 
         eN = extent.get_north()
@@ -729,7 +730,7 @@ class SpatialExtent(SQLDatabaseInterface):
         extent (B) in two dimensions.
 
         ::
-        
+
              _____    _____    _____    _____
             |A  __|  |__  A|  |A | B|  |B | A|
             |  |B |  | B|  |  |  |__|  |__|  |
@@ -757,7 +758,8 @@ class SpatialExtent(SQLDatabaseInterface):
         """
 
         if self.get_projection() != extent.get_projection():
-            self.msgr.error(_("Projections are different. Unable to compute cover_2d for spatial extents"))
+            self.msgr.error(_("Projections are different. Unable to compute"
+                              " cover_2d for spatial extents"))
             return False
 
         # Exclude equivalent_2d
@@ -829,7 +831,7 @@ class SpatialExtent(SQLDatabaseInterface):
         """
         if self.get_projection() != extent.get_projection():
             self.msgr.error(_("Projections are different. Unable to compute "
-                         "cover for spatial extents"))
+                              "cover for spatial extents"))
             return False
 
         # Exclude equivalent_2d
@@ -942,7 +944,7 @@ class SpatialExtent(SQLDatabaseInterface):
         Code is borrowed from wind_overlap.c in lib/gis
 
         ::
-        
+
              _____
             |A  __|__
             |  |  | B|
@@ -1079,7 +1081,7 @@ class SpatialExtent(SQLDatabaseInterface):
         extent (B) in two dimensions.
 
         ::
-        
+
               _____ _____
              |  A  |  B  |
              |_____|     |
@@ -1244,7 +1246,7 @@ class SpatialExtent(SQLDatabaseInterface):
         extent (B) in three dimensions.
 
         ::
-        
+
               _____
              |  A  |
              |_____|
@@ -1275,7 +1277,7 @@ class SpatialExtent(SQLDatabaseInterface):
         if self.overlapping_2d(extent):
             return False
 
-        if  self.meet_2d(extent):
+        if self.meet_2d(extent):
             return False
 
         return True
@@ -1306,7 +1308,7 @@ class SpatialExtent(SQLDatabaseInterface):
         if self.overlapping(extent):
             return False
 
-        if  self.meet(extent):
+        if self.meet(extent):
             return False
 
         return True
@@ -1547,7 +1549,8 @@ class SpatialExtent(SQLDatabaseInterface):
 
         return "unknown"
 
-    def set_spatial_extent_from_values(self, north, south, east, west, top, bottom):
+    def set_spatial_extent_from_values(self, north, south, east, west, top,
+                                       bottom):
         """Set the three dimensional spatial extent
 
            :param north: The northern edge
@@ -1568,7 +1571,8 @@ class SpatialExtent(SQLDatabaseInterface):
     def set_spatial_extent(self, spatial_extent):
         """Set the three dimensional spatial extent
 
-            :param spatial_extent: An object of type SpatialExtent or its subclasses
+           :param spatial_extent: An object of type SpatialExtent or its
+                                  subclasses
         """
 
         self.set_north(spatial_extent.get_north())
@@ -1604,7 +1608,8 @@ class SpatialExtent(SQLDatabaseInterface):
     def set_spatial_extent_2d(self, spatial_extent):
         """Set the three dimensional spatial extent
 
-            :param spatial_extent: An object of type SpatialExtent or its subclasses
+           :param spatial_extent: An object of type SpatialExtent or its
+                                  subclasses
         """
 
         self.set_north(spatial_extent.north)
@@ -1678,7 +1683,7 @@ class SpatialExtent(SQLDatabaseInterface):
 
         if self.get_projection() == "LL":
             self.msgr.error(_("Volume computation is not supported "
-                         "for LL projections"))
+                              "for LL projections"))
 
         area = self.get_area()
 
@@ -1696,7 +1701,7 @@ class SpatialExtent(SQLDatabaseInterface):
 
         if self.get_projection() == "LL":
             self.msgr.error(_("Area computation is not supported "
-                         "for LL projections"))
+                              "for LL projections"))
 
         bbox = self.get_spatial_extent_as_tuple()
 
@@ -1772,7 +1777,7 @@ class SpatialExtent(SQLDatabaseInterface):
     east = property(fget=get_east, fset=set_east)
     west = property(fget=get_west, fset=set_west)
     top = property(fget=get_top, fset=set_top)
-    bottom= property(fget=get_bottom, fset=set_bottom)
+    bottom = property(fget=get_bottom, fset=set_bottom)
 
     def print_info(self):
         """Print information about this class in human readable style"""
@@ -1801,37 +1806,42 @@ class RasterSpatialExtent(SpatialExtent):
     def __init__(self, ident=None, north=None, south=None, east=None,
                  west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster_spatial_extent",
-                                ident, north, south, east, west, top, bottom)
+                               ident, north, south, east, west, top, bottom)
+
 
 class Raster3DSpatialExtent(SpatialExtent):
     def __init__(self, ident=None, north=None, south=None, east=None,
                  west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "raster3d_spatial_extent",
-                                ident, north, south, east, west, top, bottom)
+                               ident, north, south, east, west, top, bottom)
+
 
 class VectorSpatialExtent(SpatialExtent):
     def __init__(self, ident=None, north=None, south=None, east=None,
                  west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "vector_spatial_extent",
-                                ident, north, south, east, west, top, bottom)
+                               ident, north, south, east, west, top, bottom)
+
 
 class STRDSSpatialExtent(SpatialExtent):
     def __init__(self, ident=None, north=None, south=None, east=None,
                  west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "strds_spatial_extent",
-                                ident, north, south, east, west, top, bottom)
+                               ident, north, south, east, west, top, bottom)
+
 
 class STR3DSSpatialExtent(SpatialExtent):
     def __init__(self, ident=None, north=None, south=None, east=None,
                  west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "str3ds_spatial_extent",
-                                ident, north, south, east, west, top, bottom)
+                               ident, north, south, east, west, top, bottom)
+
 
 class STVDSSpatialExtent(SpatialExtent):
     def __init__(self, ident=None, north=None, south=None, east=None,
                  west=None, top=None, bottom=None):
         SpatialExtent.__init__(self, "stvds_spatial_extent",
-                                ident, north, south, east, west, top, bottom)
+                               ident, north, south, east, west, top, bottom)
 
 ###############################################################################
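
For reference, a minimal sketch of the SpatialExtent predicates touched above; the coordinates are arbitrary and only illustrate the 2D overlap test and the intersection call:

    import grass.temporal as tgis

    a = tgis.SpatialExtent(north=80, south=20, east=60, west=10,
                           top=50, bottom=-50)
    b = tgis.SpatialExtent(north=100, south=50, east=90, west=40,
                           top=50, bottom=-50)
    # the two rectangles share an area, so they overlap in 2D
    a.overlapping_2d(b)      # -> True
    inter = a.intersect(b)   # extent of the shared region
    inter.print_info()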
 

+ 62 - 66
lib/python/temporal/spatial_topology_dataset_connector.py

@@ -16,14 +16,16 @@ for details.
 """
 import copy
 
+
 class SpatialTopologyDatasetConnector(object):
-    """This class implements a spatial topology access structure to connect spatial related datasets
+    """This class implements a spatial topology access structure to connect
+       spatially related datasets
 
-       This object will be set up by spatial topology creation method provided by the 
-       SpatioTemporalTopologyBuilder.
+       This object will be set up by the spatial topology creation method
+       provided by the SpatioTemporalTopologyBuilder.
 
        The following spatial relations with access methods are supported:
-       
+
        - equivalent
        - overlap
        - in
@@ -31,11 +33,11 @@ class SpatialTopologyDatasetConnector(object):
        - meet
        - cover
        - covered
-            
+
         Usage:
-        
+
         .. code-block:: python
-        
+
             >>> import grass.temporal as tgis
             >>> tgis.init()
             >>> map = tgis.RasterDataset("a@P")
@@ -78,17 +80,17 @@ class SpatialTopologyDatasetConnector(object):
         """Reset any information about temporal topology"""
         self._spatial_topology = {}
         self._has_spatial_topology = False
-        
+
     def get_spatial_relations(self):
         """Return the dictionary of spatial relationships
-        
-            Keys are the spatial relationships in upper case,
-            values are abstract map objects.
-            
-            :return: The spatial relations dictionary
+
+           Keys are the spatial relationships in upper case,
+           values are abstract map objects.
+
+           :return: The spatial relations dictionary
         """
         return copy.copy(self._spatial_topology)
-    
+
     def get_number_of_spatial_relations(self):
         """ Return a dictionary in which the keys are the relation names and the value
             are the number of relations.
@@ -103,44 +105,45 @@ class SpatialTopologyDatasetConnector(object):
             - cover
             - covered
 
-            To access topological information the spatial topology must be build first
-            using the SpatialTopologyBuilder.
+            To access topological information the spatial topology must be
+            built first using the SpatialTopologyBuilder.
 
-            :return: the dictionary with relations as keys and number as values or None in case the topology wasn't build
+            :return: the dictionary with relations as keys and numbers as
+                     values, or None in case the topology wasn't built
         """
-        if self._has_spatial_topology == False:
+        if self._has_spatial_topology is False:
             return None
-    
+
         relations = {}
         try:
-            relations["equivalent"] = len(self._spatial_topology["EQUIVALENT"]) 
+            relations["equivalent"] = len(self._spatial_topology["EQUIVALENT"])
         except:
             relations["equivalent"] = 0
-        try: 
-            relations["overlap"] = len(self._spatial_topology["OVERLAP"]) 
-        except: 
+        try:
+            relations["overlap"] = len(self._spatial_topology["OVERLAP"])
+        except:
             relations["overlap"] = 0
-        try: 
+        try:
             relations["in"] = len(self._spatial_topology["IN"])
-        except: 
+        except:
             relations["in"] = 0
-        try: 
+        try:
             relations["contain"] = len(self._spatial_topology["CONTAIN"])
-        except: 
+        except:
             relations["contain"] = 0
-        try: 
+        try:
             relations["meet"] = len(self._spatial_topology["MEET"])
-        except: 
+        except:
             relations["meet"] = 0
-        try: 
+        try:
             relations["cover"] = len(self._spatial_topology["COVER"])
-        except: 
+        except:
             relations["cover"] = 0
-        try: 
+        try:
             relations["covered"] = len(self._spatial_topology["COVERED"])
-        except: 
+        except:
             relations["covered"] = 0
-            
+
         return relations
 
     def set_spatial_topology_build_true(self):
@@ -158,8 +161,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_equivalent(self, map):
         """Append a map with equivalent spatial extent as this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "EQUIVALENT" not in self._spatial_topology:
             self._spatial_topology["EQUIVALENT"] = []
@@ -177,8 +180,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_overlap(self, map):
         """Append a map that this spatial overlap with this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "OVERLAP" not in self._spatial_topology:
             self._spatial_topology["OVERLAP"] = []
@@ -196,8 +199,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_in(self, map):
         """Append a map that this is spatial in this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "IN" not in self._spatial_topology:
             self._spatial_topology["IN"] = []
@@ -215,8 +218,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_contain(self, map):
         """Append a map that this map spatially contains
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "CONTAIN" not in self._spatial_topology:
             self._spatial_topology["CONTAIN"] = []
@@ -234,8 +237,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_meet(self, map):
         """Append a map that spatially meet with this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "MEET" not in self._spatial_topology:
             self._spatial_topology["MEET"] = []
@@ -253,8 +256,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_cover(self, map):
         """Append a map that spatially cover this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "COVER" not in self._spatial_topology:
             self._spatial_topology["COVER"] = []
@@ -272,8 +275,8 @@ class SpatialTopologyDatasetConnector(object):
     def append_covered(self, map):
         """Append a map that is spatially covered by this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "COVERED" not in self._spatial_topology:
             self._spatial_topology["COVERED"] = []
@@ -288,7 +291,6 @@ class SpatialTopologyDatasetConnector(object):
             return None
         return self._spatial_topology["COVERED"]
 
-
     def _generate_map_list_string(self, map_list, line_wrap=True):
         count = 0
         string = ""
@@ -303,26 +305,19 @@ class SpatialTopologyDatasetConnector(object):
             count += 1
 
         return string
-    
+
     # Set the properties
-    equivalent = property(fget=get_equivalent, 
-                                       fset=append_equivalent)
-    cover = property(fget=get_cover, 
-                                     fset=append_cover)
-    covered = property(fget=get_covered, 
-                                       fset=append_covered)
-    overlap = property(fget=get_overlap, 
-                                     fset=append_overlap)
-    in_ = property(fget=get_in, 
-                                     fset=append_in)
-    contain = property(fget=get_contain, 
-                                     fset=append_contain)
-    meet = property(fget=get_meet, 
-                                     fset=append_meet)
+    equivalent = property(fget=get_equivalent, fset=append_equivalent)
+    cover = property(fget=get_cover, fset=append_cover)
+    covered = property(fget=get_covered, fset=append_covered)
+    overlap = property(fget=get_overlap, fset=append_overlap)
+    in_ = property(fget=get_in, fset=append_in)
+    contain = property(fget=get_contain, fset=append_contain)
+    meet = property(fget=get_meet, fset=append_meet)
 
     def print_spatial_topology_info(self):
         """Print information about this class in human readable style"""
-        
+
         print " +-------------------- Spatial Topology --------------------------------------+"
         #          0123456789012345678901234567890
         if self.equivalent is not None:
@@ -351,7 +346,8 @@ class SpatialTopologyDatasetConnector(object):
         """Print information about this class in shell style"""
 
         if self.equivalent is not None:
-            print "equivalent=" + self._generate_map_list_string(self.equivalent, False)
+            print "equivalent=" + self._generate_map_list_string(self.equivalent,
+                                                                 False)
         if self.cover is not None:
             print "cover=" + self._generate_map_list_string(
                 self.cover, False)
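
The append_*/get_* pairs above are exposed through the plain properties listed in this hunk (equivalent, cover, covered, overlap, in_, contain, meet). A minimal usage sketch, not part of this commit, assuming an initialized GRASS session and made-up map names:

```python
import grass.temporal as tgis

tgis.init()
a = tgis.RasterDataset("elev_a@PERMANENT")
b = tgis.RasterDataset("elev_b@PERMANENT")

a.contain = b      # property setter, calls append_contain(b)
b.in_ = a          # property setter, calls append_in(a)

print(a.contain)   # list returned by get_contain(), here containing b
a.print_spatial_topology_info()
```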

+ 113 - 88
lib/python/temporal/spatio_temporal_relationships.py

@@ -4,6 +4,7 @@ Class to build the spatio-temporal topology between map lists
 Usage:
 
 .. code-block:: python
+
     import grass.temporal as tgis
 
     tgis.print_temporal_relations(maps)
@@ -25,11 +26,13 @@ from ctypes import *
 
 ###############################################################################
 
+
 class SpatioTemporalTopologyBuilder(object):
     """This class is designed to build the spatio-temporal topology
        of spatio-temporally related abstract dataset objects.
 
-       The abstract dataset objects must be provided as a single list, or in two lists.
+       The abstract dataset objects must be provided as a single list, or in
+       two lists.
 
         Example:
 
@@ -355,7 +358,7 @@ class SpatioTemporalTopologyBuilder(object):
     def __init__(self):
         self._reset()
         # 0001-01-01 00:00:00
-        self._timeref = datetime(1,1,1)
+        self._timeref = datetime(1, 1, 1)
 
     def _reset(self):
         self._store = {}
@@ -443,9 +446,10 @@ class SpatioTemporalTopologyBuilder(object):
         """Use the spatio-temporal extent of a map to create and
            return an RTree rectangle
 
-           :param spatial: This indicates if the spatial topology is created as well:
-                          spatial can be None (no spatial topology), "2D" using west, east,
-                          #south, north or "3D" using west, east, south, north, bottom, top
+           :param spatial: This indicates if the spatial topology is created
+                           as well: spatial can be None (no spatial topology),
+                           "2D" using west, east, south, north or "3D" using
+                           west, east, south, north, bottom, top
         """
         rect = rtree.RTreeAllocRect(tree)
 
@@ -463,21 +467,21 @@ class SpatioTemporalTopologyBuilder(object):
         elif spatial == "2D":
             north, south, east, west, top, bottom = map_.get_spatial_extent_as_tuple()
             rtree.RTreeSetRect3D(rect, tree, west, east, south, north,
-                                  float(start), float(end))
+                                 float(start), float(end))
         elif spatial == "3D":
             north, south, east, west, top, bottom = map_.get_spatial_extent_as_tuple()
             rtree.RTreeSetRect4D(rect, tree, west, east, south, north,
-                                  bottom, top, float(start), float(end))
+                                 bottom, top, float(start), float(end))
 
         return rect
 
     def _build_rtree(self, maps, spatial=None):
         """Build and return the 1-4 dimensional R*-Tree
 
-
-           :param spatial: This indicates if the spatial topology is created as well:
-                          spatial can be None (no spatial topology), "2D" using west, east,
-                          south, north or "3D" using west, east, south, north, bottom, top
+           :param spatial: This indicates if the spatial topology is created
+                           as well: spatial can be None (no spatial topology),
+                           "2D" using west, east, south, north or "3D" using
+                           west, east, south, north, bottom, top
         """
         dim = 1
         if spatial == "2D":
@@ -498,9 +502,10 @@ class SpatioTemporalTopologyBuilder(object):
         """Build the spatio-temporal topology structure between
            one or two unordered lists of abstract dataset objects
 
-           This method builds the temporal or spatio-temporal topology from mapsA to
-           mapsB and vice verse. The spatio-temporal topology structure of each map
-           will be reseted and rebuild for mapsA and mapsB.
+           This method builds the temporal or spatio-temporal topology from
+           mapsA to mapsB and vice versa. The spatio-temporal topology
+           structure of each map will be reset and rebuilt for mapsA and
+           mapsB.
 
            After building the temporal or spatio-temporal topology the modified
            map objects of mapsA can be accessed
@@ -512,16 +517,17 @@ class SpatioTemporalTopologyBuilder(object):
                          objects with initiated spatio-temporal extent
            :param mapsB: An optional list of abstract_dataset
                          objects with initiated spatio-temporal extent
-           :param spatial: This indicates if the spatial topology is created as well:
-                          spatial can be None (no spatial topology), "2D" using west, east,
-                          south, north or "3D" using west, east, south, north, bottom, top
+           :param spatial: This indicates if the spatial topology is created
+                           as well: spatial can be None (no spatial topology),
+                           "2D" using west, east, south, north or "3D" using
+                           west, east, south, north, bottom, top
         """
 
         identical = False
         if mapsA == mapsB:
             identical = True
 
-        if mapsB == None:
+        if mapsB is None:
             mapsB = mapsA
             identical = True
 
@@ -557,7 +563,7 @@ class SpatioTemporalTopologyBuilder(object):
                     set_spatial_relationship(A, B, relation)
 
         self._build_internal_iteratable(mapsA, spatial)
-        if not identical and mapsB != None:
+        if not identical and mapsB is not None:
             self._build_iteratable(mapsB, spatial)
 
         gis.G_free_ilist(list_)
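
The build() docstring above states that the computed relations are attached to the map objects of mapsA themselves. A hedged sketch of that workflow (not part of this commit; it assumes an initialized GRASS session and uses made-up map names):

```python
from datetime import datetime
import grass.temporal as tgis

tgis.init()

maps = []
for month in range(1, 5):
    map_ = tgis.RasterDataset("prec_%i@PERMANENT" % month)
    map_.set_absolute_time(datetime(2001, month, 1),
                           datetime(2001, month + 1, 1))
    maps.append(map_)

tb = tgis.SpatioTemporalTopologyBuilder()
tb.build(maps)                  # single-list call: relations within maps

for map_ in maps:               # relations now hang off each map object
    map_.print_temporal_topology_info()
```
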
@@ -581,6 +587,7 @@ class SpatioTemporalTopologyBuilder(object):
 
 ###############################################################################
 
+
 def set_temoral_relationship(A, B, relation):
     if relation == "equal" or relation == "equals":
         if A != B:
@@ -756,6 +763,7 @@ def set_spatial_relationship(A, B, relation):
 
 ###############################################################################
 
+
 def print_temporal_topology_relationships(maps1, maps2=None, dbif=None):
     """Print the temporal relationships of the
        map lists maps1 and maps2 to stdout.
@@ -784,6 +792,7 @@ def print_temporal_topology_relationships(maps1, maps2=None, dbif=None):
 
 ###############################################################################
 
+
 def print_spatio_temporal_topology_relationships(maps1, maps2=None,
                                                  spatial="2D", dbif=None):
     """Print the temporal relationships of the
@@ -793,8 +802,9 @@ def print_spatio_temporal_topology_relationships(maps1, maps2=None,
                       objects with initiated temporal extent
         :param maps2: An optional list of abstract_dataset
                       objects with initiated temporal extent
-        :param spatial: The dimension of the spatial extent to be used: "2D" using west, east,
-                        south, north or "3D" using west, east, south, north, bottom, top
+        :param spatial: The dimension of the spatial extent to be used: "2D"
+                        using west, east, south, north or "3D" using west,
+                        east, south, north, bottom, top
         :param dbif: The database interface to be used
     """
 
@@ -815,8 +825,10 @@ def print_spatio_temporal_topology_relationships(maps1, maps2=None,
 
 ###############################################################################
 
+
 def count_temporal_topology_relationships(maps1, maps2=None, dbif=None):
-    """Count the temporal relations of a single list of maps or between two lists of maps
+    """Count the temporal relations of a single list of maps or between two
+       lists of maps
 
 
         :param maps1: A list of abstract_dataset
@@ -827,7 +839,6 @@ def count_temporal_topology_relationships(maps1, maps2=None, dbif=None):
         :return: A dictionary with counted temporal relationships
     """
 
-
     tb = SpatioTemporalTopologyBuilder()
     tb.build(maps1, maps2)
 
@@ -850,65 +861,70 @@ def count_temporal_topology_relationships(maps1, maps2=None, dbif=None):
 
 ###############################################################################
 
-def create_temporal_relation_sql_where_statement(
-                        start, end, use_start=True, use_during=False,
-                        use_overlap=False, use_contain=False, use_equal=False,
-                        use_follows=False, use_precedes=False):
-    """Create a SQL WHERE statement for temporal relation selection of maps in space time datasets
+
+def create_temporal_relation_sql_where_statement(start, end, use_start=True,
+                                                 use_during=False,
+                                                 use_overlap=False,
+                                                 use_contain=False,
+                                                 use_equal=False,
+                                                 use_follows=False,
+                                                 use_precedes=False):
+    """Create a SQL WHERE statement for temporal relation selection of maps in
+       space time datasets
 
         :param start: The start time
         :param end: The end time
-        :param use_start: Select maps of which the start time is located in the selection granule
-                         ::
-                         
-                             map    :        s
-                             granule:  s-----------------e
-
-                             map    :        s--------------------e
-                             granule:  s-----------------e
-
-                             map    :        s--------e
-                             granule:  s-----------------e
-
-
-        :param use_during: Select maps which are temporal during the selection granule
-                         ::
-                         
-                             map    :     s-----------e
-                             granule:  s-----------------e
-
-        :param use_overlap: Select maps which temporal overlap the selection granule
-                         ::
-                         
-                             map    :     s-----------e
-                             granule:        s-----------------e
-
-                             map    :     s-----------e
-                             granule:  s----------e
-
-        :param use_contain: Select maps which temporally contain the selection granule
-                         ::
-                         
-                             map    :  s-----------------e
-                             granule:     s-----------e
-
-        :param use_equal: Select maps which temporally equal to the selection granule
-                         ::
-                         
-                             map    :  s-----------e
-                             granule:  s-----------e
-
-        :param use_follows: Select maps which temporally follow the selection granule
-                         ::
-                         
-                             map    :              s-----------e
-                             granule:  s-----------e
-
-        :param use_precedes: Select maps which temporally precedes the selection granule
-                         ::
-                         
-                             map    :  s-----------e
-                             granule:              s-----------e
+        :param use_start: Select maps of which the start time is located in
+                          the selection granule ::
+
+                              map    :        s
+                              granule:  s-----------------e
+
+                              map    :        s--------------------e
+                              granule:  s-----------------e
+
+                              map    :        s--------e
+                              granule:  s-----------------e
+
+
+        :param use_during: Select maps which are temporally during the
+                           selection granule ::
+
+                               map    :     s-----------e
+                               granule:  s-----------------e
+
+        :param use_overlap: Select maps which temporally overlap the
+                            selection granule ::
+
+                                map    :     s-----------e
+                                granule:        s-----------------e
+
+                                map    :     s-----------e
+                                granule:  s----------e
+
+        :param use_contain: Select maps which temporally contain the selection
+                            granule ::
+
+                                map    :  s-----------------e
+                                granule:     s-----------e
+
+        :param use_equal: Select maps which are temporally equal to the
+                          selection granule ::
+
+                              map    :  s-----------e
+                              granule:  s-----------e
+
+        :param use_follows: Select maps which temporally follow the selection
+                            granule ::
+
+                                map    :              s-----------e
+                                granule:  s-----------e
+
+        :param use_precedes: Select maps which temporally precede the
+                             selection granule ::
+
+                                 map    :  s-----------e
+                                 granule:              s-----------e
 
         Usage:
 
@@ -986,7 +1002,8 @@ def create_temporal_relation_sql_where_statement(
 
     if use_start:
         if isinstance(start, datetime):
-            where += "(start_time >= '%s' and start_time < '%s') " % (start, end)
+            where += "(start_time >= '%s' and start_time < '%s') " % (start,
+                                                                      end)
         else:
             where += "(start_time >= %i and start_time < %i) " % (start, end)
 
@@ -995,8 +1012,10 @@ def create_temporal_relation_sql_where_statement(
             where += " OR "
 
         if isinstance(start, datetime):
-            where += "((start_time > '%s' and end_time < '%s') OR " % (start, end)
-            where += "(start_time >= '%s' and end_time < '%s') OR " % (start, end)
+            where += "((start_time > '%s' and end_time < '%s') OR " % (start,
+                                                                       end)
+            where += "(start_time >= '%s' and end_time < '%s') OR " % (start,
+                                                                       end)
             where += "(start_time > '%s' and end_time <= '%s'))" % (start, end)
         else:
             where += "((start_time > %i and end_time < %i) OR " % (start, end)
@@ -1008,19 +1027,25 @@ def create_temporal_relation_sql_where_statement(
             where += " OR "
 
         if isinstance(start, datetime):
-            where += "((start_time < '%s' and end_time > '%s' and end_time < '%s') OR " % (start, start, end)
-            where += "(start_time < '%s' and start_time > '%s' and end_time > '%s'))" % (end, start, end)
+            where += "((start_time < '%s' and end_time > '%s' and end_time <" \
+                     " '%s') OR " % (start, start, end)
+            where += "(start_time < '%s' and start_time > '%s' and end_time " \
+                     "> '%s'))" % (end, start, end)
         else:
-            where += "((start_time < %i and end_time > %i and end_time < %i) OR " % (start, start, end)
-            where += "(start_time < %i and start_time > %i and end_time > %i))" % (end, start, end)
+            where += "((start_time < %i and end_time > %i and end_time < %i)" \
+                     " OR " % (start, start, end)
+            where += "(start_time < %i and start_time > %i and end_time > " \
+                     "%i))" % (end, start, end)
 
     if use_contain:
         if use_start or use_during or use_overlap:
             where += " OR "
 
         if isinstance(start, datetime):
-            where += "((start_time < '%s' and end_time > '%s') OR " % (start, end)
-            where += "(start_time <= '%s' and end_time > '%s') OR " % (start, end)
+            where += "((start_time < '%s' and end_time > '%s') OR " % (start,
+                                                                       end)
+            where += "(start_time <= '%s' and end_time > '%s') OR " % (start,
+                                                                       end)
             where += "(start_time < '%s' and end_time >= '%s'))" % (start, end)
         else:
             where += "((start_time < %i and end_time > %i) OR " % (start, end)

+ 26 - 22
lib/python/temporal/stds_export.py

@@ -78,16 +78,18 @@ def _export_raster_maps_as_gdal(rows, tar, list_file, new_cwd, fs, format_):
                 else:
                     gdal_type = "Int32"
                 ret = gscript.run_command("r.out.gdal", flags="c", input=name,
-                                    output=out_name, nodata=nodata,
-                                    type=gdal_type, format="GTiff")
+                                          output=out_name, nodata=nodata,
+                                          type=gdal_type, format="GTiff")
             else:
                 ret = gscript.run_command("r.out.gdal", flags="c",
-                                    input=name, output=out_name, format="GTiff")
+                                          input=name, output=out_name,
+                                          format="GTiff")
         elif format_ == "AAIGrid":
             # Export the raster map with r.out.gdal as Arc/Info ASCII Grid
             out_name = name + ".asc"
-            ret = gscript.run_command("r.out.gdal", flags="c", input=name, output=out_name, format="AAIGrid")
-            
+            ret = gscript.run_command("r.out.gdal", flags="c", input=name,
+                                      output=out_name, format="AAIGrid")
+
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
@@ -102,12 +104,13 @@ def _export_raster_maps_as_gdal(rows, tar, list_file, new_cwd, fs, format_):
             shutil.rmtree(new_cwd)
             tar.close()
             gscript.fatal(_("Unable to export color rules for raster "
-                         "map <%s> r.out.gdal" % name))
+                            "map <%s> r.out.gdal" % name))
 
         tar.add(out_name)
 
 ############################################################################
 
+
 def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -124,12 +127,13 @@ def _export_raster_maps(rows, tar, list_file, new_cwd, fs):
             shutil.rmtree(new_cwd)
             tar.close()
             gscript.fatal(_("Unable to export raster map <%s> with r.pack" %
-                         name))
+                          name))
 
         tar.add(name + ".pack")
 
 ############################################################################
 
+
 def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -145,18 +149,19 @@ def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
         list_file.write(string)
         # Export the vector map with v.out.ogr
         ret = gscript.run_command("v.out.ogr", input=name, dsn=(name + ".xml"),
-                               layer=layer, format="GML")
+                                  layer=layer, format="GML")
         if ret != 0:
             shutil.rmtree(new_cwd)
             tar.close()
             gscript.fatal(_("Unable to export vector map <%s> as "
-                         "GML with v.out.ogr" % name))
+                            "GML with v.out.ogr" % name))
 
         tar.add(name + ".xml")
         tar.add(name + ".xsd")
 
 ############################################################################
 
+
 def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
     for row in rows:
         name = row["name"]
@@ -181,7 +186,7 @@ def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
             shutil.rmtree(new_cwd)
             tar.close()
             gscript.fatal(_("Unable to export vector map <%s> with v.pack" %
-                         name))
+                          name))
 
         tar.add(name + ".pack")
 
@@ -206,7 +211,7 @@ def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs):
             shutil.rmtree(new_cwd)
             tar.close()
             gscript.fatal(_("Unable to export raster map <%s> with r3.pack" %
-                         name))
+                          name))
 
         tar.add(name + ".pack")
 
@@ -228,27 +233,26 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
               - "no"  no compression
               - "gzip" GNU zip compression
               - "bzip2" Bzip compression
-          
+
         :param workdir: The working directory used for extraction and packing
         :param where: The temporal WHERE SQL statement to select a subset
                       of maps from the space time dataset
         :param format_: The export format:
-        
+
               - "GTiff" Geotiff format, only for raster maps
               - "AAIGrid" Arc/Info ASCII Grid format, only for raster maps
               - "pack" The GRASS raster, 3D raster or vector Pack format,
                        this is the default setting
               - "GML" GML file export format, only for vector maps,
                       v.out.ogr export option
-                  
+
         :param type_: The space time dataset type
-        
+
               - "strds" Space time raster dataset
               - "str3ds" Space time 3D raster dataset
               - "stvds" Space time vector dataset
     """
 
-
     # Save current working directory path
     old_cwd = os.getcwd()
 
@@ -306,11 +310,11 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
     init_file = open(init_file_name, "w")
     # Create the init string
     string = ""
-     # This is optional, if not present strds will be assumed for backward
-     # compatibility
+    # This is optional, if not present strds will be assumed for backward
+    # compatibility
     string += "%s=%s\n" % ("stds_type", sp.get_type())
-     # This is optional, if not present gtiff will be assumed for
-     # backward compatibility
+    # This is optional, if not present gtiff will be assumed for
+    # backward compatibility
     string += "%s=%s\n" % ("format", format_)
     string += "%s=%s\n" % ("temporal_type", sp.get_temporal_type())
     string += "%s=%s\n" % ("semantic_type", sp.get_semantic_type())
@@ -345,13 +349,13 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
     read_file.write("Files:\n")
     if type_ == "strds":
         if format_ == "GTiff":
-                                #123456789012345678901234567890
+                                # 123456789012345678901234567890
             read_file.write("       *.tif  -- GeoTIFF raster files\n")
             read_file.write("     *.color  -- GRASS GIS raster color rules\n")
         elif format_ == "pack":
             read_file.write("      *.pack  -- GRASS raster files packed with r.pack\n")
     elif type_ == "stvds":
-                                #123456789012345678901234567890
+                                # 123456789012345678901234567890
         if format_ == "GML":
             read_file.write("       *.xml  -- Vector GML files\n")
         else:
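
A hedged sketch of a call matching the export_stds() docstring above; the trailing keyword argument is assumed to be type_ (the parameter list is truncated in this hunk), and the dataset name and paths are made up. It presumes an initialized GRASS session in which the space time raster dataset exists:

```python
import grass.temporal as tgis

tgis.init()
tgis.export_stds(input="precipitation@PERMANENT",
                 output="/tmp/precipitation.tar.gzip",
                 compression="gzip",          # "no", "gzip" or "bzip2"
                 workdir="/tmp",
                 where=None,                  # export all registered maps
                 format_="GTiff",
                 type_="strds")               # assumed keyword, see above
```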

+ 76 - 73
lib/python/temporal/stds_import.py

@@ -51,7 +51,7 @@ imported_maps = {}
 ############################################################################
 
 
-def _import_raster_maps_from_gdal(maplist, overr, exp, location, link, format_, 
+def _import_raster_maps_from_gdal(maplist, overr, exp, location, link, format_,
                                   set_current_region=False):
     impflags = ""
     if overr:
@@ -62,35 +62,35 @@ def _import_raster_maps_from_gdal(maplist, overr, exp, location, link, format_,
         name = row["name"]
         if format_ == "GTiff":
             filename = row["filename"] + ".tif"
-        elif format_=="AAIGrid":
+        elif format_ == "AAIGrid":
             filename = row["filename"] + ".asc"
             if not overr:
                 impflags += "o"
 
         if link:
             ret = gscript.run_command("r.external", input=filename,
-                                   output=name,
-                                   flags=impflags,
-                                   overwrite=gscript.overwrite())
+                                      output=name,
+                                      flags=impflags,
+                                      overwrite=gscript.overwrite())
         else:
             ret = gscript.run_command("r.in.gdal", input=filename,
-                                   output=name,
-                                   flags=impflags,
-                                   overwrite=gscript.overwrite())
+                                      output=name,
+                                      flags=impflags,
+                                      overwrite=gscript.overwrite())
 
         if ret != 0:
-            gscript.fatal(_("Unable to import/link raster map <%s> from file %s.") %(name, 
-                                                                     filename))
+            gscript.fatal(_("Unable to import/link raster map <%s> from file"
+                            " %s.") % (name, filename))
 
         # Set the color rules if present
         filename = row["filename"] + ".color"
         if os.path.isfile(filename):
             ret = gscript.run_command("r.colors", map=name,
-                                   rules=filename,
-                                   overwrite=gscript.overwrite())
+                                      rules=filename,
+                                      overwrite=gscript.overwrite())
             if ret != 0:
                 gscript.fatal(_("Unable to set the color rules for "
-                             "raster map <%s>.") % name)
+                                "raster map <%s>.") % name)
 
     # Set the computational region from the last map imported
     if set_current_region is True:
@@ -107,14 +107,13 @@ def _import_raster_maps(maplist, set_current_region=False):
         name = row["name"]
         filename = row["filename"] + ".pack"
         ret = gscript.run_command("r.unpack", input=filename,
-                               output=name,
-                               flags=impflags,
-                               overwrite=gscript.overwrite(),
-                               verbose=True)
+                                  output=name, flags=impflags,
+                                  overwrite=gscript.overwrite(),
+                                  verbose=True)
 
         if ret != 0:
-            gscript.fatal(_("Unable to unpack raster map <%s> from file %s.") % (name, 
-                                                                              filename))
+            gscript.fatal(_("Unable to unpack raster map <%s> from file "
+                            "%s.") % (name, filename))
 
     # Set the computational region from the last map imported
     if set_current_region is True:
@@ -132,13 +131,12 @@ def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
         filename = row["filename"] + ".xml"
 
         ret = gscript.run_command("v.in.ogr", dsn=filename,
-                               output=name,
-                               flags=impflags,
-                               overwrite=gscript.overwrite())
+                                  output=name, flags=impflags,
+                                  overwrite=gscript.overwrite())
 
         if ret != 0:
-            gscript.fatal(_("Unable to import vector map <%s> from file %s.") % (name,
-                                                                              filename))
+            gscript.fatal(_("Unable to import vector map <%s> from file "
+                            "%s.") % (name, filename))
 
 ############################################################################
 
@@ -155,22 +153,21 @@ def _import_vector_maps(maplist):
             continue
         filename = row["filename"] + ".pack"
         ret = gscript.run_command("v.unpack", input=filename,
-                               output=name,
-                               flags=impflags,
-                               overwrite=gscript.overwrite(),
-                               verbose=True)
+                                  output=name, flags=impflags,
+                                  overwrite=gscript.overwrite(),
+                                  verbose=True)
 
         if ret != 0:
-            gscript.fatal(_("Unable to unpack vector map <%s> from file %s.") % (name, 
-                                                                              filename))
+            gscript.fatal(_("Unable to unpack vector map <%s> from file "
+                            "%s.") % (name, filename))
 
         imported_maps[name] = name
 ############################################################################
 
 
 def import_stds(input, output, extrdir, title=None, descr=None, location=None,
-        link=False, exp=False, overr=False, create=False, stds_type="strds", 
-        base=None, set_current_region=False):
+                link=False, exp=False, overr=False, create=False,
+                stds_type="strds", base=None, set_current_region=False):
     """Import space time datasets of type raster and vector
 
         :param input: Name of the input archive file
@@ -189,8 +186,8 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
                       Do not import the space time datasets.
         :param stds_type: The type of the space time dataset that
                          should be imported
-        :param base: The base name of the new imported maps, it will be extended
-                    using a numerical index.
+        :param base: The base name of the new imported maps, it will be
+                     extended using a numerical index.
     """
 
     global raise_on_error
@@ -200,7 +197,7 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
     # Check if input file and extraction directory exits
     if not os.path.exists(input):
         gscript.fatal(_("Space time raster dataset archive <%s> not found")
-                   % input)
+                      % input)
     if not create and not os.path.exists(extrdir):
         gscript.fatal(_("Extraction directory <%s> not found") % extrdir)
 
@@ -237,15 +234,18 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
         p.communicate()
         temp_file.close()
 
-        if not gscript.compare_key_value_text_files(temp_name, proj_name, sep="="):
+        if not gscript.compare_key_value_text_files(temp_name, proj_name,
+                                                    sep="="):
             if overr:
                 gscript.warning(_("Projection information does not match. "
-                               "Proceeding..."))
+                                  "Proceeding..."))
             else:
                 diff = ''.join(gscript.diff_files(temp_name, proj_name))
-                gscript.warning(_("Difference between PROJ_INFO file of imported map "
-                               "and of current location:\n{diff}").format(diff=diff))
-                gscript.fatal(_("Projection information does not match. Aborting."))
+                gscript.warning(_("Difference between PROJ_INFO file of "
+                                  "imported map and of current location:"
+                                  "\n{diff}").format(diff=diff))
+                gscript.fatal(_("Projection information does not match. "
+                                "Aborting."))
 
     # Create a new location based on the projection information and switch
     # into it
@@ -254,26 +254,26 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
         try:
             proj4_string = open(proj_file_name, 'r').read()
             gscript.create_location(dbase=old_env["GISDBASE"],
-                                 location=location,
-                                 proj4=proj4_string)
+                                    location=location,
+                                    proj4=proj4_string)
             # Just create a new location and return
             if create:
                 os.chdir(old_cwd)
                 return
         except Exception as e:
             gscript.fatal(_("Unable to create location %(l)s. Reason: %(e)s")
-                         % {'l': location, 'e': str(e)})
+                          % {'l': location, 'e': str(e)})
         # Switch to the new created location
         ret = gscript.run_command("g.mapset", mapset="PERMANENT",
-                               location=location,
-                               gisdbase=old_env["GISDBASE"])
+                                  location=location,
+                                  gisdbase=old_env["GISDBASE"])
         if ret != 0:
             gscript.fatal(_("Unable to switch to location %s") % location)
         # create default database connection
         ret = gscript.run_command("t.connect", flags="d")
         if ret != 0:
             gscript.fatal(_("Unable to create default temporal database "
-                         "in new location %s") % location)
+                            "in new location %s") % location)
 
     try:
         # Make sure the temporal database exists
@@ -305,8 +305,9 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
             # that must be extended by the file suffix
             filename = line_list[0].strip().split(":")[0]
             if base:
-                mapname = "%s_%s" % (base, gscript.get_num_suffix(line_count + 1, max_count))
-                mapid= "%s@%s"%(mapname, mapset)
+                mapname = "%s_%s" % (base, gscript.get_num_suffix(line_count + 1,
+                                                                  max_count))
+                mapid = "%s@%s" % (mapname, mapset)
             else:
                 mapname = filename
                 mapid = mapname + "@" + mapset
@@ -317,16 +318,16 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
             row["id"] = mapid
             row["start"] = line_list[1].strip()
             row["end"] = line_list[2].strip()
-            
-            new_list_file.write("%s%s%s%s%s\n"%(mapname,fs, row["start"], 
-                                              fs, row["end"]))
+
+            new_list_file.write("%s%s%s%s%s\n" % (mapname, fs, row["start"],
+                                                  fs, row["end"]))
 
             maplist.append(row)
             line_count += 1
 
         list_file.close()
         new_list_file.close()
-        
+
         # Read the init file
         fs = "="
         init = {}
@@ -345,9 +346,9 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
            "semantic_type" not in init or \
            "number_of_maps" not in init:
             gscript.fatal(_("Key words %(t)s, %(s)s or %(n)s not found in init"
-                         " file.") % {'t': "temporal_type",
-                                      's': "semantic_type",
-                                      'n': "number_of_maps"})
+                            " file.") % {'t': "temporal_type",
+                                         's': "semantic_type",
+                                         'n': "number_of_maps"})
 
         if line_count != int(init["number_of_maps"]):
             gscript.fatal(_("Number of maps mismatch in init and list file."))
@@ -361,7 +362,8 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
             format_ = init["format"]
 
         if stds_type != type_:
-            gscript.fatal(_("The archive file is of wrong space time dataset type"))
+            gscript.fatal(_("The archive file is of wrong space time dataset"
+                            " type"))
 
         # Check the existence of the files
         if format_ == "GTiff":
@@ -369,19 +371,19 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
                 filename = row["filename"] + ".tif"
                 if not os.path.exists(filename):
                     gscript.fatal(_("Unable to find GeoTIFF raster file "
-                                 "<%s> in archive.") % filename)
+                                    "<%s> in archive.") % filename)
         elif format_ == "AAIGrid":
             for row in maplist:
                 filename = row["filename"] + ".asc"
                 if not os.path.exists(filename):
                     gscript.fatal(_("Unable to find AAIGrid raster file "
-                                 "<%s> in archive.") % filename)
+                                    "<%s> in archive.") % filename)
         elif format_ == "GML":
             for row in maplist:
                 filename = row["filename"] + ".xml"
                 if not os.path.exists(filename):
                     gscript.fatal(_("Unable to find GML vector file "
-                                 "<%s> in archive.") % filename)
+                                    "<%s> in archive.") % filename)
         elif format_ == "pack":
             for row in maplist:
                 if type_ == "stvds":
@@ -390,22 +392,22 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
                     filename = row["filename"] + ".pack"
                 if not os.path.exists(filename):
                     gscript.fatal(_("Unable to find GRASS package file "
-                                 "<%s> in archive.") % filename)
+                                    "<%s> in archive.") % filename)
         else:
             gscript.fatal(_("Unsupported input format"))
 
         # Check the space time dataset
         id = output + "@" + mapset
         sp = dataset_factory(type_, id)
-        if sp.is_in_db() and gscript.overwrite() == False:
-            gscript.fatal(_("Space time %(t)s dataset <%(sp)s> is already in the "
-                         "database. Use the overwrite flag.") % {'t': type_,
-                                                                 'sp': sp.get_id()})
+        if sp.is_in_db() and gscript.overwrite() is False:
+            gscript.fatal(_("Space time %(t)s dataset <%(sp)s> is already in"
+                            " the database. Use the overwrite flag.") %
+                          {'t': type_, 'sp': sp.get_id()})
 
         # Import the maps
         if type_ == "strds":
             if format_ == "GTiff" or format_ == "AAIGrid":
-                _import_raster_maps_from_gdal(maplist, overr, exp, location, 
+                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                               link, format_, set_current_region)
             if format_ == "pack":
                 _import_raster_maps(maplist, set_current_region)
@@ -417,11 +419,11 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
                 _import_vector_maps(maplist)
 
         # Create the space time dataset
-        if sp.is_in_db() and gscript.overwrite() == True:
+        if sp.is_in_db() and gscript.overwrite() is True:
             gscript.info(_("Overwrite space time %(sp)s dataset "
-                        "<%(id)s> and unregister all maps.") % {
-                        'sp': sp.get_new_map_instance(None).get_type(),
-                        'id': sp.get_id()})
+                           "<%(id)s> and unregister all maps.") %
+                         {'sp': sp.get_new_map_instance(None).get_type(),
+                          'id': sp.get_id()})
             sp.delete()
             sp = sp.get_new_instance(id)
 
@@ -430,12 +432,13 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
         relative_time_unit = None
         if temporal_type == "relative":
             if "relative_time_unit" not in init:
-                gscript.fatal(_("Key word %s not found in init file.") % ("relative_time_unit"))
+                gscript.fatal(_("Key word %s not found in init file.") %
+                              ("relative_time_unit"))
             relative_time_unit = init["relative_time_unit"]
             sp.set_relative_time_unit(relative_time_unit)
 
         gscript.verbose(_("Create space time %s dataset.") %
-                     sp.get_new_map_instance(None).get_type())
+                        sp.get_new_map_instance(None).get_type())
 
         sp.set_initial_values(temporal_type=temporal_type,
                               semantic_type=semantic_type, title=title,
@@ -459,7 +462,7 @@ def import_stds(input, output, extrdir, title=None, descr=None, location=None,
         if location:
             # Switch to the old location
             ret = gscript.run_command("g.mapset", mapset=old_env["MAPSET"],
-                                   location=old_env["LOCATION_NAME"],
-                                   gisdbase=old_env["GISDBASE"])
+                                      location=old_env["LOCATION_NAME"],
+                                      gisdbase=old_env["GISDBASE"])
 
         gscript.set_raise_on_error(old_state)
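
The import counterpart, mirroring the import_stds() signature shown in this file (a sketch with made-up paths; it assumes a GRASS session in the target location with an initialized temporal database):

```python
import grass.temporal as tgis

tgis.init()
tgis.import_stds(input="/tmp/precipitation.tar.gzip",
                 output="precipitation_imported",
                 extrdir="/tmp/extract",
                 stds_type="strds",
                 set_current_region=True)
```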

File differences suppressed because they are too large
+ 390 - 360
lib/python/temporal/temporal_algebra.py


+ 147 - 125
lib/python/temporal/temporal_extent.py

@@ -23,51 +23,53 @@ from base import *
 
 ###############################################################################
 
-class TemporalExtent(SQLDatabaseInterface):
-    """This is the abstract time base class for relative and absolute time objects
-
-        It abstract class implements the interface to absolute and relative time.
-        Absolute time is represented by datetime time stamps,
-        relative time is represented by a unit an integer value.
-
-        This class implements temporal topology relationships computation
-        after [Allen and Ferguson 1994 Actions and Events in Interval Temporal Logic].
-
-        Usage:
 
-        .. code-block:: python
-
-            >>> init()
-            >>> A = TemporalExtent(table="raster_absolute_time",
-            ... ident="soil@PERMANENT", start_time=datetime(2001, 01, 01),
-            ... end_time=datetime(2005,01,01) )
-            >>> A.id
-            'soil@PERMANENT'
-            >>> A.start_time
-            datetime.datetime(2001, 1, 1, 0, 0)
-            >>> A.end_time
-            datetime.datetime(2005, 1, 1, 0, 0)
-            >>> A.print_info()
-             | Start time:................. 2001-01-01 00:00:00
-             | End time:................... 2005-01-01 00:00:00
-            >>> A.print_shell_info()
-            start_time=2001-01-01 00:00:00
-            end_time=2005-01-01 00:00:00
-            >>> # relative time
-            >>> A = TemporalExtent(table="raster_absolute_time",
-            ... ident="soil@PERMANENT", start_time=0, end_time=1 )
-            >>> A.id
-            'soil@PERMANENT'
-            >>> A.start_time
-            0
-            >>> A.end_time
-            1
-            >>> A.print_info()
-             | Start time:................. 0
-             | End time:................... 1
-            >>> A.print_shell_info()
-            start_time=0
-            end_time=1
+class TemporalExtent(SQLDatabaseInterface):
+    """This is the abstract time base class for relative and absolute time
+    objects.
+
+    This abstract class implements the interface to absolute and relative time.
+    Absolute time is represented by datetime time stamps,
+    relative time is represented by a unit and an integer value.
+
+    This class implements temporal topology relationships computation
+    after [Allen and Ferguson 1994 Actions and Events in Interval Temporal Logic].
+
+    Usage:
+
+    .. code-block:: python
+
+        >>> init()
+        >>> A = TemporalExtent(table="raster_absolute_time",
+        ... ident="soil@PERMANENT", start_time=datetime(2001, 01, 01),
+        ... end_time=datetime(2005,01,01) )
+        >>> A.id
+        'soil@PERMANENT'
+        >>> A.start_time
+        datetime.datetime(2001, 1, 1, 0, 0)
+        >>> A.end_time
+        datetime.datetime(2005, 1, 1, 0, 0)
+        >>> A.print_info()
+         | Start time:................. 2001-01-01 00:00:00
+         | End time:................... 2005-01-01 00:00:00
+        >>> A.print_shell_info()
+        start_time=2001-01-01 00:00:00
+        end_time=2005-01-01 00:00:00
+        >>> # relative time
+        >>> A = TemporalExtent(table="raster_absolute_time",
+        ... ident="soil@PERMANENT", start_time=0, end_time=1 )
+        >>> A.id
+        'soil@PERMANENT'
+        >>> A.start_time
+        0
+        >>> A.end_time
+        1
+        >>> A.print_info()
+         | Start time:................. 0
+         | End time:................... 1
+        >>> A.print_shell_info()
+        start_time=0
+        end_time=1
 
     """
     def __init__(self, table=None, ident=None, start_time=None, end_time=None):
@@ -386,7 +388,8 @@ class TemporalExtent(SQLDatabaseInterface):
 
            :param extent: The temporal extent to create a union with
            :return: The new temporal extent with start and end time,
-                   or None in case the temporal extents are unrelated (before or after)
+                    or None in case the temporal extents are unrelated
+                    (before or after)
 
            .. code-block:: python
 
@@ -415,12 +418,13 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) starts at the start of the
            provided temporal extent (B) and finishes within it
            ::
-           
+
                A  |-----|
                B  |---------|
 
 
-           :param extent: The temporal extent object with which this extent starts
+           :param extent: The temporal extent object with which this extent
+                          starts
 
            Usage:
 
@@ -434,7 +438,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["start_time"] == extent.D["start_time"] and \
@@ -447,11 +451,12 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) started at the start of the
            provided temporal extent (B) and finishes after it
            ::
-           
+
                A  |---------|
                B  |-----|
 
-           :param extent: The temporal extent object with which this extent started
+           :param extent: The temporal extent object with which this extent
+                          started
 
            Usage:
 
@@ -465,7 +470,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["start_time"] == extent.D["start_time"] and \
@@ -475,14 +480,15 @@ class TemporalExtent(SQLDatabaseInterface):
             return False
 
     def finishes(self, extent):
-        """Return True if this temporal extent (A) starts after the start of the
-           provided temporal extent (B) and finishes with it
+        """Return True if this temporal extent (A) starts after the start of
+           the provided temporal extent (B) and finishes with it
            ::
-           
+
                A      |-----|
                B  |---------|
 
-           :param extent: The temporal extent object with which this extent finishes
+           :param extent: The temporal extent object with which this extent
+                          finishes
 
            Usage:
 
@@ -496,7 +502,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["end_time"] == extent.D["end_time"] and \
@@ -506,14 +512,15 @@ class TemporalExtent(SQLDatabaseInterface):
             return False
 
     def finished(self, extent):
-        """Return True if this temporal extent (A) starts before the start of the
-           provided temporal extent (B) and finishes with it
+        """Return True if this temporal extent (A) starts before the start of
+           the provided temporal extent (B) and finishes with it
            ::
-           
+
                A  |---------|
                B      |-----|
 
-           :param extent: The temporal extent object with which this extent finishes
+           :param extent: The temporal extent object with which this extent
+                          finishes
 
            Usage:
 
@@ -527,7 +534,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["end_time"] == extent.D["end_time"] and \
@@ -540,11 +547,12 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) is located after the
            provided temporal extent (B)
            ::
-           
+
                A             |---------|
                B  |---------|
 
-           :param extent: The temporal extent object that is located before this extent
+           :param extent: The temporal extent object that is located before
+                          this extent
 
            Usage:
 
@@ -573,11 +581,12 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) is located before the
            provided temporal extent (B)
            ::
-           
+
                A  |---------|
                B             |---------|
 
-           :param extent: The temporal extent object that is located after this extent
+           :param extent: The temporal extent object that is located after
+                          this extent
 
            Usage:
 
@@ -606,7 +615,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) is a meeting neighbor the
            provided temporal extent (B)
            ::
-           
+
                A            |---------|
                B  |---------|
                A  |---------|
@@ -633,7 +642,7 @@ class TemporalExtent(SQLDatabaseInterface):
                True
 
         """
-        if  self.D["end_time"] is None and extent.D["end_time"] is None:
+        if self.D["end_time"] is None and extent.D["end_time"] is None:
             return False
 
         if (self.D["start_time"] == extent.D["end_time"]) or \
@@ -646,7 +655,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) follows the
            provided temporal extent (B)
            ::
-           
+
                A            |---------|
                B  |---------|
 
@@ -665,7 +674,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  extent.D["end_time"] is None:
+        if extent.D["end_time"] is None:
             return False
 
         if self.D["start_time"] == extent.D["end_time"]:
@@ -677,7 +686,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) precedes the provided
            temporal extent (B)
            ::
-           
+
                A  |---------|
                B            |---------|
 
@@ -698,7 +707,7 @@ class TemporalExtent(SQLDatabaseInterface):
 
 
         """
-        if  self.D["end_time"] is None:
+        if self.D["end_time"] is None:
             return False
 
         if self.D["end_time"] == extent.D["start_time"]:
@@ -710,7 +719,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) is located during the provided
            temporal extent (B)
            ::
-           
+
                A   |-------|
                B  |---------|
 
@@ -729,11 +738,11 @@ class TemporalExtent(SQLDatabaseInterface):
 
         """
         # Check single point of time in interval
-        if  extent.D["end_time"] is None:
+        if extent.D["end_time"] is None:
             return False
 
         # Check single point of time in interval
-        if  self.D["end_time"] is None:
+        if self.D["end_time"] is None:
             if self.D["start_time"] >= extent.D["start_time"] and \
                self.D["start_time"] < extent.D["end_time"]:
                 return True
@@ -750,7 +759,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) contains the provided
            temporal extent (B)
            ::
-           
+
                A  |---------|
                B   |-------|
 
@@ -770,11 +779,11 @@ class TemporalExtent(SQLDatabaseInterface):
 
         """
         # Check single point of time in interval
-        if  self.D["end_time"] is None:
+        if self.D["end_time"] is None:
             return False
 
         # Check single point of time in interval
-        if  extent.D["end_time"] is None:
+        if extent.D["end_time"] is None:
             if self.D["start_time"] <= extent.D["start_time"] and \
                self.D["end_time"] > extent.D["start_time"]:
                 return True
@@ -791,7 +800,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) is equal to the provided
            temporal extent (B)
            ::
-           
+
                A  |---------|
                B  |---------|
 
@@ -810,13 +819,13 @@ class TemporalExtent(SQLDatabaseInterface):
                True
 
         """
-        if  self.D["end_time"] is None and extent.D["end_time"] is None:
+        if self.D["end_time"] is None and extent.D["end_time"] is None:
             if self.D["start_time"] == extent.D["start_time"]:
                 return True
             else:
                 return False
 
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["start_time"] == extent.D["start_time"] and \
@@ -829,7 +838,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) overlapped the provided
            temporal extent (B)
            ::
-           
+
                A  |---------|
                B    |---------|
 
@@ -855,7 +864,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["start_time"] < extent.D["start_time"] and \
@@ -869,7 +878,7 @@ class TemporalExtent(SQLDatabaseInterface):
         """Return True if this temporal extent (A) overlapps the provided
            temporal extent (B)
            ::
-           
+
                A    |---------|
                B  |---------|
 
@@ -896,7 +905,7 @@ class TemporalExtent(SQLDatabaseInterface):
                False
 
         """
-        if  self.D["end_time"] is None or extent.D["end_time"] is None:
+        if self.D["end_time"] is None or extent.D["end_time"] is None:
             return False
 
         if self.D["start_time"] > extent.D["start_time"] and \
@@ -908,27 +917,28 @@ class TemporalExtent(SQLDatabaseInterface):
 
     def temporal_relation(self, extent):
         """Returns the temporal relation between temporal objects
-           Temporal relationships are implemented after
-           [Allen and Ferguson 1994 Actions and Events in Interval Temporal Logic]
-
-           The following temporal relationships are supported:
-
-               - equal
-               - during
-               - contains
-               - overlaps
-               - overlapped
-               - after
-               - before
-               - starts
-               - finishes
-               - started
-               - finished
-               - follows
-               - precedes
-
-           :param extent: The temporal extent
-           :return: The name of the temporal relation or None if no relation found
+        Temporal relationships are implemented following
+        [Allen and Ferguson 1994 Actions and Events in Interval Temporal Logic]
+
+        The following temporal relationships are supported:
+
+            - equal
+            - during
+            - contains
+            - overlaps
+            - overlapped
+            - after
+            - before
+            - starts
+            - finishes
+            - started
+            - finished
+            - follows
+            - precedes
+
+        :param extent: The temporal extent
+        :return: The name of the temporal relation or None if no relation
+                 found
         """
 
         # First check for correct time
@@ -1051,23 +1061,27 @@ class AbsoluteTemporalExtent(TemporalExtent):
 
 ###############################################################################
 
+
 class RasterAbsoluteTime(AbsoluteTemporalExtent):
     def __init__(self, ident=None, start_time=None, end_time=None):
         AbsoluteTemporalExtent.__init__(self, "raster_absolute_time",
-            ident, start_time, end_time)
+                                        ident, start_time, end_time)
+
 
 class Raster3DAbsoluteTime(AbsoluteTemporalExtent):
     def __init__(self, ident=None, start_time=None, end_time=None):
         AbsoluteTemporalExtent.__init__(self, "raster3d_absolute_time",
-            ident, start_time, end_time)
+                                        ident, start_time, end_time)
+
 
 class VectorAbsoluteTime(AbsoluteTemporalExtent):
     def __init__(self, ident=None, start_time=None, end_time=None):
         AbsoluteTemporalExtent.__init__(self, "vector_absolute_time",
-            ident, start_time, end_time)
+                                        ident, start_time, end_time)
 
 ###############################################################################
 
+
 class STDSAbsoluteTime(AbsoluteTemporalExtent):
     """This class implements the absolute time extent for space time dataset
 
@@ -1122,7 +1136,7 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
         """Set the type of the map time
 
            Registered maps may have different types of time:
-           
+
            - Single point of time "point"
            - Time intervals "interval"
            - Single point and interval time "mixed"
@@ -1143,7 +1157,7 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
         """Get the type of the map time
 
            Registered maps may have different types of time:
-           
+
            - Single point of time "point"
            - Time intervals "interval"
            - Single point and interval time "mixed"
@@ -1174,28 +1188,30 @@ class STDSAbsoluteTime(AbsoluteTemporalExtent):
 
 ###############################################################################
 
+
 class STRDSAbsoluteTime(STDSAbsoluteTime):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  granularity=None):
         STDSAbsoluteTime.__init__(self, "strds_absolute_time",
-            ident, start_time, end_time, granularity)
+                                  ident, start_time, end_time, granularity)
 
 
 class STR3DSAbsoluteTime(STDSAbsoluteTime):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  granularity=None):
         STDSAbsoluteTime.__init__(self, "str3ds_absolute_time",
-            ident, start_time, end_time, granularity)
+                                  ident, start_time, end_time, granularity)
 
 
 class STVDSAbsoluteTime(STDSAbsoluteTime):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  granularity=None):
         STDSAbsoluteTime.__init__(self, "stvds_absolute_time",
-            ident, start_time, end_time, granularity)
+                                  ident, start_time, end_time, granularity)
 
 ###############################################################################
 
+
 class RelativeTemporalExtent(TemporalExtent):
     """This is the relative time class for all maps and space time datasets
 
@@ -1289,18 +1305,19 @@ class RelativeTemporalExtent(TemporalExtent):
 
 ###############################################################################
 
+
 class RasterRelativeTime(RelativeTemporalExtent):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  unit=None):
-        RelativeTemporalExtent.__init__(
-            self, "raster_relative_time", ident, start_time, end_time, unit)
+        RelativeTemporalExtent.__init__(self, "raster_relative_time", ident,
+                                        start_time, end_time, unit)
 
 
 class Raster3DRelativeTime(RelativeTemporalExtent):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  unit=None):
-        RelativeTemporalExtent.__init__(self,
-            "raster3d_relative_time", ident, start_time, end_time, unit)
+        RelativeTemporalExtent.__init__(self, "raster3d_relative_time", ident,
+                                        start_time, end_time, unit)
 
 
 class VectorRelativeTime(RelativeTemporalExtent):
@@ -1311,6 +1328,7 @@ class VectorRelativeTime(RelativeTemporalExtent):
 
 ###############################################################################
 
+
 class STDSRelativeTime(RelativeTemporalExtent):
     """This is the relative time class for all maps and space time datasets
 
@@ -1367,7 +1385,7 @@ class STDSRelativeTime(RelativeTemporalExtent):
         """Set the type of the map time
 
            Registered maps may have different types of time:
-           
+
            - Single point of time "point"
            - Time intervals "interval"
            - Single point and interval time "mixed"
@@ -1388,7 +1406,7 @@ class STDSRelativeTime(RelativeTemporalExtent):
         """Get the type of the map time
 
            Registered maps may have different types of time:
-           
+
            - Single point of time "point"
            - Time intervals "interval"
            - Single point and interval time "mixed"
@@ -1419,25 +1437,29 @@ class STDSRelativeTime(RelativeTemporalExtent):
 
 ###############################################################################
 
+
 class STRDSRelativeTime(STDSRelativeTime):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  unit=None, granularity=None, map_time=None):
-        STDSRelativeTime.__init__(self, "strds_relative_time",
-            ident, start_time, end_time, unit, granularity, map_time)
+        STDSRelativeTime.__init__(self, "strds_relative_time", ident,
+                                  start_time, end_time, unit, granularity,
+                                  map_time)
 
 
 class STR3DSRelativeTime(STDSRelativeTime):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  unit=None, granularity=None, map_time=None):
-        STDSRelativeTime.__init__(self, "str3ds_relative_time",
-            ident, start_time, end_time, unit, granularity, map_time)
+        STDSRelativeTime.__init__(self, "str3ds_relative_time", ident,
+                                  start_time, end_time, unit, granularity,
+                                  map_time)
 
 
 class STVDSRelativeTime(STDSRelativeTime):
     def __init__(self, ident=None, start_time=None, end_time=None,
                  unit=None, granularity=None, map_time=None):
-        STDSRelativeTime.__init__(self, "stvds_relative_time",
-            ident, start_time, end_time, unit, granularity, map_time)
+        STDSRelativeTime.__init__(self, "stvds_relative_time", ident,
+                                  start_time, end_time, unit, granularity,
+                                  map_time)
 
 ###############################################################################
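
A minimal usage sketch of the relation checks above (Allen's interval algebra); the map identifiers and dates are illustrative only, and an initialized GRASS session where grass.temporal is importable is assumed.

    from datetime import datetime
    import grass.temporal as tgis

    tgis.init()
    # Two absolute-time extents; the relation checks only compare the
    # stored start and end times, no map data is touched.
    a = tgis.RasterAbsoluteTime(ident="a@PERMANENT",
                                start_time=datetime(2001, 1, 1),
                                end_time=datetime(2002, 1, 1))
    b = tgis.RasterAbsoluteTime(ident="b@PERMANENT",
                                start_time=datetime(2001, 3, 1),
                                end_time=datetime(2001, 9, 1))
    print(a.temporal_relation(b))  # expected: "contains"
    print(b.temporal_relation(a))  # expected: "during"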
 

+ 11 - 6
lib/python/temporal/temporal_granularity.py

@@ -22,11 +22,13 @@ from datetime_math import *
 
 ###############################################################################
 
+
 def check_granularity_string(granularity, temporal_type):
     """Check if the granularity string is valid
 
         :param granularity: The granularity string
-        :param temporal_type: The temporal type of the granularity relative or absolute
+        :param temporal_type: The temporal type of the granularity relative or
+                              absolute
         :return: True if valid, False if invalid
 
         .. code-block:: python
@@ -98,6 +100,7 @@ def check_granularity_string(granularity, temporal_type):
 
 ###############################################################################
 
+
 def compute_relative_time_granularity(maps):
     """Compute the relative time granularity
 
@@ -245,8 +248,8 @@ def compute_absolute_time_granularity(maps):
         is only correct in case of not overlapping intervals.
         Hence a correct temporal topology is required for computation.
 
-        The computed granularity is returned as number of seconds or minutes or hours
-        or days or months or years.
+        The computed granularity is returned as number of seconds or minutes
+        or hours or days or months or years.
 
         :param maps: a list of map objects ordered by start_time
         :return: The temporal topology as string "integer unit"
@@ -501,9 +504,11 @@ def gcd(a, b):
 
 def gcd_list(list):
     """Finds the GCD of numbers in a list.
-    Input: List of numbers you want to find the GCD of
-            E.g. [8, 24, 12]
-    Returns: GCD of all numbers
+
+    :param list: List of numbers you want to find the GCD of
+                 E.g. [8, 24, 12]
+    :return: GCD of all numbers
+
     """
     return reduce(gcd, list)
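
A short, hedged sketch of the granularity helpers touched here; the expected values follow the docstrings above, and an active GRASS session is assumed so that grass.temporal can be initialized.

    import grass.temporal as tgis

    tgis.init()
    print(tgis.check_granularity_string("1 day", "absolute"))       # expected: True
    print(tgis.check_granularity_string("30 minutes", "absolute"))  # expected: True
    print(tgis.check_granularity_string("2 parsecs", "absolute"))   # expected: False
    print(tgis.gcd_list([8, 24, 12]))                               # expected: 4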
 

+ 21 - 13
lib/python/temporal/temporal_raster3d_algebra.py

@@ -10,21 +10,24 @@ for details.
 
 """
 
-import grass.pygrass.modules as pygrass
 from temporal_raster_base_algebra import *
 
 ###############################################################################
 
+
 class TemporalRaster3DAlgebraParser(TemporalRasterBaseAlgebraParser):
     """The temporal raster algebra class"""
 
-    def __init__(self, pid=None, run=False, debug=True, spatial = False, nprocs = 1, register_null = False):
-        TemporalRasterBaseAlgebraParser.__init__(self, pid, run, debug, spatial, nprocs, register_null)
+    def __init__(self, pid=None, run=False, debug=True, spatial=False,
+                 nprocs=1, register_null=False):
+        TemporalRasterBaseAlgebraParser.__init__(self, pid, run, debug,
+                                                 spatial, nprocs,
+                                                 register_null)
 
         self.m_mapcalc = pymod.Module('r3.mapcalc')
         self.m_remove = pymod.Module('g.remove')
 
-    def parse(self, expression, basename = None, overwrite=False):
+    def parse(self, expression, basename=None, overwrite=False):
         self.lexer = TemporalRasterAlgebraLexer()
         self.lexer.build()
         self.parser = yacc.yacc(module=self, debug=self.debug)
@@ -102,17 +105,24 @@ class TemporalRaster3DAlgebraParser(TemporalRasterBaseAlgebraParser):
                     # Get neighboring map and set temporal extent.
                     map_n = maplist[new_index]
                     # Generate an intermediate map for the result map list.
-                    map_new = self.generate_new_map(map_n, bool_op = 'and', copy = True)
+                    map_new = self.generate_new_map(map_n, bool_op='and',
+                                                    copy=True)
                     map_new.set_temporal_extent(map_i_t_extent)
                     # Create r.mapcalc expression string for the operation.
                     if "cmd_list" in dir(map_new) and len(t) == 5:
-                        cmdstring = "%s" %(map_new.cmd_list)
+                        cmdstring = "%s" % (map_new.cmd_list)
                     elif "cmd_list" not in dir(map_new) and len(t) == 5:
-                        cmdstring = "%s" %(map_n.get_id())
-                    elif "cmd_list" in dir(map_new) and len(t) in (9,11):
-                        cmdstring = "%s[%s,%s,%s]" %(map_new.cmd_list, row_neighbor, col_neighbor, depth_neighbor)
-                    elif "cmd_list" not in dir(map_new) and len(t) in (9,11):
-                        cmdstring = "%s[%s,%s,%s]" %(map_n.get_id(), row_neighbor, col_neighbor, depth_neighbor)
+                        cmdstring = "%s" % (map_n.get_id())
+                    elif "cmd_list" in dir(map_new) and len(t) in (9, 11):
+                        cmdstring = "%s[%s,%s,%s]" % (map_new.cmd_list,
+                                                      row_neighbor,
+                                                      col_neighbor,
+                                                      depth_neighbor)
+                    elif "cmd_list" not in dir(map_new) and len(t) in (9, 11):
+                        cmdstring = "%s[%s,%s,%s]" % (map_n.get_id(),
+                                                      row_neighbor,
+                                                      col_neighbor,
+                                                      depth_neighbor)
                     # Set new command list for map.
                     map_new.cmd_list = cmdstring
                     # Append map with updated command list to result list.
@@ -125,5 +135,3 @@ class TemporalRaster3DAlgebraParser(TemporalRasterBaseAlgebraParser):
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
-
-
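
For context, a hedged sketch of how this parser is typically driven, e.g. from t.rast3d.algebra; the expression, dataset names and basename are illustrative and assume registered space time 3D raster datasets "A" and "B" in an active GRASS session.

    import grass.temporal as tgis

    tgis.init()
    # run=True executes the generated r3.mapcalc commands instead of only
    # building the expression strings shown above.
    p = tgis.TemporalRaster3DAlgebraParser(run=True, debug=False,
                                           nprocs=2, register_null=False)
    p.parse("D = A + B", basename="d_vol", overwrite=True)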

+ 15 - 10
lib/python/temporal/temporal_raster_algebra.py

@@ -55,16 +55,20 @@ from temporal_raster_base_algebra import *
 
 ###############################################################################
 
+
 class TemporalRasterAlgebraParser(TemporalRasterBaseAlgebraParser):
     """The temporal raster algebra class"""
 
-    def __init__(self, pid=None, run=False, debug=True, spatial = False, nprocs = 1, register_null = False):
-        TemporalRasterBaseAlgebraParser.__init__(self, pid, run, debug, spatial, nprocs, register_null)
+    def __init__(self, pid=None, run=False, debug=True, spatial=False,
+                 nprocs=1, register_null=False):
+        TemporalRasterBaseAlgebraParser.__init__(self, pid, run, debug,
+                                                 spatial, nprocs,
+                                                 register_null)
 
         self.m_mapcalc = pymod.Module('r.mapcalc', run_=False, finish_=False)
         self.m_remove = pymod.Module('g.remove')
 
-    def parse(self, expression, basename = None, overwrite=False):
+    def parse(self, expression, basename=None, overwrite=False):
         self.lexer = TemporalRasterAlgebraLexer()
         self.lexer.build()
         self.parser = yacc.yacc(module=self, debug=self.debug)
@@ -139,17 +143,20 @@ class TemporalRasterAlgebraParser(TemporalRasterBaseAlgebraParser):
                     # Get neighbouring map and set temporal extent.
                     map_n = maplist[new_index]
                     # Generate an intermediate map for the result map list.
-                    map_new = self.generate_new_map(map_n, bool_op = 'and', copy = True)
+                    map_new = self.generate_new_map(map_n, bool_op='and',
+                                                    copy=True)
                     map_new.set_temporal_extent(map_i_t_extent)
                     # Create r.mapcalc expression string for the operation.
                     if "cmd_list" in dir(map_new) and len(t) == 5:
-                        cmdstring = "%s" %(map_new.cmd_list)
+                        cmdstring = "%s" % (map_new.cmd_list)
                     elif "cmd_list" not in dir(map_new) and len(t) == 5:
-                        cmdstring = "%s" %(map_n.get_id())
+                        cmdstring = "%s" % (map_n.get_id())
                     elif "cmd_list" in dir(map_new) and len(t) in (7, 9):
-                        cmdstring = "%s[%s,%s]" %(map_new.cmd_list, row_neigbour, col_neigbour)
+                        cmdstring = "%s[%s,%s]" % (map_new.cmd_list,
+                                                   row_neigbour, col_neigbour)
                     elif "cmd_list" not in dir(map_new) and len(t) in (7, 9):
-                        cmdstring = "%s[%s,%s]" %(map_n.get_id(), row_neigbour, col_neigbour)
+                        cmdstring = "%s[%s,%s]" % (map_n.get_id(),
+                                                   row_neigbour, col_neigbour)
                     # Set new command list for map.
                     map_new.cmd_list = cmdstring
                     # Append map with updated command list to result list.
@@ -162,5 +169,3 @@ class TemporalRasterAlgebraParser(TemporalRasterBaseAlgebraParser):
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
-
-
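
The branches above only differ in how the r.mapcalc expression string is assembled: the neighbourhood modifier is appended either to the map id or to the already collected command list. A self-contained illustration of that formatting, with made-up values (the variable names mirror the ones used in the code):

    # Made-up map id and offsets, only to show the resulting string.
    map_id = "tempmean_2010_01@climate"
    row_neigbour, col_neigbour = 1, -1
    cmdstring = "%s[%s,%s]" % (map_id, row_neigbour, col_neigbour)
    print(cmdstring)  # tempmean_2010_01@climate[1,-1]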

File diff suppressed because it is too large
+ 473 - 342
lib/python/temporal/temporal_raster_base_algebra.py


+ 48 - 48
lib/python/temporal/temporal_raster_operator.py

@@ -72,23 +72,24 @@ try:
 except:
     pass
 
+
 class TemporalRasterOperatorLexer(object):
     """Lexical analyzer for the GRASS GIS temporal operator"""
 
     # Functions that defines topological relations.
     relations = {
-        'equal'      : "EQUAL",
-        'follows'    : "FOLLOWS",
-        'precedes'   : "PRECEDES",
-        'overlaps'   : "OVERLAPS",
-        'overlapped' : "OVERLAPPED",
-        'during'     : "DURING",
-        'starts'     : "STARTS",
-        'finishes'   : "FINISHES",
-        'contains'   : "CONTAINS",
-        'started'    : "STARTED",
-        'finished'   : "FINISHED",
-        'over'       : "OVER"
+        'equal': "EQUAL",
+        'follows': "FOLLOWS",
+        'precedes': "PRECEDES",
+        'overlaps': "OVERLAPS",
+        'overlapped': "OVERLAPPED",
+        'during': "DURING",
+        'starts': "STARTS",
+        'finishes': "FINISHES",
+        'contains': "CONTAINS",
+        'started': "STARTED",
+        'finished': "FINISHED",
+        'over': "OVER"
         }
 
     # This is the list of token names.
@@ -113,20 +114,20 @@ class TemporalRasterOperatorLexer(object):
     tokens = tokens + tuple(relations.values())
 
     # Regular expression rules for simple tokens
-    t_T_SELECT           = r':'
-    t_T_NOT_SELECT       = r'!:'
-    t_COMMA              = r','
-    t_LEFTREF             = r'='
-    t_HASH               = r'\#'
-    t_OR                 = r'[\|]'
-    t_AND                = r'[&]'
-    t_MOD                = r'[\%]'
-    t_DIV                = r'[\/]'
-    t_MULT               = r'[\*]'
-    t_ADD                = r'[\+]'
-    t_SUB                = r'[-]'
-    t_CLPAREN             = r'\{'
-    t_CRPAREN             = r'\}'
+    t_T_SELECT = r':'
+    t_T_NOT_SELECT = r'!:'
+    t_COMMA = r','
+    t_LEFTREF = r'='
+    t_HASH = r'\#'
+    t_OR = r'[\|]'
+    t_AND = r'[&]'
+    t_MOD = r'[\%]'
+    t_DIV = r'[\/]'
+    t_MULT = r'[\*]'
+    t_ADD = r'[\+]'
+    t_SUB = r'[-]'
+    t_CLPAREN = r'\{'
+    t_CRPAREN = r'\}'
 
     # These are the things that should be ignored.
     t_ignore = ' \t'
@@ -153,24 +154,25 @@ class TemporalRasterOperatorLexer(object):
     # Handle errors.
     def t_error(self, t):
         raise SyntaxError("syntax error on line %d near '%s'" %
-            (t.lineno, t.value))
+                          (t.lineno, t.value))
 
     # Build the lexer
-    def build(self,**kwargs):
+    def build(self, **kwargs):
         self.lexer = lex.lex(module=self, **kwargs)
 
     # Just for testing
-    def test(self,data):
+    def test(self, data):
         self.name_list = {}
         print(data)
         self.lexer.input(data)
         while True:
-             tok = self.lexer.token()
-             if not tok: break
-             print tok
+            tok = self.lexer.token()
+            if not tok: break
+            print tok
 
 ###############################################################################
 
+
 class TemporalRasterOperatorParser(object):
     """The temporal algebra class"""
 
@@ -179,7 +181,7 @@ class TemporalRasterOperatorParser(object):
         self.lexer.build()
         self.parser = yacc.yacc(module=self)
 
-    def parse(self, expression, comparison = False):
+    def parse(self, expression, comparison=False):
         self.comparison = comparison
         self.parser.parse(expression)
 
@@ -201,12 +203,11 @@ class TemporalRasterOperatorParser(object):
             self.relations = t[2]
         else:
             self.relations = [t[2]]
-        self.temporal  = None
-        self.function  = None
+        self.temporal = None
+        self.function = None
 
         t[0] = t[2]
 
-
     def p_operator(self, t):
         # The expression should always return a list of maps.
         """
@@ -216,8 +217,8 @@ class TemporalRasterOperatorParser(object):
         """
         # Set three operator components.
         self.relations = ['equal']
-        self.temporal  = "="
-        self.function  = t[2]
+        self.temporal = "="
+        self.function = t[2]
 
         t[0] = t[2]
 
@@ -229,12 +230,12 @@ class TemporalRasterOperatorParser(object):
         """
         # Set three operator components.
         self.relations = ['equal']
-        self.temporal  = "="
+        self.temporal = "="
         if t[2] == t[3]:
-            self.function  = t[2] + t[3]
+            self.function = t[2] + t[3]
         else:
             raise SyntaxError("syntax error on line %d near '%s'" %
-                                (t.lineno, t.value))
+                              (t.lineno, t.value))
 
         t[0] = t[2]
 
@@ -267,8 +268,8 @@ class TemporalRasterOperatorParser(object):
             self.relations = t[2]
         else:
             self.relations = [t[2]]
-        self.temporal  = "="
-        self.function  = t[4]
+        self.temporal = "="
+        self.function = t[4]
 
         t[0] = t[4]
 
@@ -285,12 +286,12 @@ class TemporalRasterOperatorParser(object):
             self.relations = t[2]
         else:
             self.relations = [t[2]]
-        self.temporal  = "="
+        self.temporal = "="
         if t[4] == t[5]:
-            self.function  = t[4] + t[5]
+            self.function = t[4] + t[5]
         else:
             raise SyntaxError("syntax error on line %d near '%s'" %
-                                (t.lineno, t.value))
+                              (t.lineno, t.value))
 
         t[0] = t[4]
 
@@ -350,7 +351,7 @@ class TemporalRasterOperatorParser(object):
             rel_list = rel_list + t[3]
         else:
             rel_list.append(t[3])
-        t[0] =  rel_list
+        t[0] = rel_list
 
     def p_temporal_operator(self, t):
         # The list of relations.
@@ -386,4 +387,3 @@ class TemporalRasterOperatorParser(object):
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
-
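
The parser is used by feeding it an operator string and reading the three components back from the instance afterwards; a short sketch (the operator string is illustrative, and a GRASS environment is assumed for the import):

    import grass.temporal as tgis

    p = tgis.TemporalRasterOperatorParser()
    p.parse("{equal|during,+}")
    # relations, temporal and function are set as side effects of parsing,
    # e.g. (['equal', 'during'], '=', '+')
    print(p.relations, p.temporal, p.function)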

+ 97 - 103
lib/python/temporal/temporal_topology_dataset_connector.py

@@ -18,17 +18,19 @@ for details.
 """
 import copy
 
+
 class TemporalTopologyDatasetConnector(object):
-    """This class implements a temporal topology access structure to connect temporal related datasets
+    """This class implements a temporal topology access structure to connect
+       temporally related datasets
 
-       This object will be set up by temporal topology creation method provided by the 
-       SpatioTemporalTopologyBuilder.
+       This object will be set up by the temporal topology creation method
+       provided by the SpatioTemporalTopologyBuilder.
 
-       If correctly initialize the calls next() and prev() 
+       If correctly initialized, the calls next() and prev()
        let the user walk temporally forward and backward in time.
 
        The following temporal relations with access methods are supported:
-       
+
        - equal
        - follows
        - precedes
@@ -41,9 +43,8 @@ class TemporalTopologyDatasetConnector(object):
        - finishes
        - finished
 
-
        .. code-block:: python:
-       
+
            # We have build the temporal topology and we know the first map
            start = first
            while start:
@@ -54,7 +55,7 @@ class TemporalTopologyDatasetConnector(object):
                    map.print_info()
 
                start = start.next()
-        
+
             >>> import grass.temporal as tgis
             >>> tgis.init()
             >>> map = tgis.RasterDataset("a@P")
@@ -115,23 +116,23 @@ class TemporalTopologyDatasetConnector(object):
         """Reset any information about temporal topology"""
         self._temporal_topology = {}
         self._has_temporal_topology = False
-        
+
     def get_temporal_relations(self):
         """Return the dictionary of temporal relationships
-        
+
             Keys are the temporal relationships in upper case,
             values are abstract map objects.
-            
+
             :return: The temporal relations dictionary
         """
         return copy.copy(self._temporal_topology)
-        
+
     def get_number_of_temporal_relations(self):
-        """ Return a dictionary in which the keys are the relation names and the value
-        are the number of relations.
-        
+        """ Return a dictionary in which the keys are the relation names and
+        the values are the number of relations.
+
         The following relations are available:
-        
+
         - equal
         - follows
         - precedes
@@ -143,61 +144,62 @@ class TemporalTopologyDatasetConnector(object):
         - started
         - finishes
         - finished
-        
-        To access topological information the temporal topology must be build first
-        using the SpatioTemporalTopologyBuilder.
-        
-        :return: the dictionary with relations as keys and number as values or None in case the topology wasn't build
+
+        To access topological information the temporal topology must be built
+        first using the SpatioTemporalTopologyBuilder.
+
+        :return: the dictionary with relations as keys and number as values
+                 or None in case the topology wasn't built
         """
-        if self._has_temporal_topology == False:
+        if self._has_temporal_topology is False:
             return None
-    
+
         relations = {}
         try:
-            relations["equal"] = len(self._temporal_topology["EQUAL"]) 
+            relations["equal"] = len(self._temporal_topology["EQUAL"])
         except:
             relations["equal"] = 0
-        try: 
-            relations["follows"] = len(self._temporal_topology["FOLLOWS"]) 
-        except: 
+        try:
+            relations["follows"] = len(self._temporal_topology["FOLLOWS"])
+        except:
             relations["follows"] = 0
-        try: 
+        try:
             relations["precedes"] = len(self._temporal_topology["PRECEDES"])
-        except: 
+        except:
             relations["precedes"] = 0
-        try: 
+        try:
             relations["overlaps"] = len(self._temporal_topology["OVERLAPS"])
-        except: 
+        except:
             relations["overlaps"] = 0
-        try: 
+        try:
             relations["overlapped"] = len(self._temporal_topology["OVERLAPPED"])
-        except: 
+        except:
             relations["overlapped"] = 0
-        try: 
+        try:
             relations["during"] = len(self._temporal_topology["DURING"])
-        except: 
+        except:
             relations["during"] = 0
-        try: 
+        try:
             relations["contains"] = len(self._temporal_topology["CONTAINS"])
-        except: 
+        except:
             relations["contains"] = 0
-        try: 
+        try:
             relations["starts"] = len(self._temporal_topology["STARTS"])
-        except: 
+        except:
             relations["starts"] = 0
-        try:    
+        try:
             relations["started"] = len(self._temporal_topology["STARTED"])
-        except: 
+        except:
             relations["started"] = 0
-        try: 
+        try:
             relations["finishes"] = len(self._temporal_topology["FINISHES"])
-        except: 
+        except:
             relations["finishes"] = 0
-        try: 
+        try:
             relations["finished"] = len(self._temporal_topology["FINISHED"])
-        except: 
+        except:
             relations["finished"] = 0
-            
+
         return relations
 
     def set_temporal_topology_build_true(self):
@@ -219,20 +221,20 @@ class TemporalTopologyDatasetConnector(object):
            temporally located AFTER the start time of this map, but temporally
            nearer than other maps of the same dataset.
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         self._temporal_topology["NEXT"] = map
 
     def set_prev(self, map):
         """Set the map that is temporally as closest located before this map.
 
-           Temporally located means that the start time of the "previous" map is
-           temporally located BEFORE the start time of this map, but temporally
-           near than other maps of the same dataset.
+           Temporally located means that the start time of the "previous" map
+           is temporally located BEFORE the start time of this map, but
+           temporally nearer than other maps of the same dataset.
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         self._temporal_topology["PREV"] = map
 
@@ -259,15 +261,16 @@ class TemporalTopologyDatasetConnector(object):
     def append_equal(self, map):
         """Append a map with equivalent temporal extent as this map
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "EQUAL" not in self._temporal_topology:
             self._temporal_topology["EQUAL"] = []
         self._temporal_topology["EQUAL"].append(map)
 
     def get_equal(self):
-        """Return a list of map objects with equivalent temporal extent as this map
+        """Return a list of map objects with equivalent temporal extent as
+           this map
 
            :return: A list of map objects or None
         """
@@ -278,8 +281,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_starts(self, map):
         """Append a map that this map temporally starts with
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "STARTS" not in self._temporal_topology:
             self._temporal_topology["STARTS"] = []
@@ -297,8 +300,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_started(self, map):
         """Append a map that this map temporally started with
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "STARTED" not in self._temporal_topology:
             self._temporal_topology["STARTED"] = []
@@ -316,8 +319,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_finishes(self, map):
         """Append a map that this map temporally finishes with
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "FINISHES" not in self._temporal_topology:
             self._temporal_topology["FINISHES"] = []
@@ -335,8 +338,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_finished(self, map):
         """Append a map that this map temporally finished with
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "FINISHED" not in self._temporal_topology:
             self._temporal_topology["FINISHED"] = []
@@ -354,8 +357,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_overlaps(self, map):
         """Append a map that this map temporally overlaps
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "OVERLAPS" not in self._temporal_topology:
             self._temporal_topology["OVERLAPS"] = []
@@ -373,8 +376,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_overlapped(self, map):
         """Append a map that this map temporally overlapped
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "OVERLAPPED" not in self._temporal_topology:
             self._temporal_topology["OVERLAPPED"] = []
@@ -392,8 +395,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_follows(self, map):
         """Append a map that this map temporally follows
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "FOLLOWS" not in self._temporal_topology:
             self._temporal_topology["FOLLOWS"] = []
@@ -411,8 +414,8 @@ class TemporalTopologyDatasetConnector(object):
     def append_precedes(self, map):
         """Append a map that this map temporally precedes
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "PRECEDES" not in self._temporal_topology:
             self._temporal_topology["PRECEDES"] = []
@@ -431,8 +434,8 @@ class TemporalTopologyDatasetConnector(object):
         """Append a map that this map is temporally located during
            This includes temporal relationships starts and finishes
 
-           :param map: This object should be of type 
-                        AbstractMapDataset or derived classes
+           :param map: This object should be of type
+                       AbstractMapDataset or derived classes
         """
         if "DURING" not in self._temporal_topology:
             self._temporal_topology["DURING"] = []
@@ -452,8 +455,8 @@ class TemporalTopologyDatasetConnector(object):
         """Append a map that this map temporally contains
            This includes temporal relationships started and finished
 
-           :param map: This object should be of type AbstractMapDataset 
-                        or derived classes
+           :param map: This object should be of type AbstractMapDataset
+                       or derived classes
         """
         if "CONTAINS" not in self._temporal_topology:
             self._temporal_topology["CONTAINS"] = []
@@ -483,34 +486,23 @@ class TemporalTopologyDatasetConnector(object):
             count += 1
 
         return string
-    
+
     # Set the properties
-    equal = property(fget=get_equal, 
-                                       fset=append_equal)
-    follows = property(fget=get_follows, 
-                                    fset=append_follows)
-    precedes = property(fget=get_precedes, 
-                                     fset=append_precedes)
-    overlaps = property(fget=get_overlaps, 
-                                     fset=append_overlaps)
-    overlapped = property(fget=get_overlapped, 
-                                       fset=append_overlapped)
-    during = property(fget=get_during, 
-                                   fset=append_during)
-    contains = property(fget=get_contains, 
-                                     fset=append_contains)
-    starts = property(fget=get_starts, 
-                                     fset=append_starts)
-    started = property(fget=get_started, 
-                                     fset=append_started)
-    finishes = property(fget=get_finishes, 
-                                     fset=append_finishes)
-    finished = property(fget=get_finished, 
-                                     fset=append_finished)
+    equal = property(fget=get_equal, fset=append_equal)
+    follows = property(fget=get_follows, fset=append_follows)
+    precedes = property(fget=get_precedes, fset=append_precedes)
+    overlaps = property(fget=get_overlaps, fset=append_overlaps)
+    overlapped = property(fget=get_overlapped, fset=append_overlapped)
+    during = property(fget=get_during, fset=append_during)
+    contains = property(fget=get_contains, fset=append_contains)
+    starts = property(fget=get_starts, fset=append_starts)
+    started = property(fget=get_started, fset=append_started)
+    finishes = property(fget=get_finishes, fset=append_finishes)
+    finished = property(fget=get_finished, fset=append_finished)
 
     def print_temporal_topology_info(self):
         """Print information about this class in human readable style"""
-        
+
         print " +-------------------- Temporal Topology -------------------------------------+"
         #          0123456789012345678901234567890
         if self.next() is not None:
@@ -553,7 +545,7 @@ class TemporalTopologyDatasetConnector(object):
 
     def print_temporal_topology_shell_info(self):
         """Print information about this class in shell style"""
-        
+
         if self.next() is not None:
             print "next=" + self.next().get_id()
         if self.prev() is not None:
@@ -561,7 +553,8 @@ class TemporalTopologyDatasetConnector(object):
         if self.equal is not None:
             print "equal=" + self._generate_map_list_string(self.equal, False)
         if self.follows is not None:
-            print "follows=" + self._generate_map_list_string(self.follows, False)
+            print "follows=" + self._generate_map_list_string(self.follows,
+                                                              False)
         if self.precedes is not None:
             print "precedes=" + self._generate_map_list_string(
                 self.precedes, False)
@@ -572,7 +565,8 @@ class TemporalTopologyDatasetConnector(object):
             print "overlapped=" + \
                 self._generate_map_list_string(self.overlapped, False)
         if self.during is not None:
-            print "during=" + self._generate_map_list_string(self.during, False)
+            print "during=" + self._generate_map_list_string(self.during,
+                                                             False)
         if self.contains is not None:
             print "contains=" + self._generate_map_list_string(
                 self.contains, False)
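
A minimal sketch of the connector interface above; normally the SpatioTemporalTopologyBuilder fills these structures, so wiring the objects by hand as below is for illustration only (an active GRASS session is assumed).

    import grass.temporal as tgis

    tgis.init()
    a = tgis.RasterDataset("a@PERMANENT")
    b = tgis.RasterDataset("b@PERMANENT")
    a.append_equal(b)                 # normally done by the topology builder
    a.set_temporal_topology_build_true()
    print(a.get_number_of_temporal_relations())  # e.g. {'equal': 1, 'follows': 0, ...}
    print(a.equal[0].get_id())                   # b@PERMANENT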

+ 80 - 59
lib/python/temporal/temporal_vector_algebra.py

@@ -423,6 +423,7 @@ from temporal_algebra import *
 
 ##############################################################################
 
+
 class TemporalVectorAlgebraLexer(TemporalAlgebraLexer):
     """Lexical analyzer for the GRASS GIS temporal vector algebra"""
 
@@ -431,9 +432,9 @@ class TemporalVectorAlgebraLexer(TemporalAlgebraLexer):
 
     # Buffer functions from v.buffer
     vector_buff_functions = {
-       'buff_p'  : 'BUFF_POINT',
-       'buff_l'   : 'BUFF_LINE',
-       'buff_a'   : 'BUFF_AREA',
+       'buff_p': 'BUFF_POINT',
+       'buff_l': 'BUFF_LINE',
+       'buff_a': 'BUFF_AREA',
        }
 
     # This is the list of token names.
@@ -445,14 +446,13 @@ class TemporalVectorAlgebraLexer(TemporalAlgebraLexer):
     )
 
     # Build the token list
-    tokens = TemporalAlgebraLexer.tokens \
-                    + vector_tokens \
-                    + tuple(vector_buff_functions.values())
+    tokens = TemporalAlgebraLexer.tokens + vector_tokens \
+             + tuple(vector_buff_functions.values())
 
     # Regular expression rules for simple tokens
-    t_DISOR              = r'\+'
-    t_XOR                = r'\^'
-    t_NOT                = r'\~'
+    t_DISOR = r'\+'
+    t_XOR = r'\^'
+    t_NOT = r'\~'
     t_T_OVERLAY_OPERATOR = r'\{([a-zA-Z\|]+[,])?([\|&+=]?[\|&+=\^\~])\}'
 
     # Parse symbols
@@ -472,6 +472,7 @@ class TemporalVectorAlgebraLexer(TemporalAlgebraLexer):
 
 ##############################################################################
 
+
 class TemporalVectorAlgebraParser(TemporalAlgebraParser):
     """The temporal algebra class"""
 
@@ -480,12 +481,12 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
 
     # Setting equal precedence level for select and hash operations.
     precedence = (
-        ('left', 'T_SELECT_OPERATOR', 'T_SELECT', 'T_NOT_SELECT'), # 1
-        ('left', 'AND', 'OR', 'T_COMP_OPERATOR', 'T_OVERLAY_OPERATOR', 'DISOR', \
-          'NOT', 'XOR'), #2
+        ('left', 'T_SELECT_OPERATOR', 'T_SELECT', 'T_NOT_SELECT'),  # 1
+        ('left', 'AND', 'OR', 'T_COMP_OPERATOR', 'T_OVERLAY_OPERATOR', 'DISOR',
+         'NOT', 'XOR'),  # 2
         )
 
-    def __init__(self, pid=None, run=False, debug=True, spatial = False):
+    def __init__(self, pid=None, run=False, debug=True, spatial=False):
         TemporalAlgebraParser.__init__(self, pid, run, debug, spatial)
 
         self.m_overlay = pygrass.Module('v.overlay', quiet=True, run_=False)
@@ -494,7 +495,7 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
         self.m_remove = pygrass.Module('g.remove', quiet=True, run_=False)
         self.m_buffer = pygrass.Module('v.buffer', quiet=True, run_=False)
 
-    def parse(self, expression, basename = None, overwrite = False):
+    def parse(self, expression, basename=None, overwrite=False):
         self.lexer = TemporalVectorAlgebraLexer()
         self.lexer.build()
         self.parser = yacc.yacc(module=self, debug=self.debug)
@@ -518,7 +519,7 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
             for chunk in chunklist:
                 stringlist = ",".join(chunk)
                 if self.debug:
-                    print "g.remove type=vect pattern=%s"%(stringlist)
+                    print "g.remove type=vect pattern=%s" % (stringlist)
 
                 if self.run:
                     m = copy.deepcopy(self.m_remove)
@@ -527,16 +528,17 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                     m.flags["f"].value = True
                     m.run()
 
-    def eval_toperator(self, operator, comparison = False):
+    def eval_toperator(self, operator, comparison=False):
         """This function evaluates a string containing temporal operations.
 
-          :param operator: String of temporal operations, e.g. {equal|during,=!:}.
+          :param operator: String of temporal operations, e.g.
+                           {equal|during,=!:}.
 
-          :return: List of temporal relations (equal, during), the given function
-           (!:) and the interval/instances (=).
+          :return: List of temporal relations (equal, during), the given
+                   function (!:) and the interval/instances (=).
 
           .. code-block:: python
-          
+
               >>> import grass.temporal as tgis
               >>> tgis.init(True)
               >>> p = tgis.TemporalVectorAlgebraParser()
@@ -591,8 +593,8 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
 
         return(p.relations, p.temporal, p.function)
 
-    def overlay_map_extent(self, mapA, mapB, bool_op = None, temp_op = '=',
-                            copy = False):
+    def overlay_map_extent(self, mapA, mapB, bool_op=None, temp_op='=',
+                           copy=False):
         """Compute the spatio-temporal extent of two topological related maps
 
            :param mapA: The first map
@@ -634,18 +636,20 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                 for i in range(num):
                     # Check if resultmap names exist in GRASS database.
                     vectorname = self.basename + "_" + str(i)
-                    vectormap = VectorDataset(vectorname + "@" + get_current_mapset())
-                    if vectormap.map_exists() and self.overwrite == False:
-                        self.msgr.fatal(_("Error vector maps with basename %s exist. "
-                                      "Use --o flag to overwrite existing file") \
-                                      %(vectorname))
+                    vectormap = VectorDataset(vectorname + "@" +
+                                              get_current_mapset())
+                    if vectormap.map_exists() and self.overwrite is False:
+                        self.msgr.fatal(_("Error vector maps with basename %s"
+                                          " exist. Use --o flag to overwrite"
+                                          " existing file") % (vectorname))
                 for map_i in t[3]:
                     if "cmd_list" in dir(map_i):
                         # Execute command list.
                         for cmd in map_i.cmd_list:
                             try:
-                                # We need to check if the input maps have areas in case of v.overlay
-                                # otherwise v.overlay will break
+                                # We need to check if the input maps have
+                                # areas in case of v.overlay otherwise
+                                # v.overlay will break
                                 if cmd.name == "v.overlay":
                                     for name in (cmd.inputs["ainput"].value,
                                                     cmd.inputs["binput"].value):
@@ -668,14 +672,16 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                             self.msgr.message("Run command:\n" + cmd.get_bash())
                             cmd.run()
                             if cmd.popen.returncode != 0:
-                                self.msgr.fatal(_("Error starting %s : \n%s") \
-                                                    %(cmd.get_bash(), \
-                                                    cmd.popen.stderr))
+                                self.msgr.fatal(_("Error starting %s : \n%s")
+                                                % (cmd.get_bash(),
+                                                   cmd.popen.stderr))
                             mapname = cmd.outputs['output'].value
                             if mapname.find("@") >= 0:
                                 map_test = map_i.get_new_instance(mapname)
                             else:
-                                map_test = map_i.get_new_instance(mapname + "@" + self.mapset)
+                                map_test = map_i.get_new_instance(mapname +
+                                                                  "@" +
+                                                                  self.mapset)
                             if not map_test.map_exists():
                                 returncode = 1
                                 break
@@ -687,7 +693,8 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                             # Change map name to given basename.
                             newident = self.basename + "_" + str(count)
                             m = copy.deepcopy(self.m_rename)
-                            m.inputs["vect"].value = (map_i.get_name(),newident)
+                            m.inputs["vect"].value = (map_i.get_name(),
+                                                      newident)
                             m.flags["overwrite"].value = self.overwrite
                             m.run()
                             #m(vect = (map_i.get_name(),newident), \
@@ -702,28 +709,32 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                     # Open connection to temporal database.
                     dbif, connected = init_dbif(dbif=self.dbif)
                     # Create result space time dataset.
-                    resultstds = open_new_stds(t[1], self.stdstype, \
-                                                                'absolute', t[1], t[1], \
-                                                                "temporal vector algebra", dbif=dbif,
-                                                                overwrite = self.overwrite)
+                    resultstds = open_new_stds(t[1], self.stdstype, 'absolute',
+                                               t[1], t[1],
+                                               "temporal vector algebra",
+                                               dbif=dbif,
+                                               overwrite=self.overwrite)
                     for map_i in register_list:
-                        # Check if modules should be executed from command list.
+                        # Check if modules should be executed from command list
                         if "cmd_list" in dir(map_i):
                             # Get meta data from grass database.
                             map_i.load()
                             if map_i.is_in_db(dbif=dbif) and self.overwrite:
                                 # Update map in temporal database.
                                 map_i.update_all(dbif=dbif)
-                            elif map_i.is_in_db(dbif=dbif) and self.overwrite == False:
-                                # Raise error if map exists and no overwrite flag is given.
-                                self.msgr.fatal(_("Error vector map %s exist in temporal database. "
-                                                  "Use overwrite flag.  : \n%s") \
-                                                  %(map_i.get_map_id(), cmd.popen.stderr))
+                            elif map_i.is_in_db(dbif=dbif) and self.overwrite is False:
+                                # Raise error if map exists and no overwrite
+                                # flag is given.
+                                self.msgr.fatal(_("Error vector map %s exist"
+                                                  " in temporal database. Use "
+                                                  "overwrite flag.  : \n%s") %
+                                                (map_i.get_map_id(),
+                                                 cmd.popen.stderr))
                             else:
                                 # Insert map into temporal database.
                                 map_i.insert(dbif=dbif)
                         else:
-                            #Get metadata from temporal database.
+                            # Get metadata from temporal database.
                             map_i.select(dbif=dbif)
                         # Register map in result space time dataset.
                         resultstds.register_map(map_i, dbif=dbif)
@@ -764,7 +775,8 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
         maplistB = self.check_stds(t[3])
 
         if self.run:
-            t[0] = self.create_overlay_operations(maplistA, maplistB, ("EQUAL",), "=", t[2])
+            t[0] = self.create_overlay_operations(maplistA, maplistB,
+                                                  ("EQUAL",), "=", t[2])
         else:
             t[0] = t[1]
 
@@ -778,25 +790,31 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
         # Check input stds.
         maplistA = self.check_stds(t[1])
         maplistB = self.check_stds(t[3])
-        relations, temporal, function= self.eval_toperator(t[2])
+        relations, temporal, function = self.eval_toperator(t[2])
 
         if self.run:
-            t[0] = self.create_overlay_operations(maplistA, maplistB, relations, temporal, function)
+            t[0] = self.create_overlay_operations(maplistA, maplistB,
+                                                  relations, temporal,
+                                                  function)
         else:
             t[0] = t[1]
 
-    def create_overlay_operations(self, maplistA, maplistB, relations, temporal, function):
+    def create_overlay_operations(self, maplistA, maplistB, relations,
+                                  temporal, function):
         """Create the spatial overlay operation commad list
 
            :param maplistA: A list of map objects
            :param maplistB: A list of map objects
-           :param relations: The temporal relationships that must be fullfilled as list of strings
-                            ("EQUAL", "DURING", ...)
+           :param relations: The temporal relationships that must be
+                             fulfilled, as a list of strings ("EQUAL", "DURING",
+                             ...)
            :param temporal: The temporal operator as string "=" or "&", ...
-           :param function: The spatial overlay operations as string "&", "|", ...
+           :param function: The spatial overlay operations as string "&", "|",
+                            ...
            :return: Return the list of maps with overlay commands
         """
-        topolist = self.get_temporal_topo_list(maplistA, maplistB, topolist = relations)
+        topolist = self.get_temporal_topo_list(maplistA, maplistB,
+                                               topolist=relations)
 
         # Select operation name.
         if function == "&":
@@ -825,9 +843,11 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                     if topo in tbrelations.keys():
                         for map_j in (tbrelations[topo]):
                             # Create overlayed map extent.
-                            returncode = self.overlay_map_extent(map_new, map_j, opname, \
-                                                                    temp_op = temporal)
-                            # Stop the loop if no temporal or spatial relationship exist.
+                            returncode = self.overlay_map_extent(map_new,
+                                                                 map_j, opname,
+                                                                 temp_op=temporal)
+                            # Stop the loop if no temporal or spatial
+                            # relationship exist.
                             if returncode == 0:
                                 break
                             if count == 0:
@@ -839,7 +859,8 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
                                 map_new.set_id(name + "@" + mapset)
                             # Set second input for overlay module.
                             mapbinput = map_j.get_id()
-                            # Create module command in PyGRASS for v.overlay and v.patch.
+                            # Create module command in PyGRASS for v.overlay
+                            # and v.patch.
                             if opname != "disor":
                                 m = copy.deepcopy(self.m_overlay)
                                 m.run_ = False
@@ -871,7 +892,7 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
 
             return resultlist
 
-    def p_buffer_operation(self,t):
+    def p_buffer_operation(self, t):
         """
         expr : buff_function LPAREN stds COMMA number RPAREN
              | buff_function LPAREN expr COMMA number RPAREN
@@ -923,7 +944,7 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser):
     # Handle errors.
     def p_error(self, t):
         raise SyntaxError("syntax error on line %d near '%s' expression '%s'" %
-            (t.lineno, t.value, self.expression))
+                          (t.lineno, t.value, self.expression))
 
 ###############################################################################
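
Only the "&" branch of the operator dispatch is visible in the hunks above; as a rough, hedged sketch of the mapping it selects from (the exact opname strings and their pairing with v.overlay/v.patch are assumptions, inferred from the "disor" special case visible later in the hunk):

    # Hypothetical sketch of the function -> opname dispatch assumed above;
    # "disor" (disjoint union) would be handled by v.patch, the rest by v.overlay.
    def select_overlay_opname(function):
        mapping = {"&": "and",    # intersection
                   "|": "or",     # union
                   "^": "xor",    # symmetric difference
                   "~": "not",    # difference
                   "+": "disor"}  # disjoint union -> v.patch
        if function not in mapping:
            raise SyntaxError("Unknown spatial overlay operator '%s'" % function)
        return mapping[function]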
 

+ 61 - 61
lib/python/temporal/temporal_vector_operator.py

@@ -61,7 +61,7 @@ for details.
     >>> p.parse(expression)
     >>> print(p.relations, p.temporal, p.function)
     ([['overlaps', 'overlapped'], 'equal'], '|', '|')
-    
+
 """
 try:
     import ply.lex as lex
@@ -69,25 +69,26 @@ try:
 except:
     pass
 
+
 class TemporalVectorOperatorLexer(object):
     """Lexical analyzer for the GRASS GIS temporal vector operators"""
-    
+
     # Functions that defines topological relations.
     relations = {
-        'equal'      : "EQUAL",
-        'follows'    : "FOLLOWS",
-        'precedes'   : "PRECEDES",
-        'overlaps'   : "OVERLAPS",
-        'overlapped' : "OVERLAPPED",
-        'during'     : "DURING",
-        'starts'     : "STARTS",
-        'finishes'   : "FINISHES",
-        'contains'   : "CONTAINS",
-        'started'    : "STARTED",
-        'finished'   : "FINISHED",
-        'over'       : "OVER"
+        'equal': "EQUAL",
+        'follows': "FOLLOWS",
+        'precedes': "PRECEDES",
+        'overlaps': "OVERLAPS",
+        'overlapped': "OVERLAPPED",
+        'during': "DURING",
+        'starts': "STARTS",
+        'finishes': "FINISHES",
+        'contains': "CONTAINS",
+        'started': "STARTED",
+        'finished': "FINISHED",
+        'over': "OVER"
         }
-        
+
     # This is the list of token names.
     tokens = (
         'COMMA',
@@ -103,24 +104,24 @@ class TemporalVectorOperatorLexer(object):
         'CLPAREN',
         'CRPAREN',
     )
-    
+
     # Build the token list
     tokens = tokens + tuple(relations.values())
 
     # Regular expression rules for simple tokens
-    t_T_SELECT           = r':'
-    t_T_NOT_SELECT       = r'!:'
-    t_COMMA              = r','
-    t_LEFTREF             = r'='
-    t_HASH               = r'\#'
-    t_OR                 = r'[\|]'
-    t_AND                = r'[&]'
-    t_DISOR              = r'\+'
-    t_XOR                = r'\^'
-    t_NOT                = r'\~'
-    t_CLPAREN             = r'\{'
-    t_CRPAREN             = r'\}'
-    
+    t_T_SELECT = r':'
+    t_T_NOT_SELECT = r'!:'
+    t_COMMA = r','
+    t_LEFTREF = r'='
+    t_HASH = r'\#'
+    t_OR = r'[\|]'
+    t_AND = r'[&]'
+    t_DISOR = r'\+'
+    t_XOR = r'\^'
+    t_NOT = r'\~'
+    t_CLPAREN = r'\{'
+    t_CRPAREN = r'\}'
+
     # These are the things that should be ignored.
     t_ignore = ' \t'
 
@@ -128,12 +129,12 @@ class TemporalVectorOperatorLexer(object):
     def t_newline(self, t):
         r'\n+'
         t.lineno += len(t.value)
-        
+
     def t_NAME(self, t):
         r'[a-zA-Z_][a-zA-Z_0-9]*'
         self.temporal_symbol(t)
         return t
-        
+
     # Parse symbols
     def temporal_symbol(self, t):
         # Check for reserved words
@@ -145,35 +146,36 @@ class TemporalVectorOperatorLexer(object):
 
     # Handle errors.
     def t_error(self, t):
-        raise SyntaxError("syntax error on line %d near '%s'" % 
-            (t.lineno, t.value))
+        raise SyntaxError("syntax error on line %d near '%s'" %
+                          (t.lineno, t.value))
 
     # Build the lexer
-    def build(self,**kwargs):
+    def build(self, **kwargs):
         self.lexer = lex.lex(module=self, **kwargs)
-        
+
     # Just for testing
-    def test(self,data):
+    def test(self, data):
         self.name_list = {}
         print(data)
         self.lexer.input(data)
         while True:
-             tok = self.lexer.token()
-             if not tok: break
-             print tok
-             
+            tok = self.lexer.token()
+            if not tok: break
+            print tok
+
+
 class TemporalVectorOperatorParser(object):
     """The parser for the GRASS GIS temporal vector operators"""
-    
+
     def __init__(self):
         self.lexer = TemporalVectorOperatorLexer()
         self.lexer.build()
         self.parser = yacc.yacc(module=self)
 
-    def parse(self, expression, comparison = False):
+    def parse(self, expression, comparison=False):
         self.comparison = comparison
         self.parser.parse(expression)
-        
+
     # Error rule for syntax errors.
     def p_error(self, t):
         raise SyntaxError("invalid syntax")
@@ -192,11 +194,10 @@ class TemporalVectorOperatorParser(object):
             self.relations = t[2]
         else:
             self.relations = [t[2]]
-        self.temporal  = None 
-        self.function  = None
-        
-        t[0] = t[2]
+        self.temporal = None
+        self.function = None
 
+        t[0] = t[2]
 
     def p_operator(self, t):
         # The expression should always return a list of maps.
@@ -207,11 +208,11 @@ class TemporalVectorOperatorParser(object):
         """
         # Set three operator components.
         self.relations = ['equal']
-        self.temporal  = "=" 
-        self.function  = t[2]
-        
+        self.temporal = "="
+        self.function = t[2]
+
         t[0] = t[2]
-        
+
     def p_operator_temporal(self, t):
         # The expression should always return a list of maps.
         """
@@ -227,7 +228,7 @@ class TemporalVectorOperatorParser(object):
         else:
             self.temporal = t[2]
             self.function = t[3]
-        
+
         t[0] = t[3]
 
     def p_operator_relation(self, t):
@@ -245,9 +246,9 @@ class TemporalVectorOperatorParser(object):
             self.relations = t[2]
         else:
             self.relations = [t[2]]
-        self.temporal  = "=" 
-        self.function  = t[4]
-        
+        self.temporal = "="
+        self.function = t[4]
+
         t[0] = t[4]
 
     def p_operator_relation_temporal(self, t):
@@ -272,7 +273,7 @@ class TemporalVectorOperatorParser(object):
             self.temporal = t[4]
             self.function = t[5]
         t[0] = t[5]
-        
+
     def p_relation(self, t):
         # The list of relations.
         """
@@ -289,7 +290,7 @@ class TemporalVectorOperatorParser(object):
                  | FINISHED
         """
         t[0] = t[1]
-        
+
     def p_over(self, t):
         # The list of relations.
         """
@@ -310,7 +311,7 @@ class TemporalVectorOperatorParser(object):
             rel_list = rel_list + t[3]
         else:
             rel_list.append(t[3])
-        t[0] =  rel_list
+        t[0] = rel_list
 
     def p_temporal_operator(self, t):
         # The list of relations.
@@ -340,9 +341,8 @@ class TemporalVectorOperatorParser(object):
                 | NOT
         """
         t[0] = t[1]
-###############################################################################             
-        
+###############################################################################
+
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
-    
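
The doctest fragment quoted at the top of this file shows that parse() leaves the three operator components on the parser object (p.relations, p.temporal, p.function); a minimal hedged sketch of that pattern, where the import path, the concrete expression and the expected values are assumptions:

    # Minimal sketch, assuming the module is importable as below and that the
    # grammar accepts the "{relation, operator}" form used here.
    from grass.temporal.temporal_vector_operator import TemporalVectorOperatorParser

    p = TemporalVectorOperatorParser()   # builds the lexer and the yacc parser
    p.parse("{during, &}")               # comparison=False is the default
    print(p.relations)                   # expected (assumption): ['during']
    print(p.temporal)                    # expected (assumption): '='
    print(p.function)                    # expected (assumption): '&'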

+ 1 - 0
lib/python/temporal/testsuite/test_doctests.py

@@ -7,6 +7,7 @@ import doctest
 import grass.temporal
 import grass.gunittest
 import grass.gunittest.utils
+import os
 
 doctest.DocFileCase = type('DocFileCase',
                            (grass.gunittest.TestCase,),

+ 43 - 34
lib/python/temporal/unit_tests.py

@@ -9,7 +9,7 @@ for details.
 :authors: Soeren Gebbert
 """
 import copy
-from datetime import datetime, date, time, timedelta
+from datetime import datetime
 import grass.script.core as core
 from temporal_granularity import *
 from datetime_math import *
@@ -25,6 +25,7 @@ core.set_raise_on_error(True)
 
 ###############################################################################
 
+
 def test_increment_datetime_by_string():
 
     # First test
@@ -93,6 +94,7 @@ def test_increment_datetime_by_string():
 
 ###############################################################################
 
+
 def test_adjust_datetime_to_granularity():
 
     # First test
@@ -209,6 +211,7 @@ def test_adjust_datetime_to_granularity():
 
 ###############################################################################
 
+
 def test_compute_datetime_delta():
 
     print "Test 1"
@@ -552,6 +555,7 @@ def test_compute_datetime_delta():
     if delta != 0:
         core.fatal("Compute datetime delta is wrong %s" % (delta))
 
+
 def test_compute_absolute_time_granularity():
 
     # First we test intervals
@@ -890,6 +894,7 @@ def test_compute_absolute_time_granularity():
 
 ###############################################################################
 
+
 def test_spatial_extent_intersection():
     # Generate the extents
 
@@ -953,6 +958,7 @@ def test_spatial_extent_intersection():
 
 ###############################################################################
 
+
 def test_spatial_relations():
     # Generate the extents
 
@@ -1308,7 +1314,6 @@ def test_spatial_relations():
     print relation
     if relation != "meet":
         core.fatal("Wrong spatial relation: %s" % (relation))
- ###
 
     A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
     A.print_info()
@@ -1362,6 +1367,7 @@ def test_spatial_relations():
 
 ###############################################################################
 
+
 def test_temporal_topology_builder():
     map_listA = []
 
@@ -1390,7 +1396,7 @@ def test_temporal_topology_builder():
         _map.print_topology_info()
         if _map.get_id() != map_listA[count].get_id():
             core.fatal("Error building temporal topology <%s> != <%s>" %
-                (_map.get_id(), map_listA[count].get_id()))
+                       (_map.get_id(), map_listA[count].get_id()))
         count += 1
 
     map_listB = []
@@ -1426,12 +1432,11 @@ def test_temporal_topology_builder():
 
     count = 0
     for _map in tb:
-        print "[%s]" % (_map.get_map_id
-        ())
+        print "[%s]" % (_map.get_map_id())
         _map.print_topology_shell_info()
         if _map.get_id() != map_listB[count].get_id():
             core.fatal("Error building temporal topology <%s> != <%s>" %
-                (_map.get_id(), map_listB[count].get_id()))
+                       (_map.get_id(), map_listB[count].get_id()))
         count += 1
 
     tb = SpatioTemporalTopologyBuilder()
@@ -1443,7 +1448,7 @@ def test_temporal_topology_builder():
         _map.print_topology_shell_info()
         if _map.get_id() != map_listA[count].get_id():
             core.fatal("Error building temporal topology <%s> != <%s>" %
-                (_map.get_id(), map_listA[count].get_id()))
+                       (_map.get_id(), map_listA[count].get_id()))
         count += 1
 
     count = 0
@@ -1468,6 +1473,7 @@ def test_temporal_topology_builder():
 
 ###############################################################################
 
+
 def test_map_list_sorting():
 
     map_list = []
@@ -1511,13 +1517,14 @@ def test_map_list_sorting():
 
 ###############################################################################
 
+
 def test_1d_rtree():
     """Testing the rtree ctypes wrapper"""
 
     tree = rtree.RTreeCreateTree(-1, 0, 1)
 
     for i in xrange(10):
-        
+
         rect = rtree.RTreeAllocRect(tree)
         rtree.RTreeSetRect1D(rect, tree, float(i - 2), float(i + 2))
         rtree.RTreeInsertRect(rect, i + 1, tree)
@@ -1528,7 +1535,7 @@ def test_1d_rtree():
     list_ = gis.ilist()
 
     num = vector.RTreeSearch2(tree, rect, byref(list_))
-    
+
     rtree.RTreeFreeRect(rect)
 
     # print rectangle ids
@@ -1537,24 +1544,24 @@ def test_1d_rtree():
         print "id", list_.value[i]
 
     rtree.RTreeDestroyTree(tree)
-    
+
 ###############################################################################
 
+
 def test_2d_rtree():
     """Testing the rtree ctypes wrapper"""
 
     tree = rtree.RTreeCreateTree(-1, 0, 2)
 
     for i in xrange(10):
-        
-        
+
         rect = rtree.RTreeAllocRect(tree)
 
-        rtree.RTreeSetRect2D(rect, tree, 
-                              float(i - 2), float(i + 2), 
-                              float(i - 2), float(i + 2))
+        rtree.RTreeSetRect2D(rect, tree,
+                             float(i - 2), float(i + 2),
+                             float(i - 2), float(i + 2))
         rtree.RTreeInsertRect(rect, i + 1, tree)
-    
+
     rect = rtree.RTreeAllocRect(tree)
     rtree.RTreeSetRect2D(rect, tree, 2.0, 7.0, 2.0, 7.0)
 
@@ -1569,21 +1576,22 @@ def test_2d_rtree():
         print "id", list_.value[i]
 
     rtree.RTreeDestroyTree(tree)
-    
+
 ###############################################################################
 
+
 def test_3d_rtree():
     """Testing the rtree ctypes wrapper"""
 
     tree = rtree.RTreeCreateTree(-1, 0, 3)
 
     for i in xrange(10):
-        
+
         rect = rtree.RTreeAllocRect(tree)
-        rtree.RTreeSetRect3D(rect, tree, 
-                              float(i - 2), float(i + 2), 
-                              float(i - 2), float(i + 2), 
-                              float(i - 2), float(i + 2))
+        rtree.RTreeSetRect3D(rect, tree,
+                             float(i - 2), float(i + 2),
+                             float(i - 2), float(i + 2),
+                             float(i - 2), float(i + 2))
         rtree.RTreeInsertRect(rect, i + 1, tree)
         print i + 1
         rtree.RTreePrintRect(rect, 1, tree)
@@ -1592,7 +1600,7 @@ def test_3d_rtree():
     rtree.RTreeSetRect3D(rect, tree, 2.0, 7.0, 2.0, 7.0, 2.0, 7.0)
     print "Select"
     rtree.RTreePrintRect(rect, 1, tree)
-        
+
     list_ = gis.ilist()
 
     num = vector.RTreeSearch2(tree, rect, byref(list_))
@@ -1602,31 +1610,32 @@ def test_3d_rtree():
     print "Number of overlapping rectangles", num
     for i in xrange(list_.n_values):
         print "id", list_.value[i]
-        
+
     rtree.RTreeDestroyTree(tree)
 
 ###############################################################################
 
+
 def test_4d_rtree():
     """Testing the rtree ctypes wrapper"""
 
     tree = rtree.RTreeCreateTree(-1, 0, 4)
 
     for i in xrange(10):
-        
+
         # Allocate the boundary
         rect = rtree.RTreeAllocRect(tree)
-        rtree.RTreeSetRect4D(rect, tree, 
-                              float(i - 2), float(i + 2), 
-                              float(i - 2), float(i + 2), 
-                              float(i - 2), float(i + 2), 
-                              float(i - 2), float(i + 2))
+        rtree.RTreeSetRect4D(rect, tree,
+                             float(i - 2), float(i + 2),
+                             float(i - 2), float(i + 2),
+                             float(i - 2), float(i + 2),
+                             float(i - 2), float(i + 2))
         rtree.RTreeInsertRect(rect, i + 1, tree)
 
     rect = rtree.RTreeAllocRect(tree)
-    rtree.RTreeSetRect4D(rect, tree, 2.0, 7.0, 2.0, 
-                          7.0, 2.0, 7.0, 2.0, 7.0)
-    
+    rtree.RTreeSetRect4D(rect, tree, 2.0, 7.0, 2.0,
+                         7.0, 2.0, 7.0, 2.0, 7.0)
+
     list_ = gis.ilist()
 
     num = vector.RTreeSearch2(tree, rect, byref(list_))
@@ -1637,7 +1646,7 @@ def test_4d_rtree():
     print "Number of overlapping rectangles", num
     for i in xrange(list_.n_values):
         print "id", list_.value[i]
-        
+
     rtree.RTreeDestroyTree(tree)
 
 ###############################################################################
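
The four r-tree tests above all follow the same create / insert / search / destroy cycle; condensed into one hedged sketch (2D case, using only the ctypes wrappers already visible in the hunks; the meaning of the first RTreeCreateTree argument is an assumption):

    # Condensed sketch of the pattern exercised by test_2d_rtree() above.
    tree = rtree.RTreeCreateTree(-1, 0, 2)          # -1: memory-only tree (assumption)

    for i in xrange(10):
        rect = rtree.RTreeAllocRect(tree)
        rtree.RTreeSetRect2D(rect, tree, float(i - 2), float(i + 2),
                             float(i - 2), float(i + 2))
        rtree.RTreeInsertRect(rect, i + 1, tree)    # ids are 1-based

    query = rtree.RTreeAllocRect(tree)
    rtree.RTreeSetRect2D(query, tree, 2.0, 7.0, 2.0, 7.0)

    found = gis.ilist()
    num = vector.RTreeSearch2(tree, query, byref(found))
    rtree.RTreeFreeRect(query)

    # found.value[0:found.n_values] holds the ids of the overlapping rectangles
    rtree.RTreeDestroyTree(tree)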

+ 27 - 22
lib/python/temporal/univar_statistics.py

@@ -49,7 +49,8 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended,
     if not rows:
         dbif.close()
         gscript.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
-                     'sp': sp.get_new_map_instance(None).get_type(), 'i': sp.get_id()})
+                      'sp': sp.get_new_map_instance(None).get_type(),
+                      'i': sp.get_id()})
 
     if no_header is False:
         string = ""
@@ -57,7 +58,7 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended,
         string += "min" + fs + "max" + fs
         string += "mean_of_abs" + fs + "stddev" + fs + "variance" + fs
         string += "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"
-        if extended == True:
+        if extended is True:
             string += fs + "first_quartile" + fs + "median" + fs
             string += "third_quartile" + fs + "percentile_90"
 
@@ -71,7 +72,7 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended,
 
         flag = "g"
 
-        if extended == True:
+        if extended is True:
             flag += "e"
 
         if type == "strds":
@@ -81,18 +82,20 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended,
 
         if not stats:
             if type == "strds":
-                gscript.warning(_("Unable to get statistics for raster map <%s>") % id)
+                gscript.warning(_("Unable to get statistics for raster map "
+                                  "<%s>") % id)
             elif type == "str3ds":
-                gscript.warning(_("Unable to get statistics for 3d raster map <%s>") % id)
+                gscript.warning(_("Unable to get statistics for 3d raster map"
+                                  " <%s>") % id)
             continue
-        
+
         string += str(id) + fs + str(start) + fs + str(end)
         string += fs + str(stats["mean"]) + fs + str(stats["min"])
         string += fs + str(stats["max"]) + fs + str(stats["mean_of_abs"])
         string += fs + str(stats["stddev"]) + fs + str(stats["variance"])
         string += fs + str(stats["coeff_var"]) + fs + str(stats["sum"])
         string += fs + str(stats["null_cells"]) + fs + str(stats["cells"])
-        if extended == True:            
+        if extended is True:
             string += fs + str(stats["first_quartile"]) + fs + str(stats["median"])
             string += fs + str(stats["third_quartile"]) + fs + str(stats["percentile_90"])
         print string
@@ -103,7 +106,8 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended,
 
 
 def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
-                                           where, extended, no_header=False, fs="|"):
+                                           where, extended, no_header=False,
+                                           fs="|"):
     """Print univariate statistics for a space time vector dataset
 
        :param input: The name of the space time dataset
@@ -131,10 +135,10 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
 
     sp = dataset_factory("stvds", id)
 
-    if sp.is_in_db(dbif) == False:
+    if sp.is_in_db(dbif) is False:
         dbif.close()
         gscript.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
-                     'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
+                      'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
 
     sp.select(dbif)
 
@@ -144,7 +148,7 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
     if not rows:
         dbif.close()
         gscript.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
-                     'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
+                      'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
 
     string = ""
     if no_header is False:
@@ -157,7 +161,7 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
             string += "population_coeff_variation" + fs + \
                 "sample_stddev" + fs + "sample_variance" + fs
             string += "kurtosis" + fs + "skewness"
-            if extended == True:
+            if extended is True:
                 string += fs + "first_quartile" + fs + "median" + fs + \
                     "third_quartile" + fs + "percentile_90"
 
@@ -171,22 +175,23 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
 
         flags = "g"
 
-        if extended == True:
+        if extended is True:
             flags += "e"
 
         if not mylayer:
             mylayer = layer
 
         stats = gscript.parse_command("v.univar", map=id, where=where,
-                                   column=column, layer=mylayer,
-                                   type=type, flags=flags)
+                                      column=column, layer=mylayer,
+                                      type=type, flags=flags)
 
         string = ""
 
         if not stats:
-            gscript.warning(_("Unable to get statistics for vector map <%s>") % id)
+            gscript.warning(_("Unable to get statistics for vector map <%s>")
+                            % id)
             continue
-        
+
         string += str(id) + fs + str(start) + fs + str(end)
         string += fs + str(stats["n"]) + fs + str(stats[
             "nmissing"]) + fs + str(stats["nnull"])
@@ -208,15 +213,15 @@ def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
                 str(stats["sample_variance"])
 
                 string += fs + str(stats["kurtosis"]) + fs + \
-                str(stats["skewness"])
+                          str(stats["skewness"])
             else:
                 string += fs + fs + fs + fs + fs + fs + fs + fs + fs
-            if extended == True:
+            if extended is True:
                 if "first_quartile" in stats:
                     string += fs + str(stats["first_quartile"]) + fs + \
-                    str(stats["median"]) + fs + \
-                    str(stats["third_quartile"]) + fs + \
-                    str(stats["percentile_90"])
+                              str(stats["median"]) + fs + \
+                              str(stats["third_quartile"]) + fs + \
+                              str(stats["percentile_90"])
                 else:
                     string += fs + fs + fs + fs