Procházet zdrojové kódy

Implemented open_old and open_new space time raster dataset functions
and replaced lots of code with these functions. Renamed create.py to open.py.


git-svn-id: https://svn.osgeo.org/grass/grass/trunk@57479 15284696-431f-4ddb-bdfa-cd5b030d7da7

Soeren Gebbert před 11 roky
rodič
revize
97ee589445

+ 1 - 1
lib/python/temporal/Makefile

@@ -8,7 +8,7 @@ PYDIR = $(ETC)/python
 GDIR = $(PYDIR)/grass
 DSTDIR = $(GDIR)/temporal
 
-MODULES = base core abstract_dataset abstract_map_dataset abstract_space_time_dataset space_time_datasets create factory gui_support list register sampling metadata spatial_extent temporal_extent datetime_math temporal_granularity spatio_temporal_relationships unit_tests aggregation stds_export stds_import extract mapcalc univar_statistics temporal_topology_dataset_connector spatial_topology_dataset_connector
+MODULES = base core abstract_dataset abstract_map_dataset abstract_space_time_dataset space_time_datasets open factory gui_support list register sampling metadata spatial_extent temporal_extent datetime_math temporal_granularity spatio_temporal_relationships unit_tests aggregation stds_export stds_import extract mapcalc univar_statistics temporal_topology_dataset_connector spatial_topology_dataset_connector
 
 PYFILES := $(patsubst %,$(DSTDIR)/%.py,$(MODULES) __init__)
 PYCFILES := $(patsubst %,$(DSTDIR)/%.pyc,$(MODULES) __init__)

+ 1 - 1
lib/python/temporal/__init__.py

@@ -12,7 +12,7 @@ from space_time_datasets import *
 from datetime_math import *
 from temporal_granularity import *
 from spatio_temporal_relationships import *
-from create import *
+from open import *
 from factory import *
 from gui_support import *
 from list import *

+ 0 - 89
lib/python/temporal/create.py

@@ -1,89 +0,0 @@
-"""!@package grass.temporal
-
-@brief GRASS Python scripting module (temporal GIS functions)
-
-Temporal GIS related functions to be used in Python scripts.
-
-Usage:
-
-@code
-import grass.temporal as tgis
-
-tgis.register_maps_in_space_time_dataset(type, name, maps)
-
-...
-@endcode
-
-(C) 2008-2011 by the GRASS Development Team
-This program is free software under the GNU General Public
-License (>=v2). Read the file COPYING that comes with GRASS
-for details.
-
-@author Soeren Gebbert
-"""
-
-from space_time_datasets import *
-from factory import *
-
-###############################################################################
-
-
-def create_space_time_dataset(name, type, temporaltype, title, descr, semantic,
-                              dbif=None, overwrite=False):
-    """!Create a new space time dataset
-
-       This function is sensitive to the settings in grass.core.overwrite to
-       overwrute existing space time datasets.
-
-       @param name The name of the new space time dataset
-       @param type The type (strds, stvds, str3ds) of the new space time
-                   dataset
-       @param temporaltype The temporal type (relative or absolute)
-       @param title The title
-       @param descr The dataset description
-       @param semantic Semantical information
-       @param dbif The temporal database interface to be used
-       @param overwrite Flag to allow overwriting
-
-       @return The new created space time dataset
-
-       This function will raise a ScriptError in case of an error.
-    """
-
-    #Get the current mapset to create the id of the space time dataset
-
-    mapset = core.gisenv()["MAPSET"]
-    id = name + "@" + mapset
-
-    sp = dataset_factory(type, id)
-
-    dbif, connected = init_dbif(dbif)
-
-    if sp.is_in_db(dbif) and overwrite == False:
-        if connected:
-            dbif.close()
-        core.fatal(_("Space time %(sp)s dataset <%(name)s> is already in the"
-                      " database. Use the overwrite flag.") % {
-                      'sp': sp.get_new_map_instance(None).get_type(),
-                      'name': name})
-        return None
-
-    if sp.is_in_db(dbif) and overwrite == True:
-        core.warning(_("Overwrite space time %(sp)s dataset <%(name)s> and "
-                       "unregister all maps.") % {
-                       'sp': sp.get_new_map_instance(None).get_type(),
-                       'name': name})
-        sp.delete(dbif)
-        sp = sp.get_new_instance(id)
-
-    core.verbose(_("Create new space time %s dataset.") %
-                   sp.get_new_map_instance(None).get_type())
-
-    sp.set_initial_values(temporal_type=temporaltype, semantic_type=semantic,
-                          title=title, description=descr)
-    sp.insert(dbif)
-
-    if connected:
-        dbif.close()
-        
-    return sp

+ 12 - 48
lib/python/temporal/extract.py

@@ -13,6 +13,7 @@ for details.
 """
 
 from space_time_datasets import *
+from open import *
 from multiprocessing import Process
 
 ############################################################################
@@ -53,48 +54,15 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
 
     mapset = core.gisenv()["MAPSET"]
 
-    if input.find("@") >= 0:
-        id = input
-    else:
-        id = input + "@" + mapset
-
-    if type == "raster":
-        sp = SpaceTimeRasterDataset(id)
-    elif type == "raster3d":
-        sp = SpaceTimeRaster3DDataset(id)
-    elif type == "vector":
-        sp = SpaceTimeVectorDataset(id)
-
-    dummy = sp.get_new_map_instance(None)
-
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
 
-    if not sp.is_in_db(dbif):
-        dbif.close()
-        core.fatal(_("Space time %(type)s dataset <%(id)s> not found") % {
-                     'type': type, 'id': id})
-
-    if expression and not base:
-        dbif.close()
-        core.fatal(_("Please specify base="))
-
-    sp.select(dbif)
-
-    if output.find("@") >= 0:
-        out_id = output
-    else:
-        out_id = output + "@" + mapset
-
-    # The new space time dataset
-    new_sp = sp.get_new_instance(out_id)
-
-    if new_sp.is_in_db():
-        if not core.overwrite():
-            dbif.close()
-            core.fatal(_("Space time %(type)s dataset <%(id)s> is already in "
-                         "database, use overwrite flag to overwrite") % {
-                         'type': type, 'id': out_id})
+    sp = open_old_space_time_dataset(input, type, dbif)
+    dummy = sp.get_new_map_instance(None)
+    # Check the new stds
+    new_sp = open_new_space_time_dataset(output, type, sp.get_temporal_type(),
+                                         "None", "None", "mean", dbif,
+                                         core.overwrite(), dry=True)
     if type == "vector":
         rows = sp.get_registered_maps(
             "id,name,mapset,layer", where, "start_time", dbif)
@@ -198,16 +166,12 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
 
         core.percent(0, num_rows, 1)
 
-        # Insert the new space time dataset
-        if new_sp.is_in_db(dbif):
-            if core.overwrite():
-                new_sp.delete(dbif)
-                new_sp = sp.get_new_instance(out_id)
-
         temporal_type, semantic_type, title, description = sp.get_initial_values()
-        new_sp.set_initial_values(
-            temporal_type, semantic_type, title, description)
-        new_sp.insert(dbif)
+        new_sp = open_new_space_time_dataset(output, type,
+                                             sp.get_temporal_type(),
+                                             title, description,
+                                             semantic_type, dbif,
+                                             core.overwrite(), dry=False)
 
         # collect empty maps to remove them
         empty_maps = []

+ 4 - 14
lib/python/temporal/list.py

@@ -24,6 +24,7 @@ for details.
 
 from space_time_datasets import *
 from factory import *
+from open import *
 
 ###############################################################################
 
@@ -50,24 +51,13 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
             - "gran" List map using the granularity of the space time dataset,
                       columns are identical to deltagaps
         @param header Set True to print column names
-        @param gran The user defined granule to be used if method=gran is set, in case gran=None the 
+        @param gran The user defined granule to be used if method=gran is set, in case gran=None the
             granule of the space time dataset is used
     """
-    mapset = core.gisenv()["MAPSET"]
-
-    if input.find("@") >= 0:
-        id = input
-    else:
-        id = input + "@" + mapset
 
     dbif, connected = init_dbif(None)
-    
-    sp = dataset_factory(type, id)
 
-    if not sp.is_in_db(dbif=dbif):
-        core.fatal(_("Dataset <%s> not found in temporal database") % (id))
-
-    sp.select(dbif=dbif)
+    sp = open_old_space_time_dataset(input, type, dbif)
 
     if separator is None or separator == "":
         separator = "\t"
@@ -87,7 +77,7 @@ def list_maps_of_stds(type, input, columns, order, where, separator, method, hea
                 maps = sp.get_registered_maps_as_objects_by_granularity(gran=gran, dbif=dbif)
             else:
                 maps = sp.get_registered_maps_as_objects_by_granularity(dbif=dbif)
-            
+
         if header:
             string = ""
             string += "%s%s" % ("id", separator)

+ 12 - 60
lib/python/temporal/mapcalc.py

@@ -13,6 +13,7 @@ for details.
 """
 
 from space_time_datasets import *
+from open import *
 from multiprocessing import Process
 
 ############################################################################
@@ -82,24 +83,7 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
 
     input_name_list = inputs.split(",")
 
-    # Process the first input
-    if input_name_list[0].find("@") >= 0:
-        id = input_name_list[0]
-    else:
-        id = input_name_list[0] + "@" + mapset
-
-    if type == "raster":
-        first_input = SpaceTimeRasterDataset(id)
-    else:
-        first_input = SpaceTimeRaster3DDataset(id)
-
-    if not first_input.is_in_db(dbif):
-        dbif.close()
-        core.fatal(_("Space time %(t)s dataset <%(i)s> not found") % {'t': type,
-                                                                      'i': id})
-
-    # Fill the object with data from the temporal database
-    first_input.select(dbif)
+    first_input = open_old_space_time_dataset(input_name_list[0], type, dbif)
 
     # All additional inputs in reverse sorted order to avoid
     # wrong name substitution
@@ -109,38 +93,13 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
     input_list = []
 
     for input in input_name_list:
-
-        if input.find("@") >= 0:
-            id = input
-        else:
-            id = input + "@" + mapset
-
-        sp = first_input.get_new_instance(id)
-
-        if not sp.is_in_db(dbif):
-            dbif.close()
-            core.fatal(_("Space time %(t)s dataset <%(i)s> not "
-                         "found in temporal database") % {'t': type, 'i': id})
-
-        sp.select(dbif)
-
+        sp = open_old_space_time_dataset(input, type, dbif)
         input_list.append(copy.copy(sp))
 
-    # Create the new space time dataset
-    if output.find("@") >= 0:
-        out_id = output
-    else:
-        out_id = output + "@" + mapset
-
-    new_sp = first_input.get_new_instance(out_id)
-
-    # Check if in database
-    if new_sp.is_in_db(dbif):
-        if not core.overwrite():
-            dbif.close()
-            core.fatal(_("Space time %(t)s dataset <%(i)s> is already in "
-                         "database, use overwrite flag to overwrite") % {'t': type,
-                                                                         'i': out_id})
+    new_sp = open_new_space_time_dataset(output, type,
+                                         first_input.get_temporal_type(),
+                                         "New", "New dataset", "mean", dbif,
+                                         core.overwrite(), True)
 
     # Sample all inputs by the first input and create a sample matrix
     if spatial:
@@ -321,19 +280,12 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
         # Register the new maps in the output space time dataset
         core.message(_("Start map registration in temporal database"))
 
-        # Overwrite an existing dataset if requested
-        if new_sp.is_in_db(dbif):
-            if core.overwrite():
-                new_sp.delete(dbif)
-                new_sp = first_input.get_new_instance(out_id)
-
-        # Copy the ids from the first input
         temporal_type, semantic_type, title, description = first_input.get_initial_values()
-        new_sp.set_initial_values(
-            temporal_type, semantic_type, title, description)
-        # Insert the dataset in the temporal database
-        new_sp.insert(dbif)
 
+        new_sp = open_new_space_time_dataset(output, type,
+                                         temporal_type, title, description,
+                                         semantic_type, dbif,
+                                         core.overwrite(), False)
         count = 0
 
         # collect empty maps to remove them
@@ -670,7 +622,7 @@ def _parse_td_operator(expr, is_time_absolute, first, current):
 
 
 def _parse_start_time_operator(expr, is_time_absolute, first, current):
-    """Parse the start_time() operator. This operator represent 
+    """Parse the start_time() operator. This operator represents
     the time difference between the start time of the sample space time
     raster dataset and the start time of the current sample interval or
     instance. The time is measured  in days and fraction of days for absolute

+ 156 - 0
lib/python/temporal/open.py

@@ -0,0 +1,156 @@
+"""!@package grass.temporal
+
+@brief GRASS Python scripting module (temporal GIS functions)
+
+Temporal GIS related functions to be used in Python scripts.
+
+Usage:
+
+@code
+import grass.temporal as tgis
+
+tgis.register_maps_in_space_time_dataset(type, name, maps)
+
+...
+@endcode
+
+(C) 2008-2011 by the GRASS Development Team
+This program is free software under the GNU General Public
+License (>=v2). Read the file COPYING that comes with GRASS
+for details.
+
+@author Soeren Gebbert
+"""
+
+from space_time_datasets import *
+from factory import *
+
+###############################################################################
+
+def open_old_space_time_dataset(name, type, dbif=None):
+    """!This function opens an existing space time dataset and returns the
+       created and initialized object of the specified type.
+
+       This function will raise a ScriptError in case the type is wrong,
+       or the space time dataset was not found.
+
+       @param name The name of the space time dataset, if the name does not
+                    contain the mapset (name@mapset) then the current mapset
+                    will be used to identifiy the space time dataset
+       @param type The type of the space time dataset (strd, str3ds, stvds,
+                                                       raster, vector, raster3d)
+       @param dbif The optional database interface to be used
+
+    """
+    mapset = core.gisenv()["MAPSET"]
+
+    # Check if the dataset name contains the mapset as well
+    if name.find("@") < 0:
+        id = name + "@" + mapset
+    else:
+        id = name
+
+    if type == "strds" or type == "rast" or type == "raster":
+        sp = dataset_factory("strds", id)
+    elif type == "str3ds" or type == "rast3d" or type == "raster3d":
+        sp = dataset_factory("str3ds", id)
+    elif type == "stvds" or type == "vect" or type == "vector":
+        sp = dataset_factory("stvds", id)
+    else:
+        core.fatal(_("Unkown type: %s") % (type))
+
+    dbif, connected = init_dbif(dbif)
+
+    if not sp.is_in_db(dbif):
+        dbif.close()
+        core.fatal(_("Space time %(sp)s dataset <%(name)s> no found") %
+                     {'sp': sp.get_new_map_instance(None).get_type(),
+                      'name': name})
+
+    # Read content from temporal database
+    sp.select(dbif)
+    if connected:
+        dbif.close()
+
+    return sp
+
+###############################################################################
+
+def open_new_space_time_dataset(name, type, temporaltype, title, descr, semantic,
+                              dbif=None, overwrite=False, dry=False):
+    """!Create a new space time dataset of a specific type
+
+       This function is sensitive to the settings in grass.core.overwrite to
+       overwrite existing space time datasets.
+
+       @param name The name of the new space time dataset
+       @param type The type of the new space time dataset (strds, str3ds, stvds,
+                                                      raster, vector, raster3d)
+       @param temporaltype The temporal type (relative or absolute)
+       @param title The title
+       @param descr The dataset description
+       @param semantic Semantical information
+       @param dbif The temporal database interface to be used
+       @param overwrite Flag to allow overwriting
+       @param dry Do not create the space time dataset in the temporal database,
+                  make a dry run including all checks
+
+       @return The new created space time dataset
+
+       This function will raise a ScriptError in case of an error.
+    """
+
+    #Get the current mapset to create the id of the space time dataset
+
+    mapset = core.gisenv()["MAPSET"]
+
+    if name.find("@") < 0:
+        id = name + "@" + mapset
+    else:
+        n, m = name.split("@")
+        if mapset != m:
+            core.fatal(_("Space time datasets can only be created in the "
+                         "current mapset"))
+        id = name
+
+    if type == "strds" or type == "rast" or type == "raster":
+        sp = dataset_factory("strds", id)
+    elif type == "str3ds" or type == "rast3d" or type == "raster3d":
+        sp = dataset_factory("str3ds", id)
+    elif type == "stvds" or type == "vect" or type == "vector":
+        sp = dataset_factory("stvds", id)
+    else:
+        core.fatal(_("Unkown type: %s") % (type))
+
+    dbif, connected = init_dbif(dbif)
+
+    if sp.is_in_db(dbif) and overwrite is False:
+        if connected:
+            dbif.close()
+        core.fatal(_("Space time %(sp)s dataset <%(name)s> is already in the"
+                      " database. Use the overwrite flag.") % {
+                      'sp': sp.get_new_map_instance(None).get_type(),
+                      'name': name})
+        return None
+
+    if sp.is_in_db(dbif) and overwrite is True:
+        core.warning(_("Overwrite space time %(sp)s dataset <%(name)s> and "
+                       "unregister all maps.") % {
+                       'sp': sp.get_new_map_instance(None).get_type(),
+                       'name': name})
+        if not dry:
+            sp.delete(dbif)
+        sp = sp.get_new_instance(id)
+
+    core.verbose(_("Create new space time %s dataset.") %
+                   sp.get_new_map_instance(None).get_type())
+
+    sp.set_initial_values(temporal_type=temporaltype, semantic_type=semantic,
+                          title=title, description=descr)
+    if not dry:
+        sp.insert(dbif)
+
+    if connected:
+        dbif.close()
+
+    return sp

+ 5 - 28
lib/python/temporal/register.py

@@ -24,6 +24,7 @@ for details.
 
 from space_time_datasets import *
 from factory import *
+from open import *
 
 ###############################################################################
 
@@ -43,8 +44,8 @@ def register_maps_in_space_time_dataset(
        @param type The type of the maps rast, rast3d or vect
        @param name The name of the space time dataset
        @param maps A comma separated list of map names
-       @param file Input file one map with start and optional end time,
-                   one per line
+       @param file Input file, one map per line with start and optional
+                   end time
        @param start The start date and time of the first raster map
                     (format absolute: "yyyy-mm-dd HH:MM:SS" or "yyyy-mm-dd",
                     format relative is integer 5)
@@ -83,35 +84,11 @@ def register_maps_in_space_time_dataset(
 
     # We may need the mapset
     mapset = core.gisenv()["MAPSET"]
-
-    # The name of the space time dataset is optional
-    if name:
-        # Check if the dataset name contains the mapset as well
-        if name.find("@") < 0:
-            id = name + "@" + mapset
-        else:
-            id = name
-
-        if type == "rast" or type == "raster":
-            sp = dataset_factory("strds", id)
-        elif type == "rast3d":
-            sp = dataset_factory("str3ds", id)
-        elif type == "vect" or type == "vector":
-            sp = dataset_factory("stvds", id)
-        else:
-            core.fatal(_("Unkown map type: %s") % (type))
-
     dbif, connected = init_dbif(None)
 
+    # The name of the space time dataset is optional
     if name:
-        # Read content from temporal database
-        sp.select(dbif)
-
-        if not sp.is_in_db(dbif):
-            dbif.close()
-            core.fatal(_("Space time %(sp)s dataset <%(name)s> no found") %
-                         {'sp': sp.get_new_map_instance(None).get_type(),
-                          'name': name})
+        sp = open_old_space_time_dataset(name, type, dbif)
 
         if sp.is_time_relative() and not unit:
             dbif.close()

Rozdílová data souboru nebyla zobrazena, protože soubor je příliš velký
+ 53 - 52
lib/python/temporal/spatio_temporal_relationships.py


+ 9 - 19
lib/python/temporal/stds_export.py

@@ -35,6 +35,7 @@ import tempfile
 
 from space_time_datasets import *
 from factory import *
+from open import *
 
 proj_file_name = "proj.txt"
 init_file_name = "init.txt"
@@ -215,8 +216,8 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
     """
             !Export space time datasets as tar archive with optional compression
 
-            This method should be used to export space time datasets 
-            of type raster and vector as tar archive that can be reimported 
+            This method should be used to export space time datasets
+            of type raster and vector as tar archive that can be reimported
             with the method import_stds().
 
             @param input The name of the space time dataset to export
@@ -226,31 +227,20 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
               - "gzip" GNU zip compression
               - "bzip2" Bzip compression
             @param workdir The working directory used for extraction and packing
-            @param where The temporal WHERE SQL statement to select a subset 
+            @param where The temporal WHERE SQL statement to select a subset
                           of maps from the space time dataset
             @param format_ The export format:
               - "GTiff" Geotiff format, only for raster maps
-              - "pack" The GRASS raster, 3D raster or vector Pack format, 
+              - "pack" The GRASS raster, 3D raster or vector Pack format,
                        this is the default setting
-              - "GML" GML file export format, only for vector maps, 
+              - "GML" GML file export format, only for vector maps,
                       v.out.ogr export option
             @param type_ The space time dataset type
               - "strds" Space time raster dataset
               - "str3ds" Space time 3D raster dataset
               - "stvds" Space time vector dataset
     """
-    mapset = core.gisenv()["MAPSET"]
 
-    if input.find("@") >= 0:
-        id = input
-    else:
-        id = input + "@" + mapset
-
-    sp = dataset_factory(type_, id)
-
-    if sp.is_in_db() == False:
-        core.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
-                     'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
 
     # Save current working directory path
     old_cwd = os.getcwd()
@@ -259,14 +249,14 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
     new_cwd = tempfile.mkdtemp(dir=workdir)
     os.chdir(new_cwd)
 
-    sp.select()
-
     if type_ == "strds":
         columns = "name,start_time,end_time,min,max,datatype"
     elif type_ == "stvds":
         columns = "name,start_time,end_time,layer"
     else:
         columns = "name,start_time,end_time"
+
+    sp = open_old_space_time_dataset(input, type_)
     rows = sp.get_registered_maps(columns, where, "start_time", None)
 
     if compression == "gzip":
@@ -329,7 +319,7 @@ def export_stds(input, output, compression, workdir, where, format_="pack",
     init_file.write(string)
     init_file.close()
 
-    metadata = core.read_command("t.info", type=type_, input=id)
+    metadata = core.read_command("t.info", type=type_, input=sp.get_id())
     metadata_file = open(metadata_file_name, "w")
     metadata_file.write(metadata)
     metadata_file.close()

+ 8 - 4
lib/python/temporal/temporal_extent.py

@@ -195,9 +195,11 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
             end = self.D["end_time"]
 
         if issubclass(type(self), RelativeTemporalExtent):
-            return RelativeTemporalExtent(start_time=start, end_time=end, unit=self.get_unit())
+            return RelativeTemporalExtent(start_time=start, end_time=end,
+                                          unit=self.get_unit())
         elif issubclass(type(self), AbsoluteTemporalExtent):
-            return AbsoluteTemporalExtent(start_time=start, end_time=end, timezone=self.get_timezone())
+            return AbsoluteTemporalExtent(start_time=start, end_time=end,
+                                          timezone=self.get_timezone())
         elif issubclass(type(self), AbstractTemporalExtent):
             return AbstractTemporalExtent(start_time=start, end_time=end)
 
@@ -379,9 +381,11 @@ class AbstractTemporalExtent(SQLDatabaseInterface):
             end = self.D["end_time"]
 
         if issubclass(type(self), RelativeTemporalExtent):
-            return RelativeTemporalExtent(start_time=start, end_time=end, unit=self.get_unit())
+            return RelativeTemporalExtent(start_time=start, end_time=end,
+                                          unit=self.get_unit())
         elif issubclass(type(self), AbsoluteTemporalExtent):
-            return AbsoluteTemporalExtent(start_time=start, end_time=end, timezone=self.get_timezone())
+            return AbsoluteTemporalExtent(start_time=start, end_time=end,
+                                          timezone=self.get_timezone())
         elif issubclass(type(self), AbstractTemporalExtent):
             return AbstractTemporalExtent(start_time=start, end_time=end)
 

+ 2 - 15
lib/python/temporal/univar_statistics.py

@@ -25,6 +25,7 @@ for details.
 
 from space_time_datasets import *
 from factory import *
+from open import *
 
 ###############################################################################
 
@@ -45,21 +46,7 @@ def print_gridded_dataset_univar_statistics(type, input, where, extended,
     dbif = SQLDatabaseInterfaceConnection()
     dbif.connect()
 
-    mapset = core.gisenv()["MAPSET"]
-
-    if input.find("@") >= 0:
-        id = input
-    else:
-        id = input + "@" + mapset
-
-    sp = dataset_factory(type, id)
-
-    if sp.is_in_db(dbif) == False:
-        dbif.close()
-        core.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
-                     'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
-
-    sp.select(dbif)
+    sp = open_old_space_time_dataset(input, "strds", dbif)
 
     rows = sp.get_registered_maps(
         "id,start_time,end_time", where, "start_time", dbif)