Procházet zdrojové kódy

Added check map/stds dataset functions

git-svn-id: https://svn.osgeo.org/grass/grass/trunk@57515 15284696-431f-4ddb-bdfa-cd5b030d7da7
Soeren Gebbert před 11 roky
rodič
revize
2351f1bf45

+ 2 - 1
lib/python/temporal/abstract_map_dataset.py

@@ -166,7 +166,8 @@ class AbstractMapDataset(AbstractDataset):
         """
         """
         return self.base.get_map_id()
         return self.base.get_map_id()
 
 
-    def build_id(self, name, mapset, layer=None):
+    @staticmethod
+    def build_id(name, mapset, layer=None):
         """!Convenient method to build the unique identifier
         """!Convenient method to build the unique identifier
 
 
             Existing layer and mapset definitions in the name
             Existing layer and mapset definitions in the name

+ 5 - 7
lib/python/temporal/extract.py

@@ -58,11 +58,9 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
     dbif.connect()
     dbif.connect()
 
 
     sp = open_old_space_time_dataset(input, type, dbif)
     sp = open_old_space_time_dataset(input, type, dbif)
-    dummy = sp.get_new_map_instance(None)
     # Check the new stds
     # Check the new stds
-    new_sp = open_new_space_time_dataset(output, type, sp.get_temporal_type(),
-                                         "None", "None", "mean", dbif,
-                                         core.overwrite(), dry=True)
+    new_sp = check_new_space_time_dataset(output, type, dbif,
+                                          core.overwrite())
     if type == "vector":
     if type == "vector":
         rows = sp.get_registered_maps(
         rows = sp.get_registered_maps(
             "id,name,mapset,layer", where, "start_time", dbif)
             "id,name,mapset,layer", where, "start_time", dbif)
@@ -97,9 +95,9 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
                     expr = expr.replace(sp.base.get_name(), row["id"])
                     expr = expr.replace(sp.base.get_name(), row["id"])
 
 
                     # We need to build the id
                     # We need to build the id
-                    map_id = dummy.build_id(map_name, mapset)
+                    map_id = AbstractMapDataset.build_id(map_name, mapset)
                 else:
                 else:
-                    map_id = dummy.build_id(map_name, mapset, row["layer"])
+                    map_id = AbstractMapDataset.build_id(map_name, mapset, row["layer"])
 
 
                 new_map = sp.get_new_map_instance(map_id)
                 new_map = sp.get_new_map_instance(map_id)
 
 
@@ -171,7 +169,7 @@ def extract_dataset(input, output, type, where, expression, base, nprocs=1,
                                              sp.get_temporal_type(),
                                              sp.get_temporal_type(),
                                              title, description,
                                              title, description,
                                              semantic_type, dbif,
                                              semantic_type, dbif,
-                                             core.overwrite(), dry=False)
+                                             core.overwrite())
 
 
         # collect empty maps to remove them
         # collect empty maps to remove them
         empty_maps = []
         empty_maps = []

+ 3 - 5
lib/python/temporal/mapcalc.py

@@ -96,10 +96,8 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
         sp = open_old_space_time_dataset(input, type, dbif)
         sp = open_old_space_time_dataset(input, type, dbif)
         input_list.append(copy.copy(sp))
         input_list.append(copy.copy(sp))
 
 
-    new_sp = open_new_space_time_dataset(output, type,
-                                         first_input.get_temporal_type(),
-                                         "New", "New dataset", "mean", dbif,
-                                         core.overwrite(), True)
+    new_sp = check_new_space_time_dataset(output, type, dbif,
+                                         core.overwrite())
 
 
     # Sample all inputs by the first input and create a sample matrix
     # Sample all inputs by the first input and create a sample matrix
     if spatial:
     if spatial:
@@ -285,7 +283,7 @@ def dataset_mapcalculator(inputs, output, type, expression, base, method,
         new_sp = open_new_space_time_dataset(output, type,
         new_sp = open_new_space_time_dataset(output, type,
                                          temporal_type, title, description,
                                          temporal_type, title, description,
                                          semantic_type, dbif,
                                          semantic_type, dbif,
-                                         core.overwrite(), False)
+                                         core.overwrite())
         count = 0
         count = 0
 
 
         # collect empty maps to remove them
         # collect empty maps to remove them

+ 124 - 22
lib/python/temporal/open.py

@@ -76,26 +76,17 @@ def open_old_space_time_dataset(name, type, dbif=None):
 
 
 ###############################################################################
 ###############################################################################
 
 
-def open_new_space_time_dataset(name, type, temporaltype, title, descr, semantic,
-                              dbif=None, overwrite=False, dry=False):
-    """!Create a new space time dataset of a specific type
-
-       This function is sensitive to the settings in grass.core.overwrite to
-       overwrite existing space time datasets.
+def check_new_space_time_dataset(name, type, dbif=None, overwrite=False):
+    """!Check if a new space time dataset of a specific type can be created
 
 
        @param name The name of the new space time dataset
        @param name The name of the new space time dataset
        @param type The type of the new space time dataset (strd, str3ds, stvds,
        @param type The type of the new space time dataset (strd, str3ds, stvds,
                                                       raster, vector, raster3d)
                                                       raster, vector, raster3d)
-       @param temporaltype The temporal type (relative or absolute)
-       @param title The title
-       @param descr The dataset description
-       @param semantic Semantical information
        @param dbif The temporal database interface to be used
        @param dbif The temporal database interface to be used
        @param overwrite Flag to allow overwriting
        @param overwrite Flag to allow overwriting
-       @param dry Do not create the space time dataset in the temporal database,
-                  make a dry run with including all checks
 
 
-       @return The new created space time dataset
+       @return A space time dataset object that must be filled with
+               content before insertion in the temporal database
 
 
        This function will raise a ScriptError in case of an error.
        This function will raise a ScriptError in case of an error.
     """
     """
@@ -120,26 +111,53 @@ def open_new_space_time_dataset(name, type, temporaltype, title, descr, semantic
     elif type == "stvds" or type == "vect" or type == "vector":
     elif type == "stvds" or type == "vect" or type == "vector":
         sp = dataset_factory("stvds", id)
         sp = dataset_factory("stvds", id)
     else:
     else:
-        core.fatal(_("Unkown type: %s") % (type))
+        core.error(_("Unkown type: %s") % (type))
+        return None
 
 
     dbif, connected = init_dbif(dbif)
     dbif, connected = init_dbif(dbif)
 
 
     if sp.is_in_db(dbif) and overwrite is False:
     if sp.is_in_db(dbif) and overwrite is False:
-        if connected:
-            dbif.close()
         core.fatal(_("Space time %(sp)s dataset <%(name)s> is already in the"
         core.fatal(_("Space time %(sp)s dataset <%(name)s> is already in the"
                       " database. Use the overwrite flag.") % {
                       " database. Use the overwrite flag.") % {
                       'sp': sp.get_new_map_instance(None).get_type(),
                       'sp': sp.get_new_map_instance(None).get_type(),
                       'name': name})
                       'name': name})
-        return None
+    if connected:
+        dbif.close()
+
+    return sp
+
+###############################################################################
 
 
-    if sp.is_in_db(dbif) and overwrite is True:
+def open_new_space_time_dataset(name, type, temporaltype, title, descr, semantic,
+                              dbif=None, overwrite=False):
+    """!Create a new space time dataset of a specific type
+
+       @param name The name of the new space time dataset
+       @param type The type of the new space time dataset (strd, str3ds, stvds,
+                                                      raster, vector, raster3d)
+       @param temporaltype The temporal type (relative or absolute)
+       @param title The title
+       @param descr The dataset description
+       @param semantic Semantic information
+       @param dbif The temporal database interface to be used
+       @param overwrite Flag to allow overwriting
+       @param dry Do not create the space time dataset in the temporal database,
+                  make a dry run with including all checks
+
+       @return The new created space time dataset
+
+       This function will raise a ScriptError in case of an error.
+    """
+    dbif, connected = init_dbif(dbif)
+    sp =  check_new_space_time_dataset(name, type, dbif, overwrite)
+
+    if sp.is_in_db(dbif):
         core.warning(_("Overwrite space time %(sp)s dataset <%(name)s> and "
         core.warning(_("Overwrite space time %(sp)s dataset <%(name)s> and "
                        "unregister all maps.") % {
                        "unregister all maps.") % {
                        'sp': sp.get_new_map_instance(None).get_type(),
                        'sp': sp.get_new_map_instance(None).get_type(),
                        'name': name})
                        'name': name})
-        if not dry:
-            sp.delete(dbif)
+        id = sp.get_id()
+        sp.delete(dbif)
         sp = sp.get_new_instance(id)
         sp = sp.get_new_instance(id)
 
 
     core.verbose(_("Create new space time %s dataset.") %
     core.verbose(_("Create new space time %s dataset.") %
@@ -147,10 +165,94 @@ def open_new_space_time_dataset(name, type, temporaltype, title, descr, semantic
 
 
     sp.set_initial_values(temporal_type=temporaltype, semantic_type=semantic,
     sp.set_initial_values(temporal_type=temporaltype, semantic_type=semantic,
                           title=title, description=descr)
                           title=title, description=descr)
-    if not dry:
-        sp.insert(dbif)
+
+    sp.insert(dbif)
 
 
     if connected:
     if connected:
         dbif.close()
         dbif.close()
 
 
     return sp
     return sp
+
+############################################################################
+
+def check_new_map_dataset(name, layer=None, mapset=None,
+                          type="raster", overwrite=False, dbif=None):
+    """!Check if a new map dataset of a specific type can be created in
+        the temporal database
+
+       @param name The name of the new map dataset
+       @param layer The layer of the new map dataset
+       @param mapset The current mapset the new map dataset is created in,
+                     this argument is optional, if not provided g.gisenv
+                     will be called to receive the current mapset
+       @param type The type of the new map dataset (raster, vector, raster3d)
+       @param dbif The temporal database interface to be used
+       @param overwrite Flag to allow overwriting
+
+       @return A map dataset object
+
+       This function will raise a ScriptError in case of an error.
+    """
+    if not mapset:
+        mapset = core.gisenv()["MAPSET"]
+
+    dbif, connected = init_dbif(dbif)
+    map_id = AbstractMapDataset.build_id(name, mapset, layer)
+
+    new_map = dataset_factory(type, map_id)
+    # Check if new map is in the temporal database
+    if new_map.is_in_db(dbif):
+        if not overwrite:
+            if connected:
+                dbif.close()
+            core.fatal(_("Map <%s> is already in temporal database,"
+                         " use overwrite flag to overwrite") % (map_id))
+
+    if connected:
+        dbif.close()
+
+    return new_map
+
+############################################################################
+
+def open_new_map_dataset(name, layer=None, mapset=None, type="raster",
+                         temporal_extent=None, overwrite=False,
+                         dbif=None):
+    """!Create a new map dataset object of a specific type that can be
+        registered in the temporal database
+
+       @param name The name of the new map dataset
+       @param layer The layer of the new map dataset
+       @param mapset The current mapset the new map dataset is created in,
+                     this argument is optional, if not provided g.gisenv
+                     will be called to receive the current mapset
+       @param type The type of the new map dataset (raster, vector, raster3d)
+       @param dbif The temporal database interface to be used
+       @param overwrite Flag to allow overwriting
+
+       @return A map dataset object
+
+       This function will raise a ScriptError in case of an error.
+    """
+
+    if not mapset:
+        mapset = core.gisenv()["MAPSET"]
+
+    dbif, connected = init_dbif(dbif)
+    new_map = check_new_map_dataset(name, layer, mapset, "raster",
+                                    overwrite, dbif)
+
+    # Check if new map is in the temporal database
+    if new_map.is_in_db(dbif):
+        # Remove the existing temporal database entry
+        map_id = new_map.get_id()
+        new_map.delete(dbif)
+        new_map = new_map.get_new_instance(map_id)
+
+    if temporal_extent:
+        new_map.set_temporal_extent(temporal_extent)
+
+    if connected:
+        dbif.close()
+
+    return new_map

+ 2 - 5
lib/python/temporal/register.py

@@ -98,9 +98,6 @@ def register_maps_in_space_time_dataset(
                          'sp': sp.get_new_map_instance(None).get_type(),
                          'sp': sp.get_new_map_instance(None).get_type(),
                          'name': name})
                          'name': name})
 
 
-    # We need a dummy map object to build the map ids
-    dummy = dataset_factory(type, None)
-
     maplist = []
     maplist = []
 
 
     # Map names as comma separated string
     # Map names as comma separated string
@@ -113,7 +110,7 @@ def register_maps_in_space_time_dataset(
         # Build the map list again with the ids
         # Build the map list again with the ids
         for count in range(len(maplist)):
         for count in range(len(maplist)):
             row = {}
             row = {}
-            mapid = dummy.build_id(maplist[count], mapset, None)
+            mapid = AbstractMapDataset.build_id(maplist[count], mapset, None)
 
 
             row["id"] = mapid
             row["id"] = mapid
             maplist[count] = row
             maplist[count] = row
@@ -151,7 +148,7 @@ def register_maps_in_space_time_dataset(
             if start_time_in_file and not end_time_in_file:
             if start_time_in_file and not end_time_in_file:
                 row["start"] = line_list[1].strip()
                 row["start"] = line_list[1].strip()
 
 
-            row["id"] = dummy.build_id(mapname, mapset)
+            row["id"] = AbstractMapDataset.build_id(mapname, mapset)
 
 
             maplist.append(row)
             maplist.append(row)