
Better SQLite database interface design. Reduced the number of sqlite3 open and close calls; registering and unregistering maps in space time datasets should now be much faster (5-50 times).

git-svn-id: https://svn.osgeo.org/grass/grass/trunk@48354 15284696-431f-4ddb-bdfa-cd5b030d7da7
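
The core of the change: every method that touches the temporal database now takes an optional dbif argument, an already connected sql_database_interface. If the caller passes one, all SQL runs on that shared connection; if not, the method opens and closes its own connection as before. A minimal sketch of the intended calling pattern, assuming the classes are importable from the tgis modules inside a GRASS session (the dataset and map ids below are made up):

# Sketch only: one sqlite3 connection is reused for every operation below,
# instead of being opened and closed once per statement.
dbif = sql_database_interface()
dbif.connect()

stvds = space_time_vector_dataset("streams_stvds@PERMANENT")   # illustrative id
stvds.select(dbif)

for name in ("streams_2001@PERMANENT", "streams_2002@PERMANENT"):
    map = vector_dataset(name)     # maps assumed to be present in the temporal DB
    map.select(dbif)
    stvds.register_map(map, dbif)

stvds.update_from_registered_maps(dbif)
dbif.close()                       # single commit and close at the end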
Soeren Gebbert 13 years ago
parent
revision
3d1eefb579

+ 179 - 108
lib/python/tgis_abstract_datasets.py

@@ -69,43 +69,43 @@ class abstract_dataset(object):
         
         return (north, south, east, west, top, bottom)
         
-    def select(self):
+    def select(self, dbif=None):
 	"""Select temporal dataset entry from database and fill up the internal structure"""
-	self.base.select()
+	self.base.select(dbif)
 	if self.is_time_absolute():
-	    self.absolute_time.select()
+	    self.absolute_time.select(dbif)
         if self.is_time_relative():
-	    self.relative_time.select()
-	self.spatial_extent.select()
-	self.metadata.select()
+	    self.relative_time.select(dbif)
+	self.spatial_extent.select(dbif)
+	self.metadata.select(dbif)
         
-    def is_in_db(self):
+    def is_in_db(self, dbif=None):
 	"""Check if the temporal dataset entry is in the database"""
-	return self.base.is_in_db()
+	return self.base.is_in_db(dbif)
 
     def delete(self):
 	"""Delete temporal dataset entry from database if it exists"""
         raise IOError("This method must be implemented in the subclasses")
 
-    def insert(self):
+    def insert(self, dbif=None):
 	"""Insert temporal dataset entry into database from the internal structure"""
-	self.base.insert()
+	self.base.insert(dbif)
 	if self.is_time_absolute():
-	    self.absolute_time.insert()
+	    self.absolute_time.insert(dbif)
         if self.is_time_relative():
-	    self.relative_time.insert()
-	self.spatial_extent.insert()
-	self.metadata.insert()
+	    self.relative_time.insert(dbif)
+	self.spatial_extent.insert(dbif)
+	self.metadata.insert(dbif)
 
-    def update(self):
+    def update(self, dbif=None):
 	"""Update temporal dataset entry of database from the internal structure"""
-	self.base.update()
+	self.base.update(dbif)
 	if self.is_time_absolute():
-	    self.absolute_time.update()
+	    self.absolute_time.update(dbif)
         if self.is_time_relative():
-	    self.relative_time.update()
-	self.spatial_extent.update()
-	self.metadata.update()
+	    self.relative_time.update(dbif)
+	self.spatial_extent.update(dbif)
+	self.metadata.update(dbif)
 
     def print_self(self):
 	"""Print the content of the internal structure to stdout"""
@@ -229,17 +229,27 @@ class abstract_map_dataset(abstract_dataset):
         self.absolute_time.set_end_time(end_time)
         self.absolute_time.set_timezone(timezone)
 
-    def update_absolute_time(self, start_time, end_time=None, timezone=None):
+    def update_absolute_time(self, start_time, end_time=None, timezone=None, dbif = None):
         """Update the absolute time
 
            @start_time a datetime object specifying the start time of the map
            @end_time a datetime object specifying the end time of the map
           @timezone The timezone of the map
         """
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
         self.set_absolute_time(start_time, end_time, timezone)
-        self.absolute_time.update()
-        self.base.update()
-    
+        self.absolute_time.update(dbif)
+        self.base.update(dbif)
+
+        if connect == True:
+            dbif.close()
+
     def set_relative_time(self, interval):
         """Set the relative time interval 
         
@@ -250,73 +260,123 @@ class abstract_map_dataset(abstract_dataset):
         
         self.relative_time.set_interval(interval)
 
-    def update_relative_time(self, interval):
+    def update_relative_time(self, interval, dbif = None):
         """Set the relative time interval
 
            @interval A double value in days
 
         """
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
         self.set_relative_time(interval)
-        self.relative_time.update()
-        self.base.update()
+        self.relative_time.update(dbif)
+        self.base.update(dbif)
+
+        if connect == True:
+            dbif.close()
 
     def set_spatial_extent(self, north, south, east, west, top=0, bottom=0):
         """Set the spatial extent of the map"""
         self.spatial_extent.set_spatial_extent(north, south, east, west, top, bottom)
         
-    def delete(self):
+    def delete(self, dbif=None):
 	"""Delete a map entry from database if it exists
         
             Remove dependent entries:
             * Remove the map entry in each space time dataset in which this map is registered
             * Remove the space time dataset register table
         """
-        if self.is_in_db():
+
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
+        if self.is_in_db(dbif):
             
             # First we unregister from all dependent space time datasets
-            self.unregister()
+            self.unregister(dbif)
 
             # Remove the strds register table
             sql = "DROP TABLE " + self.get_stds_register()
             #print sql
-            self.base.connect()
-            self.base.cursor.execute(sql)
-            self.base.close()
+            dbif.cursor.execute(sql)
 
             core.verbose("Delete " + self.get_type() + " dataset <" + self.get_id() + "> from temporal database")
 
             # Delete yourself from the database, trigger functions will take care of dependencies
-            self.base.delete()
+            self.base.delete(dbif)
 
-    def unregister(self):
+        if connect == True:
+            dbif.close()
+
+    def unregister(self, dbif=None):
 	""" Remove the map entry in each space time dataset in which this map is registered
         """
 
         core.verbose("Unregister " + self.get_type() + " dataset <" + self.get_id() + "> from space time datasets")
+        
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
+        # Get all datasets in which this map is registered
+        rows = self.get_registered_datasets(dbif)
+
+        # For each stds in which the map is registered
+        if rows:
+            for row in rows:
+                # Create a space time dataset object to remove the map
+                # from its register
+                stds = self.get_new_stds_instance(row["id"])
+                stds.select(dbif)
+                stds.unregister_map(self, dbif)
+                # Take care to update the space time dataset after
+                # the map has been unregistered
+                stds.update_from_registered_maps(dbif)
+
+        if connect == True:
+            dbif.close()
+            
+    def get_registered_datasets(self, dbif=None):
+        """Return the ids of all space time datasets in which this map is registered,
+          as sqlite3 rows with column "id", or None if this map is not registered in
+          any space time dataset.
+        """
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
 
         # Select all data from the database
-        self.select()
+        self.select(dbif)
+
+        rows = None
+
         # Remove the map from all registered space time datasets
         if self.get_stds_register() != None:
             # Select all stds tables in which this map is registered
             sql = "SELECT id FROM " + self.get_stds_register()
             #print sql
-            self.base.connect()
-            self.base.cursor.execute(sql)
-            rows = self.base.cursor.fetchall()
-            self.base.close()
+            dbif.cursor.execute(sql)
+            rows = dbif.cursor.fetchall()
 
-            # For each stds in which the map is registered
-            if rows:
-                for row in rows:
-                    # Create a space time dataset object to remove the map
-                    # from its register
-                    stds = self.get_new_stds_instance(row["id"])
-                    stds.select()
-                    stds.unregister_map(self)
-                    # Take care to update the space time dataset after
-                    # the map has been unregistred
-                    stds.update_from_registered_maps()
+        if connect == True:
+            dbif.close()
+            
+        return rows
 
 ###############################################################################
 
@@ -371,36 +431,42 @@ class abstract_space_time_dataset(abstract_dataset):
         self.metadata.set_title(title)
         self.metadata.set_description(description)
 
-    def delete(self):
+    def delete(self, dbif=None):
         """Delete a space time dataset from the database"""
         # First we need to check if maps are registered in this dataset and
         # unregister them
 
         core.verbose("Delete space time " + self.get_new_map_instance(ident=None).get_type() + " dataset <" + self.get_id() + "> from temporal database")
 
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
         if self.get_map_register():
             sql = "SELECT id FROM " + self.get_map_register()
-            self.base.connect()
-            self.base.cursor.execute(sql)
-            rows = self.base.cursor.fetchall()
-            self.base.close()
+            dbif.cursor.execute(sql)
+            rows = dbif.cursor.fetchall()
             # Unregister each registered map in the table
             if rows:
                 for row in rows:
                     # Unregister map
                     map = self.get_new_map_instance(row["id"])
-                    self.unregister_map(map)
+                    self.unregister_map(map, dbif)
 
             # Drop the map register table
             sql = "DROP TABLE " + self.get_map_register()
-            self.base.connect()
-            self.base.cursor.execute(sql)
-            self.base.close()
+            dbif.cursor.execute(sql)
 
         # Remove the primary key, the foreign keys will be removed by trigger
-        self.base.delete()
+        self.base.delete(dbif)
 
-    def register_map(self, map):
+        if connect == True:
+            dbif.close()
+            
+    def register_map(self, map, dbif=None):
         """Register a map in the space time dataset.
 
             This method takes care of the registration of a map
@@ -410,13 +476,20 @@ class abstract_space_time_dataset(abstract_dataset):
             and return False
         """
 
-        if map.is_in_db() == False:
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
+        if map.is_in_db(dbif) == False:
             core.fatal("Only maps with absolute or relative valid time can be registered")
 
         core.verbose("Register " + map.get_type() + " map: " + map.get_id() + " in space time " + map.get_type() + " dataset <" + self.get_id() + ">")
 
         # First select all data from the database
-        map.select()
+        map.select(dbif)
         map_id = map.base.get_id()
         map_name = map.base.get_name()
         map_mapset = map.base.get_mapset()
@@ -437,10 +510,8 @@ class abstract_space_time_dataset(abstract_dataset):
         # Check if map is already registered
         if stds_register_table:
             sql = "SELECT id FROM " + stds_register_table + " WHERE id = (?)"
-            self.base.connect()
-            self.base.cursor.execute(sql, (map_id,))
-            row = self.base.cursor.fetchone()
-            self.base.close()
+            dbif.cursor.execute(sql, (map_id,))
+            row = dbif.cursor.fetchone()
             # In case of no entry make a new one
             if row and row[0] == map_id:
                core.warning("Map " + map_id + " is already registered.")
@@ -462,15 +533,12 @@ class abstract_space_time_dataset(abstract_dataset):
             sql = sql.replace("TABLE_NAME", uuid_rand )
             sql = sql.replace("MAP_ID", map_id)
             sql = sql.replace("STDS", self.get_type())
-
-            self.base.connect()
-            self.base.cursor.executescript(sql)
-            self.base.close()
+            dbif.cursor.executescript(sql)
 
             map_register_table = uuid_rand + "_" + self.get_type() + "_register"
             # Set the stds register table name and put it into the DB
             map.set_stds_register(map_register_table)
-            map.metadata.update()
+            map.metadata.update(dbif)
             
             core.verbose("Created register table <" +  map_register_table + "> for " + map.get_type() + " map <" + map.get_id() + ">")
 
@@ -489,10 +557,7 @@ class abstract_space_time_dataset(abstract_dataset):
             sql_script += sql
             sql_script += "\n"
             sql_script += "END TRANSACTION;"
-
-            self.base.connect()
-            self.base.cursor.executescript(sql_script)
-            self.base.close()
+            dbif.cursor.executescript(sql_script)
 
             # Triggers have been disabled due to performance issues during registration
             ## We need raster specific trigger
@@ -508,57 +573,58 @@ class abstract_space_time_dataset(abstract_dataset):
             #sql_script += "\n"
             #sql_script += "END TRANSACTION;"
 
-            #self.base.connect()
-            #self.base.cursor.executescript(sql_script)
-            #self.base.close()
+            #dbif.cursor.executescript(sql_script)
 
             stds_register_table = stds_name + "_" + stds_mapset + "_" + map.get_type() + "_register"
 
             # Set the map register table name and put it into the DB
             self.set_map_register(stds_register_table)
-            self.metadata.update()
+            self.metadata.update(dbif)
 
             core.verbose("Created register table <" +  stds_register_table + "> for space time " + map.get_type() + " dataset <" + self.get_id() + ">")
 
         # Register the stds in the map stds register table
         # Check if the entry is already there
         sql = "SELECT id FROM " + map_register_table + " WHERE id = ?"
-        self.base.connect()
-        self.base.cursor.execute(sql, (self.base.get_id(),))
-      	row = self.base.cursor.fetchone()
-	self.base.close()
+        dbif.cursor.execute(sql, (self.base.get_id(),))
+      	row = dbif.cursor.fetchone()
 
         # In case of no entry make a new one
         if row == None:
             sql = "INSERT INTO " + map_register_table + " (id) " + "VALUES (?)"
             #print sql
-            self.base.connect()
-            self.base.cursor.execute(sql, (self.base.get_id(),))
-            self.base.close()
+            dbif.cursor.execute(sql, (self.base.get_id(),))
 
         # Now put the raster name in the stds map register table
         sql = "INSERT INTO " + stds_register_table + " (id) " + "VALUES (?)"
         #print sql
-        self.base.connect()
-        self.base.cursor.execute(sql, (map_id,))
-        self.base.close()
+        dbif.cursor.execute(sql, (map_id,))
 
+        if connect == True:
+            dbif.close()
+            
         return True
 
-    def unregister_map(self, map):
+    def unregister_map(self, map, dbif = None):
         """Unregister a map from the space time dataset.
 
             This method takes care of the unregistration of a map
             from a space time dataset.
         """
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
 
-        if map.is_in_db() == False:
+        if map.is_in_db(dbif) == False:
             core.fatal("Unable to find map <" + map.get_id() + "> in temporal database")
 
         core.info("Unregister " + map.get_type() + " map: " + map.get_id())
 
         # First select all data from the database
-        map.select()
+        map.select(dbif)
         map_id = map.base.get_id()
         map_register_table = map.get_stds_register()
 
@@ -567,10 +633,8 @@ class abstract_space_time_dataset(abstract_dataset):
 
         # Check if the map is registered in the space time raster dataset
         sql = "SELECT id FROM " + map_register_table + " WHERE id = ?"
-        self.base.connect()
-        self.base.cursor.execute(sql, (self.base.get_id(),))
-      	row = self.base.cursor.fetchone()
-	self.base.close()
+        dbif.cursor.execute(sql, (self.base.get_id(),))
+      	row = dbif.cursor.fetchone()
 
         # Break if the map is not registered
         if row == None:
@@ -580,18 +644,17 @@ class abstract_space_time_dataset(abstract_dataset):
         # Remove the space time raster dataset from the raster dataset register
         if map_register_table != None:
             sql = "DELETE FROM " + map_register_table + " WHERE id = ?"
-            self.base.connect()
-            self.base.cursor.execute(sql, (self.base.get_id(),))
-            self.base.close()
+            dbif.cursor.execute(sql, (self.base.get_id(),))
 
         # Remove the raster map from the space time raster dataset register
         if stds_register_table != None:
             sql = "DELETE FROM " + stds_register_table + " WHERE id = ?"
-            self.base.connect()
-            self.base.cursor.execute(sql, (map_id,))
-            self.base.close()
+            dbif.cursor.execute(sql, (map_id,))
 
-    def update_from_registered_maps(self):
+        if connect == True:
+            dbif.close()
+            
+    def update_from_registered_maps(self, dbif = None):
         """This methods updates the spatial and temporal extent as well as
           type specific metadata. It should always be called after maps are registered
            or unregistered/deleted from the space time dataset.
@@ -602,6 +665,13 @@ class abstract_space_time_dataset(abstract_dataset):
         """
         core.info("Update metadata, spatial and temporal extent from all registered maps of <" + self.get_id() + ">")
 
+        connect = False
+
+        if dbif == None:
+            dbif = sql_database_interface()
+            dbif.connect()
+            connect = True
+
         # Get basic info
         stds_name = self.base.get_name()
         stds_mapset = self.base.get_mapset()
@@ -634,6 +704,7 @@ class abstract_space_time_dataset(abstract_dataset):
 
         sql_script += "END TRANSACTION;"
 
-        self.base.connect()
-        self.base.cursor.executescript(sql_script)
-        self.base.close()
+        dbif.cursor.executescript(sql_script)
+
+        if connect == True:
+            dbif.close()
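
Every method changed in this file follows the same guard: open a private connection only when the caller did not supply one, and close (which also commits) only what the method itself opened. A condensed sketch of that idiom, with a made-up method name and query:

def some_db_method(self, dbif=None):
    connect = False
    if dbif == None:
        dbif = sql_database_interface()   # fall back to a private connection
        dbif.connect()
        connect = True

    # All statements go through the shared (or private) cursor.
    dbif.cursor.execute("SELECT id FROM " + self.get_stds_register())
    rows = dbif.cursor.fetchall()

    # Close only what this method opened; a caller-supplied dbif stays open.
    if connect == True:
        dbif.close()

    return rows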

+ 53 - 26
lib/python/tgis_base.py

@@ -137,7 +137,6 @@ class dict_sql_serializer(object):
 class sql_database_interface(dict_sql_serializer):
     """This is the sql database interface to sqlite3"""
     def __init__(self, table=None, ident=None, database=None):
-
         dict_sql_serializer.__init__(self)
 
         self.table = table # Name of the table, set in the subclass
@@ -151,34 +150,46 @@ class sql_database_interface(dict_sql_serializer):
         return self.table
 
     def connect(self):
+        #print "Connect to",  self.database
 	self.connection = sqlite3.connect(self.database, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
 	self.connection.row_factory = sqlite3.Row
         self.cursor = self.connection.cursor()
 
     def close(self):
+        #print "Close connection to",  self.database
 	self.connection.commit()
         self.cursor.close()
 
     def get_delete_statement(self):
 	return "DELETE FROM " + self.get_table_name() + " WHERE id = \"" + str(self.ident) + "\""
 
-    def delete(self):
-	self.connect()
+    def delete(self, dbif=None):
 	sql = self.get_delete_statement()
         #print sql
-        self.cursor.execute(sql)
-	self.close()
+        
+        if dbif:
+            dbif.cursor.execute(sql)
+        else:
+            self.connect()
+            self.cursor.execute(sql)
+            self.close()
 
     def get_is_in_db_statement(self):
 	return "SELECT id FROM " + self.get_table_name() + " WHERE id = \"" + str(self.ident) + "\""
 
-    def is_in_db(self):
-	self.connect()
+    def is_in_db(self, dbif=None):
+
 	sql = self.get_is_in_db_statement()
         #print sql
-        self.cursor.execute(sql)
-	row = self.cursor.fetchone()
-	self.close()
+
+        if dbif:
+            dbif.cursor.execute(sql)
+            row = dbif.cursor.fetchone()
+        else:
+            self.connect()
+            self.cursor.execute(sql)
+            row = self.cursor.fetchone()
+            self.close()
 
 	# Nothing found
 	if row == None:
@@ -189,16 +200,25 @@ class sql_database_interface(dict_sql_serializer):
     def get_select_statement(self):
 	return self.serialize("SELECT", self.get_table_name(), "WHERE id = \"" + str(self.ident) + "\"")
 
-    def select(self):
-	self.connect()
+    def select(self, dbif=None):
 	sql, args = self.get_select_statement()
 	#print sql
 	#print args
-	if len(args) == 0:
-            self.cursor.execute(sql)
-	else:
-            self.cursor.execute(sql, args)
-	row = self.cursor.fetchone()
+
+        if dbif:
+            if len(args) == 0:
+                dbif.cursor.execute(sql)
+            else:
+                dbif.cursor.execute(sql, args)
+            row = dbif.cursor.fetchone()
+        else:
+            self.connect()
+            if len(args) == 0:
+                self.cursor.execute(sql)
+            else:
+                self.cursor.execute(sql, args)
+            row = self.cursor.fetchone()
+            self.close()
 
 	# Nothing found
 	if row == None:
@@ -208,34 +228,41 @@ class sql_database_interface(dict_sql_serializer):
 	    self.deserialize(row)
 	else:
 	    raise IOError
-	self.close()
 
 	return True
 
     def get_insert_statement(self):
 	return self.serialize("INSERT", self.get_table_name())
 
-    def insert(self):
-	self.connect()
+    def insert(self, dbif=None):
 	sql, args = self.get_insert_statement()
 	#print sql
 	#print args
-        self.cursor.execute(sql, args)
-	self.close()
+
+        if dbif:
+            dbif.cursor.execute(sql, args)
+        else:
+            self.connect()
+            self.cursor.execute(sql, args)
+            self.close()
 
     def get_update_statement(self):
 	return self.serialize("UPDATE", self.get_table_name(), "WHERE id = \"" + str(self.ident) + "\"")
 
-    def update(self):
+    def update(self, dbif=None):
 	if self.ident == None:
 	    raise IOError("Missing identifier");
 
 	sql, args = self.get_update_statement()
 	#print sql
 	#print args
-	self.connect()
-        self.cursor.execute(sql, args)
-	self.close()
+
+        if dbif:
+            dbif.cursor.execute(sql, args)
+        else:
+            self.connect()
+            self.cursor.execute(sql, args)
+            self.close()
 
 ###############################################################################
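
Note that sql_database_interface.close() is where the commit happens, so statements issued through one shared dbif run on a single connection and are committed together when the caller finally closes it. A small illustration (the register table name and map ids are made up):

dbif = sql_database_interface()
dbif.connect()

# Nothing is committed yet; sqlite3 keeps these inserts in one open transaction.
for i in range(100):
    dbif.cursor.execute("INSERT INTO strds_register (id) VALUES (?)",
                        ("map_%i@PERMANENT" % i,))

dbif.close()   # connection.commit() and cursor.close() happen here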
 

+ 47 - 20
lib/python/tgis_space_time_datasets.py

@@ -373,7 +373,7 @@ class space_time_vector_dataset(abstract_space_time_dataset):
 
 ###############################################################################
 
-def register_maps_in_space_time_dataset(type, name, maps, start=None, increment=None):
+def register_maps_in_space_time_dataset(type, name, maps, start=None, increment=None, dbif = None):
     """Use this method to register maps in space time datasets. This function is generic and
       can handle raster, vector and raster3d maps as well as their space time datasets.
 
@@ -406,10 +406,15 @@ def register_maps_in_space_time_dataset(type, name, maps, start=None, increment=
     if type == "vector":
         sp = space_time_vector_dataset(id)
 
+    connect = False
+
+    if dbif == None:
+        dbif = sql_database_interface()
+        dbif.connect()
+        connect = True
+
     # Read content from temporal database
-    sp.select()
+    sp.select(dbif)
 
-    if sp.is_in_db() == False:
+    if sp.is_in_db(dbif) == False:
         core.fatal("Space time " + sp.get_new_map_instance(None).get_type() + " dataset <" + name + "> not found")
 
     if maps.find(",") == -1:
@@ -431,33 +436,36 @@ def register_maps_in_space_time_dataset(type, name, maps, start=None, increment=
         # In case the map is already registered print a message and continue to the next map
 
         # Put the map into the database
-        if map.is_in_db() == False:
+        if map.is_in_db(dbif) == False:
             # Break in case no valid time is provided
             if start == "" or start == None:
                 core.fatal("Unable to register " + map.get_type() + " map <" + map.get_id() + ">. The map has no valid time and the start time is not set.")
             # Load the data from the grass file database
             map.load()
             #  Put it into the temporal database
-            map.insert()
+            map.insert(dbif)
         else:
-            map.select()
+            map.select(dbif)
             if map.get_temporal_type() != sp.get_temporal_type():
                 core.fatal("Unable to register " + map.get_type() + " map <" + map.get_id() + ">. The temporal types are different.")
 
         # Set the valid time
         if start:
-            assign_valid_time_to_map(sp.get_temporal_type(), map, start, increment, count)
+            assign_valid_time_to_map(sp.get_temporal_type(), map, start, increment, count, dbif)
 
         # Finally Register map in the space time dataset
-        sp.register_map(map)
+        sp.register_map(map, dbif)
         count += 1
 
     # Update the space time tables
-    sp.update_from_registered_maps()
+    sp.update_from_registered_maps(dbif)
 
+    if connect == True:
+        dbif.close()
+        
 ###############################################################################
 
-def unregister_maps_from_space_time_datasets(type, name, maps):
+def unregister_maps_from_space_time_datasets(type, name, maps, dbif = None):
     """Unregister maps from a single space time dataset or, in case no dataset name is provided,
       unregister from all datasets in which the maps are registered.
 
@@ -467,6 +475,11 @@ def unregister_maps_from_space_time_datasets(type, name, maps):
     """
     mapset =  core.gisenv()["MAPSET"]
 
+    connect = False
+
+    if dbif == None:
+        dbif = sql_database_interface()
+        dbif.connect()
+        connect = True
+
     # In case a space time dataset is specified
     if name:
         # Check if the dataset name contains the mapset as well
@@ -482,7 +495,7 @@ def unregister_maps_from_space_time_datasets(type, name, maps):
         if type == "vector":
             sp = space_time_vector_dataset(id)
 
-        if sp.is_in_db() == False:
+        if sp.is_in_db(dbif) == False:
             core.fatal("Space time " + sp.get_new_map_instance(None).get_type() + " dataset <" + name + "> not found")
 
     # Build the list of maps
@@ -508,18 +521,21 @@ def unregister_maps_from_space_time_datasets(type, name, maps):
             map = vector_dataset(mapid)
 
         # Unregister map if in database
-        if map.is_in_db() == True:
+        if map.is_in_db(dbif) == True:
             if name:
-                sp.select()
-                sp.unregister_map(map)
+                sp.select(dbif)
+                sp.unregister_map(map, dbif)
             else:
-                map.select()
-                map.unregister()
+                map.select(dbif)
+                map.unregister(dbif)
 
     if name:
-        sp.update_from_registered_maps()
+        sp.update_from_registered_maps(dbif)
 
-def assign_valid_time_to_map(ttype, map, start, increment=None, mult=1):
+    if connect == True:
+        dbif.close()
+        
+def assign_valid_time_to_map(ttype, map, start, increment=None, mult=1, dbif = None):
     """Assign the valid time to a map dataset
 
       @ttype The temporal type to assign; it determines the expected time format
@@ -528,6 +544,14 @@ def assign_valid_time_to_map(ttype, map, start, increment=None, mult=1):
        @increment Time increment between maps for time stamp creation (format absolute: NNN seconds, minutes, hours, days, weeks, months, years; format relative: 1.0)
       @mult A multiplier for the increment
     """
+
+    connect = False
+
+    if dbif == None:
+        dbif = sql_database_interface()
+        dbif.connect()
+        connect = True
+
     if ttype == "absolute":
         # Create the start time object
         if start.find(":") > 0:
@@ -544,11 +568,14 @@ def assign_valid_time_to_map(ttype, map, start, increment=None, mult=1):
             end_time = increment_datetime_by_string(start_time, increment, 1)
 
         core.verbose("Set absolute valid time for map <" + map.get_id() + "> to " + str(start_time) + " - " + str(end_time))
-        map.update_absolute_time(start_time, end_time)
+        map.update_absolute_time(start_time, end_time, None, dbif)
     else:
         if increment:
             interval = float(start) + mult * float(increment)
         else:
             interval = float(start)
         core.verbose("Set relative valid time for map <" + map.get_id() + "> to " + str(interval))
-        map.update_relative_time(interval)
+        map.update_relative_time(interval, dbif)
+
+    if connect == True:
+        dbif.close()
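
The module-level helpers accept the same optional dbif, so a script can drive registration and unregistration of many maps over one connection. A sketch using the signatures above (dataset and map names are illustrative, and the start/increment formats follow the docstrings):

dbif = sql_database_interface()
dbif.connect()

# Register three raster maps with absolute time, one day apart.
register_maps_in_space_time_dataset("raster", "precip_daily",
                                    maps="prec_1,prec_2,prec_3",
                                    start="2001-01-01", increment="1 days",
                                    dbif=dbif)

# Later remove one of them again, still on the same connection.
unregister_maps_from_space_time_datasets("raster", "precip_daily",
                                         maps="prec_2", dbif=dbif)

dbif.close()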