stds_import.py

  1. """!@package grass.temporal
  2. @brief GRASS Python scripting module (temporal GIS functions)
  3. Temporal GIS export functions to be used in temporal modules
  4. Usage:
  5. @code
  6. import grass.temporal as tgis
  7. input="/tmp/temp_1950_2012.tar.gz"
  8. output="temp_1950_2012"
  9. extrdir="/tmp"
  10. title="My new dataset"
  11. descr="May new shiny dataset"
  12. location=None
  13. link=True
  14. exp=True
  15. overr=False
  16. create=False
  17. tgis.import_stds(input, output, extrdir, title, descr, location,
  18. link, exp, overr, create, "strds")
  19. ...
  20. @endcode
  21. (C) 2008-2011 by the GRASS Development Team
  22. This program is free software under the GNU General Public
  23. License (>=v2). Read the file COPYING that comes with GRASS
  24. for details.
  25. @author Soeren Gebbert
  26. """

import shutil
import os
import os.path
import tarfile
import tempfile
import time
import filecmp
from space_time_datasets_tools import *

proj_file_name = "proj.txt"
init_file_name = "init.txt"
list_file_name = "list.txt"

# This global variable is used to import each vector map only once,
# since a single vector map may have several layers
# and therefore several attribute tables
imported_maps = {}

############################################################################


def _import_raster_maps_from_geotiff(maplist, overr, exp, location, link):
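    """!Import the GeoTIFF raster files listed in maplist with r.in.gdal,
       or link them with r.external, and apply color rules from the
       corresponding *.color files if present.
    """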
    impflags = ""
    if overr:
        impflags += "o"
    if exp or location:
        impflags += "e"
    for row in maplist:
        name = row["name"]
        filename = str(row["name"]) + ".tif"

        if link:
            ret = core.run_command("r.external", input=filename,
                                   output=name,
                                   flags=impflags,
                                   overwrite=core.overwrite())
        else:
            ret = core.run_command("r.in.gdal", input=filename,
                                   output=name,
                                   flags=impflags,
                                   overwrite=core.overwrite())

        if ret != 0:
            core.fatal(_("Unable to import/link raster map <%s>.") % name)

        # Set the color rules if present
        filename = str(row["name"]) + ".color"
        if os.path.isfile(filename):
            ret = core.run_command("r.colors", map=name,
                                   rules=filename,
                                   overwrite=core.overwrite())
            if ret != 0:
                core.fatal(_("Unable to set the color rules for "
                             "raster map <%s>.") % name)

############################################################################


def _import_raster_maps(maplist):
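    """!Import the raster maps listed in maplist from GRASS pack files
       with r.unpack.
    """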
    # We need to disable the projection check because of its simple
    # implementation
    impflags = "o"
    for row in maplist:
        name = row["name"]
        filename = str(row["name"]) + ".pack"
        ret = core.run_command("r.unpack", input=filename,
                               output=name,
                               flags=impflags,
                               overwrite=core.overwrite(),
                               verbose=True)

        if ret != 0:
            core.fatal(_("Unable to unpack raster map <%s>.") % name)

############################################################################


def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
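    """!Import the vector maps listed in maplist from GML (*.xml) files
       with v.in.ogr.
    """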
    impflags = "o"
    if exp or location:
        impflags += "e"
    for row in maplist:
        name = row["name"]
        filename = str(row["name"]) + ".xml"
        ret = core.run_command("v.in.ogr", dsn=filename,
                               output=name,
                               flags=impflags,
                               overwrite=core.overwrite())

        if ret != 0:
            core.fatal(_("Unable to import vector map <%s>.") % name)

############################################################################


def _import_vector_maps(maplist):
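    """!Import the vector maps listed in maplist from GRASS pack files
       with v.unpack, importing each vector map only once even if it is
       listed with several layers.
    """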
    # We need to disable the projection check because of its simple
    # implementation
    impflags = "o"
    for row in maplist:
        # Separate the name from the layer
        name = row["name"].split(":")[0]
        # Import only unique maps
        if name in imported_maps:
            continue
        filename = name + ".pack"
        ret = core.run_command("v.unpack", input=filename,
                               output=name,
                               flags=impflags,
                               overwrite=core.overwrite(),
                               verbose=True)

        if ret != 0:
            core.fatal(_("Unable to unpack vector map <%s>.") % name)

        imported_maps[name] = name

############################################################################


def import_stds(input, output, extrdir, title=None, descr=None,
                location=None, link=False, exp=False, overr=False,
                create=False, stds_type="strds"):
    """!Import space time datasets of type raster and vector

       @param input Name of the input archive file
       @param output The name of the output space time dataset
       @param extrdir The extraction directory
       @param title The title of the newly created space time dataset
       @param descr The description of the newly created space time dataset
       @param location The name of the location that should be created,
                       maps are imported into this location
       @param link Switch to link raster maps instead of importing them
       @param exp Extend location extents based on the new dataset
       @param overr Override the projection (use the location's projection)
       @param create Create the location specified by the "location"
                     parameter and exit. Do not import the space time
                     datasets.
       @param stds_type The type of the space time dataset that
                        should be imported
    """
    core.set_raise_on_error(True)

    # Check if the input file and the extraction directory exist
    if not os.path.exists(input):
        core.fatal(_("Space time raster dataset archive <%s> not found")
                   % input)
    if not create and not os.path.exists(extrdir):
        core.fatal(_("Extraction directory <%s> not found") % extrdir)

    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    members = tar.getnames()

    if init_file_name not in members:
        core.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in members:
        core.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in members:
        core.fatal(_("Unable to find projection file <%s>") % proj_file_name)

    tar.extractall(path=extrdir)
    tar.close()

    # Save the current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(extrdir)

    # Check the projection information
    if not location:
        temp_name = core.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        p = core.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not core.compare_key_value_text_files(temp_name, proj_name,
                                                 sep="="):
            if overr:
                core.warning(_("Projection information does not match. "
                               "Proceeding..."))
            else:
                core.fatal(_("Projection information does not match. "
                             "Aborting."))

    # Create a new location based on the projection information
    # and switch into it
    old_env = core.gisenv()
    if location:
        try:
            proj4_string = open(proj_file_name, 'r').read()
            core.create_location(dbase=old_env["GISDBASE"],
                                 location=location,
                                 proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            core.fatal(_("Unable to create location %s. Reason: %s")
                       % (location, str(e)))
        # Switch to the newly created location
        ret = core.run_command("g.mapset", mapset="PERMANENT",
                               location=location,
                               gisdbase=old_env["GISDBASE"])
        if ret != 0:
            core.fatal(_("Unable to switch to location %s") % location)
        # Create the default temporal database connection
        ret = core.run_command("t.connect", flags="d")
        if ret != 0:
            core.fatal(_("Unable to create the default temporal database "
                         "in the new location %s") % location)
    try:
        # Make sure the temporal database exists
        create_temporal_database()

        fs = "|"
        maplist = []
        mapset = core.gisenv()["MAPSET"]
        list_file = open(list_file_name, "r")

        # Read the map list from the file
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            mapname = line_list[0].strip()
            mapid = mapname + "@" + mapset

            row = {}
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            maplist.append(row)
            line_count += 1

        list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            core.fatal(_("Keywords %s, %s or %s not found in the init file.")
                       % ("temporal_type", "semantic_type", "number_of_maps"))

        if line_count != int(init["number_of_maps"]):
            core.fatal(_("Number of maps mismatch in init and list file."))

        _format = "GTiff"
        _type = "strds"

        if "stds_type" in init:
            _type = init["stds_type"]
        if "format" in init:
            _format = init["format"]

        if stds_type != _type:
            core.fatal(_("The archive file is of wrong space time dataset "
                         "type"))

        # Check the existence of the files
        if _format == "GTiff":
            for row in maplist:
                filename = str(row["name"]) + ".tif"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find GeoTIFF raster file "
                                 "<%s> in archive.") % filename)
        elif _format == "GML":
            for row in maplist:
                filename = str(row["name"]) + ".xml"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find GML vector file "
                                 "<%s> in archive.") % filename)
        elif _format == "pack":
            for row in maplist:
                if _type == "stvds":
                    filename = str(row["name"].split(":")[0]) + ".pack"
                else:
                    filename = str(row["name"]) + ".pack"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find GRASS package file "
                                 "<%s> in archive.") % filename)
        else:
            core.fatal(_("Unsupported input format"))

        # Check the space time dataset
        id = output + "@" + mapset
        sp = dataset_factory(_type, id)
        if sp.is_in_db() and not core.overwrite():
            core.fatal(_("Space time %s dataset <%s> is already in the "
                         "database. Use the overwrite flag.")
                       % (_type, sp.get_id()))

        # Import the maps
        if _type == "strds":
            if _format == "GTiff":
                _import_raster_maps_from_geotiff(
                    maplist, overr, exp, location, link)
            if _format == "pack":
                _import_raster_maps(maplist)
        elif _type == "stvds":
            if _format == "GML":
                _import_vector_maps_from_gml(
                    maplist, overr, exp, location, link)
            if _format == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and core.overwrite():
            core.info(_("Overwrite space time %s dataset <%s> and unregister "
                        "all maps.")
                      % (sp.get_new_map_instance(None).get_type(),
                         sp.get_id()))
            sp.delete()
            sp = sp.get_new_instance(id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        core.verbose(_("Create space time %s dataset.")
                     % sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type,
                              title=title, description=descr)
        sp.insert()

        # Register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output, file=list_file_name, start="file", end="file",
            dbif=None, fs=fs)

        os.chdir(old_cwd)
    except:
        raise
    # Make sure the location is switched back correctly
    finally:
        if location:
            # Switch back to the old location
            ret = core.run_command("g.mapset", mapset=old_env["MAPSET"],
                                   location=old_env["LOCATION_NAME"],
                                   gisdbase=old_env["GISDBASE"])