  1. """!@package grass.temporal
  2. @brief GRASS Python scripting module (temporal GIS functions)
  3. Temporal GIS export functions to be used in temporal modules
  4. Usage:
  5. @code
  6. import grass.temporal as tgis
  7. input="/tmp/temp_1950_2012.tar.gz"
  8. output="temp_1950_2012"
  9. extrdir="/tmp"
  10. title="My new dataset"
  11. descr="May new shiny dataset"
  12. location=None
  13. link=True
  14. exp=True
  15. overr=False
  16. create=False
  17. tgis.import_stds(input, output, extrdir, title, descr, location,
  18. link, exp, overr, create, "strds")
  19. ...
  20. @endcode
  21. (C) 2012-2013 by the GRASS Development Team
  22. This program is free software under the GNU General Public
  23. License (>=v2). Read the file COPYING that comes with GRASS
  24. for details.
  25. @author Soeren Gebbert
  26. """
import os
import os.path
import tarfile

from space_time_datasets import *
from register import *
import factory
from factory import *

proj_file_name = "proj.txt"
init_file_name = "init.txt"
list_file_name = "list.txt"

# This global variable is used for unique vector map import,
# since a single vector map may have several layers
# and therefore several attribute tables
imported_maps = {}

############################################################################

def _import_raster_maps_from_gdal(maplist, overr, exp, location, link, format_):
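    """!Import or link raster maps from GDAL supported files (GeoTIFF or
       AAIGrid) and set color rules if a corresponding *.color file exists.
    """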
  43. impflags = ""
  44. if overr:
  45. impflags += "o"
  46. if exp or location:
  47. impflags += "e"
  48. for row in maplist:
  49. name = row["name"]
  50. if format_ == "GTiff":
  51. filename = row["filename"] + ".tif"
  52. elif format_=="AAIGrid":
  53. filename = row["filename"] + ".asc"
  54. if not overr:
  55. impflags += "o"
  56. if link:
  57. ret = core.run_command("r.external", input=filename,
  58. output=name,
  59. flags=impflags,
  60. overwrite=core.overwrite())
  61. else:
  62. ret = core.run_command("r.in.gdal", input=filename,
  63. output=name,
  64. flags=impflags,
  65. overwrite=core.overwrite())
  66. if ret != 0:
  67. core.fatal(_("Unable to import/link raster map <%s> from file %s.") %(name,
  68. filename))
  69. # Set the color rules if present
  70. filename = row["filename"] + ".color"
  71. if os.path.isfile(filename):
  72. ret = core.run_command("r.colors", map=name,
  73. rules=filename,
  74. overwrite=core.overwrite())
  75. if ret != 0:
  76. core.fatal(_("Unable to set the color rules for "
  77. "raster map <%s>.") % name)

############################################################################

def _import_raster_maps(maplist):
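    """!Import raster maps from r.pack archives (*.pack files) using
       r.unpack.
    """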
    # We need to disable the projection check because of its
    # simple implementation
    impflags = "o"
    for row in maplist:
        name = row["name"]
        filename = row["filename"] + ".pack"
        ret = core.run_command("r.unpack", input=filename,
                               output=name,
                               flags=impflags,
                               overwrite=core.overwrite(),
                               verbose=True)

        if ret != 0:
            core.fatal(_("Unable to unpack raster map <%s> from file "
                         "%s.") % (name, filename))

############################################################################

def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
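    """!Import vector maps from GML files (*.xml) using v.in.ogr."""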
  96. impflags = "o"
  97. if exp or location:
  98. impflags += "e"
  99. for row in maplist:
  100. name = row["name"]
  101. filename = row["filename"] + ".xml"
  102. ret = core.run_command("v.in.ogr", dsn=filename,
  103. output=name,
  104. flags=impflags,
  105. overwrite=core.overwrite())
  106. if ret != 0:
  107. core.fatal(_("Unable to import vector map <%s> from file %s.") % (name,
  108. filename))

############################################################################

def _import_vector_maps(maplist):
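    """!Import vector maps from v.pack archives (*.pack files), unpacking
       each vector map only once even if it is listed with several layers.
    """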
    # We need to disable the projection check because of its
    # simple implementation
    impflags = "o"
    for row in maplist:
        # Separate the name from the layer
        name = row["name"].split(":")[0]
        # Import only unique maps
        if name in imported_maps:
            continue
        filename = row["filename"] + ".pack"
        ret = core.run_command("v.unpack", input=filename,
                               output=name,
                               flags=impflags,
                               overwrite=core.overwrite(),
                               verbose=True)

        if ret != 0:
            core.fatal(_("Unable to unpack vector map <%s> from file "
                         "%s.") % (name, filename))

        imported_maps[name] = name

############################################################################

def import_stds(input, output, extrdir, title=None, descr=None, location=None,
                link=False, exp=False, overr=False, create=False,
                stds_type="strds", base=None):
  133. """!Import space time datasets of type raster and vector
  134. @param input Name of the input archive file
  135. @param output The name of the output space time dataset
  136. @param extrdir The extraction directory
  137. @param title The title of the new created space time dataset
  138. @param descr The description of the new created
  139. space time dataset
  140. @param location The name of the location that should be created,
  141. maps are imported into this location
  142. @param link Switch to link raster maps instead importing them
  143. @param exp Extend location extents based on new dataset
  144. @param overr Override projection (use location's projection)
  145. @param create Create the location specified by the "location"
  146. parameter and exit.
  147. Do not import the space time datasets.
  148. @param stds_type The type of the space time dataset that
  149. should be imported
  150. @param base The base name of the new imported maps, it will be extended
  151. using a numerical index.
  152. """
    global raise_on_error

    old_state = core.raise_on_error
    core.set_raise_on_error(True)

    # Check if the input file and the extraction directory exist
    if not os.path.exists(input):
        core.fatal(_("Space time raster dataset archive <%s> not found")
                   % input)

    if not create and not os.path.exists(extrdir):
        core.fatal(_("Extraction directory <%s> not found") % extrdir)
    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    members = tar.getnames()

    if init_file_name not in members:
        core.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in members:
        core.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in members:
        core.fatal(_("Unable to find projection file <%s>") % proj_file_name)

    tar.extractall(path=extrdir)
    tar.close()

    # We use a new list file name for map registration
    new_list_file_name = list_file_name + "_new"

    # Save current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(extrdir)

    # Check projection information
    if not location:
        temp_name = core.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        p = core.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not core.compare_key_value_text_files(temp_name, proj_name,
                                                 sep="="):
            if overr:
                core.warning(_("Projection information does not match. "
                               "Proceeding..."))
            else:
                diff = ''.join(core.diff_files(temp_name, proj_name))
                core.warning(_("Difference between PROJ_INFO file of "
                               "imported map and of current location:"
                               "\n{diff}").format(diff=diff))
                core.fatal(_("Projection information does not match. "
                             "Aborting."))

    # Create a new location based on the projection information and switch
    # into it
    old_env = core.gisenv()

    if location:
        try:
            proj4_string = open(proj_file_name, 'r').read()
            core.create_location(dbase=old_env["GISDBASE"],
                                 location=location,
                                 proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            core.fatal(_("Unable to create location %(l)s. Reason: %(e)s")
                       % {'l': location, 'e': str(e)})

        # Switch to the newly created location
        ret = core.run_command("g.mapset", mapset="PERMANENT",
                               location=location,
                               gisdbase=old_env["GISDBASE"])
        if ret != 0:
            core.fatal(_("Unable to switch to location %s") % location)

        # Create the default temporal database connection
        ret = core.run_command("t.connect", flags="d")
        if ret != 0:
            core.fatal(_("Unable to create default temporal database "
                         "in new location %s") % location)
    try:
        # Make sure the temporal database exists
        factory.init()

        fs = "|"
        maplist = []
        mapset = get_current_mapset()
        list_file = open(list_file_name, "r")
        new_list_file = open(new_list_file_name, "w")

        # Read the map list from file
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            # The filename is actually the base name of the map
            # that must be extended by the file suffix
            filename = line_list[0].strip().split(":")[0]
            if base:
                mapname = "%s_%i" % (base, line_count)
                mapid = "%s@%s" % (mapname, mapset)
            else:
                mapname = filename
                mapid = mapname + "@" + mapset

            row = {}
            row["filename"] = filename
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            new_list_file.write("%s%s%s%s%s\n" % (mapname, fs, row["start"],
                                                  fs, row["end"]))

            maplist.append(row)
            line_count += 1

        list_file.close()
        new_list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            core.fatal(_("Key words %(t)s, %(s)s or %(n)s not found in init"
                         " file.") % {'t': "temporal_type",
                                      's': "semantic_type",
                                      'n': "number_of_maps"})

        if line_count != int(init["number_of_maps"]):
            core.fatal(_("Number of maps mismatch in init and list file."))

        format_ = "GTiff"
        type_ = "strds"

        if "stds_type" in init:
            type_ = init["stds_type"]
        if "format" in init:
            format_ = init["format"]

        if stds_type != type_:
            core.fatal(_("The archive file is of wrong space time dataset "
                         "type"))

        # Check the existence of the files
        if format_ == "GTiff":
            for row in maplist:
                filename = row["filename"] + ".tif"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find GeoTIFF raster file "
                                 "<%s> in archive.") % filename)
        elif format_ == "AAIGrid":
            for row in maplist:
                filename = row["filename"] + ".asc"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find AAIGrid raster file "
                                 "<%s> in archive.") % filename)
        elif format_ == "GML":
            for row in maplist:
                filename = row["filename"] + ".xml"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find GML vector file "
                                 "<%s> in archive.") % filename)
        elif format_ == "pack":
            for row in maplist:
                if type_ == "stvds":
                    filename = str(row["filename"].split(":")[0]) + ".pack"
                else:
                    filename = row["filename"] + ".pack"
                if not os.path.exists(filename):
                    core.fatal(_("Unable to find GRASS package file "
                                 "<%s> in archive.") % filename)
        else:
            core.fatal(_("Unsupported input format"))

        # Check the space time dataset
        id = output + "@" + mapset
        sp = dataset_factory(type_, id)
        if sp.is_in_db() and not core.overwrite():
            core.fatal(_("Space time %(t)s dataset <%(sp)s> is already in"
                         " the database. Use the overwrite flag.") %
                       {'t': type_, 'sp': sp.get_id()})

        # Import the maps
        if type_ == "strds":
            if format_ == "GTiff" or format_ == "AAIGrid":
                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                              link, format_)
            if format_ == "pack":
                _import_raster_maps(maplist)
        elif type_ == "stvds":
            if format_ == "GML":
                _import_vector_maps_from_gml(maplist, overr, exp, location,
                                             link)
            if format_ == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and core.overwrite():
            core.info(_("Overwrite space time %(sp)s dataset "
                        "<%(id)s> and unregister all maps.") % {
                        'sp': sp.get_new_map_instance(None).get_type(),
                        'id': sp.get_id()})
            sp.delete()
            sp = sp.get_new_instance(id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        relative_time_unit = None
        if temporal_type == "relative":
            if "relative_time_unit" not in init:
                core.fatal(_("Key word %s not found in init file.") %
                           ("relative_time_unit"))
            relative_time_unit = init["relative_time_unit"]
            sp.set_relative_time_unit(relative_time_unit)

        core.verbose(_("Create space time %s dataset.") %
                     sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type, title=title,
                              description=descr)
        sp.insert()

        # Register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output, file=new_list_file_name, start="file",
            end="file", unit=relative_time_unit, dbif=None, fs=fs)

        os.chdir(old_cwd)
    except:
        raise
    # Make sure the location is switched back correctly
    finally:
        if location:
            # Switch back to the old location
            ret = core.run_command("g.mapset", mapset=old_env["MAPSET"],
                                   location=old_env["LOCATION_NAME"],
                                   gisdbase=old_env["GISDBASE"])

        core.set_raise_on_error(old_state)