stds_import.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451
  1. """!@package grass.temporal
  2. @brief GRASS Python scripting module (temporal GIS functions)
  3. Temporal GIS export functions to be used in temporal modules
  4. Usage:
  5. @code
  6. import grass.temporal as tgis
  7. input="/tmp/temp_1950_2012.tar.gz"
  8. output="temp_1950_2012"
  9. extrdir="/tmp"
  10. title="My new dataset"
  11. descr="My new shiny dataset"
  12. location=None
  13. link=True
  14. exp=True
  15. overr=False
  16. create=False
  17. tgis.import_stds(input, output, extrdir, title, descr, location,
  18. link, exp, overr, create, "strds")
  19. ...
  20. @endcode
  21. (C) 2008-2011 by the GRASS Development Team
  22. This program is free software under the GNU General Public
  23. License (>=v2). Read the file COPYING that comes with GRASS
  24. for details.
  25. @author Soeren Gebbert
  26. """
  27. import os
  28. import os.path
  29. import tarfile
  30. import core
  31. from space_time_datasets import *
  32. from register import *
  33. import factory
  34. from factory import *
# Well-known file names inside a space time dataset archive:
# proj.txt holds the projection parameters the maps were exported with,
# init.txt holds key=value metadata (temporal_type, semantic_type,
# number_of_maps, stds_type, format), and list.txt lists one map per
# line as "name|start|end".
proj_file_name = "proj.txt"
init_file_name = "init.txt"
list_file_name = "list.txt"

# This global variable is for unique vector map import,
# since single vector maps may have several layers
# and therefore several attribute tables; maps already unpacked are
# recorded here so they are not unpacked twice.
imported_maps = {}
  42. ############################################################################
  43. def _import_raster_maps_from_gdal(maplist, overr, exp, location, link, format_):
  44. impflags = ""
  45. if overr:
  46. impflags += "o"
  47. if exp or location:
  48. impflags += "e"
  49. for row in maplist:
  50. name = row["name"]
  51. if format_ == "GTiff":
  52. filename = row["filename"] + ".tif"
  53. elif format_=="AAIGrid":
  54. filename = row["filename"] + ".asc"
  55. if not overr:
  56. impflags += "o"
  57. if link:
  58. ret = core.run_command("r.external", input=filename,
  59. output=name,
  60. flags=impflags,
  61. overwrite=core.overwrite())
  62. else:
  63. ret = core.run_command("r.in.gdal", input=filename,
  64. output=name,
  65. flags=impflags,
  66. overwrite=core.overwrite())
  67. if ret != 0:
  68. core.fatal(_("Unable to import/link raster map <%s> from file %s.") %(name,
  69. filename))
  70. # Set the color rules if present
  71. filename = row["filename"] + ".color"
  72. if os.path.isfile(filename):
  73. ret = core.run_command("r.colors", map=name,
  74. rules=filename,
  75. overwrite=core.overwrite())
  76. if ret != 0:
  77. core.fatal(_("Unable to set the color rules for "
  78. "raster map <%s>.") % name)
  79. ############################################################################
  80. def _import_raster_maps(maplist):
  81. # We need to disable the projection check because of its
  82. # simple implementation
  83. impflags = "o"
  84. for row in maplist:
  85. name = row["name"]
  86. filename = row["filename"] + ".pack"
  87. ret = core.run_command("r.unpack", input=filename,
  88. output=name,
  89. flags=impflags,
  90. overwrite=core.overwrite(),
  91. verbose=True)
  92. if ret != 0:
  93. core.fatal(_("Unable to unpack raster map <%s> from file %s.") % (name,
  94. filename))
  95. ############################################################################
  96. def _import_vector_maps_from_gml(maplist, overr, exp, location, link):
  97. impflags = "o"
  98. if exp or location:
  99. impflags += "e"
  100. for row in maplist:
  101. name = row["name"]
  102. filename = row["filename"] + ".xml"
  103. ret = core.run_command("v.in.ogr", dsn=filename,
  104. output=name,
  105. flags=impflags,
  106. overwrite=core.overwrite())
  107. if ret != 0:
  108. core.fatal(_("Unable to import vector map <%s> from file %s.") % (name,
  109. filename))
  110. ############################################################################
  111. def _import_vector_maps(maplist):
  112. # We need to disable the projection check because of its
  113. # simple implementation
  114. impflags = "o"
  115. for row in maplist:
  116. # Separate the name from the layer
  117. name = row["name"].split(":")[0]
  118. # Import only unique maps
  119. if name in imported_maps:
  120. continue
  121. filename = row["filename"] + ".pack"
  122. ret = core.run_command("v.unpack", input=filename,
  123. output=name,
  124. flags=impflags,
  125. overwrite=core.overwrite(),
  126. verbose=True)
  127. if ret != 0:
  128. core.fatal(_("Unable to unpack vector map <%s> from file %s.") % (name,
  129. filename))
  130. imported_maps[name] = name
  131. ############################################################################
  132. def import_stds(input, output, extrdir, title=None, descr=None, location=None,
  133. link=False, exp=False, overr=False, create=False, stds_type="strds", base=None):
  134. """!Import space time datasets of type raster and vector
  135. @param input Name of the input archive file
  136. @param output The name of the output space time dataset
  137. @param extrdir The extraction directory
  138. @param title The title of the new created space time dataset
  139. @param descr The description of the new created
  140. space time dataset
  141. @param location The name of the location that should be created,
  142. maps are imported into this location
  143. @param link Switch to link raster maps instead importing them
  144. @param exp Extend location extents based on new dataset
  145. @param overr Override projection (use location's projection)
  146. @param create Create the location specified by the "location"
  147. parameter and exit.
  148. Do not import the space time datasets.
  149. @param stds_type The type of the space time dataset that
  150. should be imported
  151. @param base The base name of the new imported maps, it will be extended
  152. using a numerical index.
  153. """
  154. global raise_on_error
  155. old_state = core.raise_on_error
  156. core.set_raise_on_error(True)
  157. # Check if input file and extraction directory exits
  158. if not os.path.exists(input):
  159. core.fatal(_("Space time raster dataset archive <%s> not found")
  160. % input)
  161. if not create and not os.path.exists(extrdir):
  162. core.fatal(_("Extraction directory <%s> not found") % extrdir)
  163. tar = tarfile.open(name=input, mode='r')
  164. # Check for important files
  165. members = tar.getnames()
  166. if init_file_name not in members:
  167. core.fatal(_("Unable to find init file <%s>") % init_file_name)
  168. if list_file_name not in members:
  169. core.fatal(_("Unable to find list file <%s>") % list_file_name)
  170. if proj_file_name not in members:
  171. core.fatal(_("Unable to find projection file <%s>") % proj_file_name)
  172. tar.extractall(path=extrdir)
  173. tar.close()
  174. # We use a new list file name for map registration
  175. new_list_file_name = list_file_name + "_new"
  176. # Save current working directory path
  177. old_cwd = os.getcwd()
  178. # Switch into the data directory
  179. os.chdir(extrdir)
  180. # Check projection information
  181. if not location:
  182. temp_name = core.tempfile()
  183. temp_file = open(temp_name, "w")
  184. proj_name = os.path.abspath(proj_file_name)
  185. p = core.start_command("g.proj", flags="j", stdout=temp_file)
  186. p.communicate()
  187. temp_file.close()
  188. if not core.compare_key_value_text_files(temp_name, proj_name, sep="="):
  189. if overr:
  190. core.warning(_("Projection information does not match. "
  191. "Proceeding..."))
  192. else:
  193. diff = ''.join(core.diff_files(temp_name, proj_name))
  194. core.warning(_("Difference between PROJ_INFO file of imported map "
  195. "and of current location:\n{diff}").format(diff=diff))
  196. core.fatal(_("Projection information does not match. Aborting."))
  197. # Create a new location based on the projection information and switch
  198. # into it
  199. old_env = core.gisenv()
  200. if location:
  201. try:
  202. proj4_string = open(proj_file_name, 'r').read()
  203. core.create_location(dbase=old_env["GISDBASE"],
  204. location=location,
  205. proj4=proj4_string)
  206. # Just create a new location and return
  207. if create:
  208. os.chdir(old_cwd)
  209. return
  210. except Exception as e:
  211. core.fatal(_("Unable to create location %(l)s. Reason: %(e)s")
  212. % {'l': location, 'e': str(e)})
  213. # Switch to the new created location
  214. ret = core.run_command("g.mapset", mapset="PERMANENT",
  215. location=location,
  216. gisdbase=old_env["GISDBASE"])
  217. if ret != 0:
  218. core.fatal(_("Unable to switch to location %s") % location)
  219. # create default database connection
  220. ret = core.run_command("t.connect", flags="d")
  221. if ret != 0:
  222. core.fatal(_("Unable to create default temporal database "
  223. "in new location %s") % location)
  224. try:
  225. # Make sure the temporal database exists
  226. factory.init()
  227. fs = "|"
  228. maplist = []
  229. mapset = get_current_mapset()
  230. list_file = open(list_file_name, "r")
  231. new_list_file = open(new_list_file_name, "w")
  232. # Read the map list from file
  233. line_count = 0
  234. while True:
  235. line = list_file.readline()
  236. if not line:
  237. break
  238. line_list = line.split(fs)
  239. # The filename is actually the base name of the map
  240. # that must be extended by the file suffix
  241. filename = line_list[0].strip().split(":")[0]
  242. if base:
  243. mapname = "%s_%i"%(base, line_count)
  244. mapid= "%s@%s"%(mapname, mapset)
  245. else:
  246. mapname = filename
  247. mapid = mapname + "@" + mapset
  248. row = {}
  249. row["filename"] = filename
  250. row["name"] = mapname
  251. row["id"] = mapid
  252. row["start"] = line_list[1].strip()
  253. row["end"] = line_list[2].strip()
  254. new_list_file.write("%s%s%s%s%s\n"%(mapname,fs, row["start"],
  255. fs, row["end"]))
  256. maplist.append(row)
  257. line_count += 1
  258. list_file.close()
  259. new_list_file.close()
  260. # Read the init file
  261. fs = "="
  262. init = {}
  263. init_file = open(init_file_name, "r")
  264. while True:
  265. line = init_file.readline()
  266. if not line:
  267. break
  268. kv = line.split(fs)
  269. init[kv[0]] = kv[1].strip()
  270. init_file.close()
  271. if "temporal_type" not in init or \
  272. "semantic_type" not in init or \
  273. "number_of_maps" not in init:
  274. core.fatal(_("Key words %(t)s, %(s)s or %(n)s not found in init"
  275. " file.") % {'t': "temporal_type",
  276. 's': "semantic_type",
  277. 'n': "number_of_maps"})
  278. if line_count != int(init["number_of_maps"]):
  279. core.fatal(_("Number of maps mismatch in init and list file."))
  280. format_ = "GTiff"
  281. type_ = "strds"
  282. if "stds_type" in init:
  283. type_ = init["stds_type"]
  284. if "format" in init:
  285. format_ = init["format"]
  286. if stds_type != type_:
  287. core.fatal(_("The archive file is of wrong space time dataset type"))
  288. # Check the existence of the files
  289. if format_ == "GTiff":
  290. for row in maplist:
  291. filename = row["filename"] + ".tif"
  292. if not os.path.exists(filename):
  293. core.fatal(_("Unable to find geotiff raster file "
  294. "<%s> in archive.") % filename)
  295. elif format_ == "AAIGrid":
  296. for row in maplist:
  297. filename = row["filename"] + ".asc"
  298. if not os.path.exists(filename):
  299. core.fatal(_("Unable to find AAIGrid raster file "
  300. "<%s> in archive.") % filename)
  301. elif format_ == "GML":
  302. for row in maplist:
  303. filename = row["filename"] + ".xml"
  304. if not os.path.exists(filename):
  305. core.fatal(_("Unable to find GML vector file "
  306. "<%s> in archive.") % filename)
  307. elif format_ == "pack":
  308. for row in maplist:
  309. if type_ == "stvds":
  310. filename = str(row["filename"].split(":")[0]) + ".pack"
  311. else:
  312. filename = row["filename"] + ".pack"
  313. if not os.path.exists(filename):
  314. core.fatal(_("Unable to find GRASS package file "
  315. "<%s> in archive.") % filename)
  316. else:
  317. core.fatal(_("Unsupported input format"))
  318. # Check the space time dataset
  319. id = output + "@" + mapset
  320. sp = dataset_factory(type_, id)
  321. if sp.is_in_db() and core.overwrite() == False:
  322. core.fatal(_("Space time %(t)s dataset <%(sp)s> is already in the "
  323. "database. Use the overwrite flag.") % {'t': type_,
  324. 'sp': sp.get_id()})
  325. # Import the maps
  326. if type_ == "strds":
  327. if format_ == "GTiff" or format_ == "AAIGrid":
  328. _import_raster_maps_from_gdal(
  329. maplist, overr, exp, location, link, format_)
  330. if format_ == "pack":
  331. _import_raster_maps(maplist)
  332. elif type_ == "stvds":
  333. if format_ == "GML":
  334. _import_vector_maps_from_gml(
  335. maplist, overr, exp, location, link)
  336. if format_ == "pack":
  337. _import_vector_maps(maplist)
  338. # Create the space time dataset
  339. if sp.is_in_db() and core.overwrite() == True:
  340. core.info(_("Overwrite space time %(sp)s dataset "
  341. "<%(id)s> and unregister all maps.") % {
  342. 'sp': sp.get_new_map_instance(None).get_type(),
  343. 'id': sp.get_id()})
  344. sp.delete()
  345. sp = sp.get_new_instance(id)
  346. temporal_type = init["temporal_type"]
  347. semantic_type = init["semantic_type"]
  348. relative_time_unit = None
  349. if temporal_type == "relative":
  350. if "relative_time_unit" not in init:
  351. core.fatal(_("Key word %s not found in init file.") % ("relative_time_unit"))
  352. relative_time_unit = init["relative_time_unit"]
  353. sp.set_relative_time_unit(relative_time_unit)
  354. core.verbose(_("Create space time %s dataset.") %
  355. sp.get_new_map_instance(None).get_type())
  356. sp.set_initial_values(temporal_type=temporal_type,
  357. semantic_type=semantic_type, title=title,
  358. description=descr)
  359. sp.insert()
  360. # register the maps
  361. fs = "|"
  362. register_maps_in_space_time_dataset(
  363. type=sp.get_new_map_instance(None).get_type(),
  364. name=output, file=new_list_file_name, start="file",
  365. end="file", unit=relative_time_unit, dbif=None, fs=fs)
  366. os.chdir(old_cwd)
  367. except:
  368. raise
  369. # Make sure the location is switched back correctly
  370. finally:
  371. if location:
  372. # Switch to the old location
  373. ret = core.run_command("g.mapset", mapset=old_env["MAPSET"],
  374. location=old_env["LOCATION_NAME"],
  375. gisdbase=old_env["GISDBASE"])
  376. core.set_raise_on_error(old_state)