univar_statistics.py

  1. """!@package grass.temporal
  2. @brief GRASS Python scripting module (temporal GIS functions)
  3. Temporal GIS related functions to be used in Python scripts.
  4. Usage:
  5. @code
  6. import grass.temporal as tgis
  7. tgis.print_gridded_dataset_univar_statistics(
  8. type, input, where, extended, header, fs)
  9. ...
  10. @endcode
  11. (C) 2008-2011 by the GRASS Development Team
  12. This program is free software under the GNU General Public
  13. License (>=v2). Read the file COPYING that comes with GRASS
  14. for details.
  15. @author Soeren Gebbert
  16. """

import grass.script.core as core  # explicit import of the scripting functions used below (gisenv, fatal, parse_command)

from space_time_datasets import *
from factory import *

###############################################################################


def print_gridded_dataset_univar_statistics(type, input, where, extended,
                                            header, fs):
    """!Print univariate statistics for a space time raster or raster3d dataset

       @param type Must be "strds" or "str3ds"
       @param input The name of the space time dataset
       @param where A temporal database where statement
       @param extended If True compute extended statistics
       @param header If True print column names as header
       @param fs Field separator
    """

    # We need a database interface
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = core.gisenv()["MAPSET"]

    # Build a fully qualified dataset id if no mapset was given
    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    sp = dataset_factory(type, id)

    if not sp.is_in_db(dbif):
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    sp.select(dbif)

    rows = sp.get_registered_maps(
        "id,start_time,end_time", where, "start_time", dbif)

    if not rows:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    if header:
        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + \
            "min" + fs + "max" + fs,
        print "mean_of_abs" + fs + "stddev" + fs + "variance" + fs,
        if extended:
            print "coeff_var" + fs + "sum" + fs + \
                "null_cells" + fs + "cells" + fs,
            print "first_quartile" + fs + "median" + fs + \
                "third_quartile" + fs + "percentile_90"
        else:
            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"

    for row in rows:
        id = row["id"]
        start = row["start_time"]
        end = row["end_time"]

        # Shell-style output (-g) of (r|r3).univar, extended statistics (-e)
        # only on demand
        flag = "g"
        if extended:
            flag += "e"

        if type == "strds":
            stats = core.parse_command("r.univar", map=id, flags=flag)
        elif type == "str3ds":
            stats = core.parse_command("r3.univar", map=id, flags=flag)

        print str(id) + fs + str(start) + fs + str(end),
        print fs + str(stats["mean"]) + fs + str(stats["min"]) + \
            fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + \
            fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),

        if extended:
            print fs + str(stats["null_cells"]) + fs + str(
                stats["cells"]) + fs,
            print str(stats["first_quartile"]) + fs + str(stats["median"]) + \
                fs + str(stats["third_quartile"]) + \
                fs + str(stats["percentile_90"])
        else:
            print fs + str(stats["null_cells"]) + fs + str(stats["cells"])

    dbif.close()
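
# Usage sketch (illustrative only): the dataset name "temperature" and the
# "|" separator are assumptions for this example, not defaults of the module;
# a running GRASS session with an initialized temporal database is required.
#
#   import grass.temporal as tgis
#   tgis.print_gridded_dataset_univar_statistics(
#       "strds", "temperature", None, extended=False, header=True, fs="|")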

###############################################################################


def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
                                           where, extended, header, fs):
    """!Print univariate statistics for a space time vector dataset

       @param input The name of the space time dataset
       @param twhere A temporal database where statement
       @param layer The layer number used in case no layer is present
              in the temporal dataset
       @param type options: point,line,boundary,centroid,area
       @param column The name of the attribute column
       @param where An attribute database where statement passed to v.univar
       @param extended If True compute extended statistics
       @param header If True print column names as header
       @param fs Field separator
    """

    # We need a database interface
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = core.gisenv()["MAPSET"]

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    sp = dataset_factory("stvds", id)

    if not sp.is_in_db(dbif):
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    sp.select(dbif)

    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer",
                                  twhere, "start_time", dbif)

    if not rows:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    string = ""
    if header:
        string += "id" + fs + "start" + fs + "end" + fs + "n" + \
            fs + "nmissing" + fs + "nnull" + fs
        string += "min" + fs + "max" + fs + "range"
        if type == "point" or type == "centroid":
            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" + \
                fs + "population_variance" + fs
            string += "population_coeff_variation" + fs + \
                "sample_stddev" + fs + "sample_variance" + fs
            string += "kurtosis" + fs + "skewness"
        if extended:
            string += fs + "first_quartile" + fs + "median" + fs + \
                "third_quartile" + fs + "percentile_90"

        print string

    for row in rows:
        id = row["name"] + "@" + row["mapset"]
        start = row["start_time"]
        end = row["end_time"]
        mylayer = row["layer"]

        flags = "g"
        if extended:
            flags += "e"

        # Fall back to the user supplied layer if the registered map
        # has no layer set in the temporal database
        if not mylayer:
            mylayer = layer

        stats = core.parse_command("v.univar", map=id, where=where,
                                   column=column, layer=mylayer,
                                   type=type, flags=flags)

        string = ""
        if stats:
            string += str(id) + fs + str(start) + fs + str(end)
            string += fs + str(stats["n"]) + fs + str(stats[
                "nmissing"]) + fs + str(stats["nnull"])

            if "min" in stats:
                string += fs + str(stats["min"]) + fs + str(
                    stats["max"]) + fs + str(stats["range"])
            else:
                string += fs + fs + fs

            if type == "point" or type == "centroid":
                if "mean" in stats:
                    string += fs + str(stats["mean"]) + fs + \
                        str(stats["mean_abs"]) + fs + \
                        str(stats["population_stddev"]) + fs + \
                        str(stats["population_variance"])
                    string += fs + str(stats["population_coeff_variation"]) + \
                        fs + str(stats["sample_stddev"]) + fs + \
                        str(stats["sample_variance"])
                    string += fs + str(stats["kurtosis"]) + fs + \
                        str(stats["skewness"])
                else:
                    string += fs + fs + fs + fs + fs + fs + fs + fs + fs

            if extended:
                if "first_quartile" in stats:
                    string += fs + str(stats["first_quartile"]) + fs + \
                        str(stats["median"]) + fs + \
                        str(stats["third_quartile"]) + fs + \
                        str(stats["percentile_90"])
                else:
                    string += fs + fs + fs + fs

            print string

    dbif.close()
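
# Usage sketch for the vector variant (names are illustrative assumptions):
# print per-map statistics of the attribute column "height" for point
# features of a space time vector dataset "obs", tab separated.
#
#   import grass.temporal as tgis
#   tgis.print_vector_dataset_univar_statistics(
#       "obs", None, "1", "point", "height", None,
#       extended=False, header=True, fs="\t")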