univar_statistics.py 7.7 KB

  1. """!@package grass.temporal
  2. @brief GRASS Python scripting module (temporal GIS functions)
  3. Temporal GIS related functions to be used in Python scripts.
  4. Usage:
  5. @code
  6. import grass.temporal as tgis
  7. tgis.print_gridded_dataset_univar_statistics(
  8. type, input, where, extended, header, fs)
  9. ...
  10. @endcode
  11. (C) 2008-2011 by the GRASS Development Team
  12. This program is free software under the GNU General Public
  13. License (>=v2). Read the file COPYING that comes with GRASS
  14. for details.
  15. @author Soeren Gebbert
  16. """
from space_time_datasets import *
from factory import *
from open import *
# Explicit import so the module does not rely on names re-exported by the
# star imports above; core provides fatal(), parse_command() and gisenv()
import grass.script.core as core

###############################################################################
def print_gridded_dataset_univar_statistics(type, input, where, extended,
                                            header, fs):
    """!Print univariate statistics for a space time raster or raster3d dataset

       @param type Must be "strds" or "str3ds"
       @param input The name of the space time dataset
       @param where A temporal database where statement
       @param extended If True compute extended statistics
       @param header If True print column names as header
       @param fs Field separator
    """

    # We need a database interface
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = open_old_space_time_dataset(input, type, dbif)

    rows = sp.get_registered_maps(
        "id,start_time,end_time", where, "start_time", dbif)

    if not rows:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': input})

    if header == True:
        print "id" + fs + "start" + fs + "end" + fs + "mean" + fs + \
              "min" + fs + "max" + fs,
        print "mean_of_abs" + fs + "stddev" + fs + "variance" + fs,
        if extended == True:
            print "coeff_var" + fs + "sum" + fs + \
                  "null_cells" + fs + "cells" + fs,
            print "first_quartile" + fs + "median" + fs + \
                  "third_quartile" + fs + "percentile_90"
        else:
            print "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"

    # Run (r3.)univar for each registered map and print one line per map
    for row in rows:
        id = row["id"]
        start = row["start_time"]
        end = row["end_time"]

        flag = "g"
        if extended == True:
            flag += "e"

        if type == "strds":
            stats = core.parse_command("r.univar", map=id, flags=flag)
        elif type == "str3ds":
            stats = core.parse_command("r3.univar", map=id, flags=flag)

        print str(id) + fs + str(start) + fs + str(end),
        print fs + str(stats["mean"]) + fs + str(stats["min"]) + \
            fs + str(stats["max"]) + fs + str(stats["mean_of_abs"]),
        print fs + str(stats["stddev"]) + fs + str(stats["variance"]) + \
            fs + str(stats["coeff_var"]) + fs + str(stats["sum"]),

        if extended == True:
            print fs + str(stats["null_cells"]) + fs + str(
                stats["cells"]) + fs,
            print str(stats["first_quartile"]) + fs + str(stats["median"]) + \
                fs + str(stats["third_quartile"]) + \
                fs + str(stats["percentile_90"])
        else:
            print fs + str(stats["null_cells"]) + fs + str(stats["cells"])

    dbif.close()
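
# A minimal, hedged usage sketch for the function above: the dataset name,
# mapset and where clause are illustrative assumptions, not part of this
# module. It assumes a running GRASS session with an initialized temporal
# database.
#
#   import grass.temporal as tgis
#
#   tgis.print_gridded_dataset_univar_statistics(
#       type="strds", input="temperature@climate",
#       where="start_time >= '2010-01-01'",
#       extended=False, header=True, fs="|")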
###############################################################################
def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
                                           where, extended, header, fs):
    """!Print univariate statistics for a space time vector dataset

       @param input The name of the space time dataset
       @param twhere A temporal database where statement
       @param layer The layer number used in case no layer is present
              in the temporal dataset
       @param type options: point,line,boundary,centroid,area
       @param column The name of the attribute column
       @param where A where statement applied to the attribute table
              (passed to v.univar)
       @param extended If True compute extended statistics
       @param header If True print column names as header
       @param fs Field separator
    """

    # We need a database interface
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = core.gisenv()["MAPSET"]

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    sp = dataset_factory("stvds", id)

    if sp.is_in_db(dbif) == False:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    sp.select(dbif)

    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer",
                                  twhere, "start_time", dbif)

    if not rows:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    string = ""
    if header == True:
        string += "id" + fs + "start" + fs + "end" + fs + "n" + \
            fs + "nmissing" + fs + "nnull" + fs
        string += "min" + fs + "max" + fs + "range"
        if type == "point" or type == "centroid":
            string += fs + "mean" + fs + "mean_abs" + fs + \
                "population_stddev" + fs + "population_variance" + fs
            string += "population_coeff_variation" + fs + \
                "sample_stddev" + fs + "sample_variance" + fs
            string += "kurtosis" + fs + "skewness"
            if extended == True:
                string += fs + "first_quartile" + fs + "median" + fs + \
                    "third_quartile" + fs + "percentile_90"
        print string

    # Run v.univar for each registered map and print one line per map
    for row in rows:
        id = row["name"] + "@" + row["mapset"]
        start = row["start_time"]
        end = row["end_time"]
        mylayer = row["layer"]

        flags = "g"
        if extended == True:
            flags += "e"

        # Fall back to the layer given as argument if the map carries none
        if not mylayer:
            mylayer = layer

        stats = core.parse_command("v.univar", map=id, where=where,
                                   column=column, layer=mylayer,
                                   type=type, flags=flags)

        string = ""
        if stats:
            string += str(id) + fs + str(start) + fs + str(end)
            string += fs + str(stats["n"]) + fs + str(stats[
                "nmissing"]) + fs + str(stats["nnull"])

            if "min" in stats:
                string += fs + str(stats["min"]) + fs + str(
                    stats["max"]) + fs + str(stats["range"])
            else:
                string += fs + fs + fs

            if type == "point" or type == "centroid":
                if "mean" in stats:
                    string += fs + str(stats["mean"]) + fs + \
                        str(stats["mean_abs"]) + fs + \
                        str(stats["population_stddev"]) + fs + \
                        str(stats["population_variance"])
                    string += fs + str(stats["population_coeff_variation"]) + \
                        fs + str(stats["sample_stddev"]) + fs + \
                        str(stats["sample_variance"])
                    string += fs + str(stats["kurtosis"]) + fs + \
                        str(stats["skewness"])
                else:
                    string += fs + fs + fs + fs + fs + fs + fs + fs + fs

                if extended == True:
                    if "first_quartile" in stats:
                        string += fs + str(stats["first_quartile"]) + fs + \
                            str(stats["median"]) + fs + \
                            str(stats["third_quartile"]) + fs + \
                            str(stats["percentile_90"])
                    else:
                        string += fs + fs + fs + fs

            print string

    dbif.close()
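
# A minimal, hedged usage sketch for the vector variant above: the dataset
# name, attribute column and where clause are illustrative assumptions only.
# It assumes a running GRASS session with an initialized temporal database and
# a space time vector dataset whose maps carry a numeric attribute column.
#
#   import grass.temporal as tgis
#
#   tgis.print_vector_dataset_univar_statistics(
#       input="precip_stations", twhere=None, layer=1, type="point",
#       column="precip", where="precip >= 0", extended=True,
#       header=True, fs="|")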