univar_statistics.py

  1. """!@package grass.temporal
  2. @brief GRASS Python scripting module (temporal GIS functions)
  3. Temporal GIS related functions to be used in Python scripts.
  4. Usage:
  5. @code
  6. import grass.temporal as tgis
  7. tgis.print_gridded_dataset_univar_statistics(
  8. type, input, where, extended, header, fs)
  9. ...
  10. @endcode
  11. (C) 2008-2011 by the GRASS Development Team
  12. This program is free software under the GNU General Public
  13. License (>=v2). Read the file COPYING that comes with GRASS
  14. for details.
  15. @author Soeren Gebbert
  16. """
from space_time_datasets import *
from factory import *
from open import *
# Explicit import of the GRASS scripting core, which provides
# parse_command() and fatal() used below.
import grass.script.core as core

###############################################################################
def print_gridded_dataset_univar_statistics(type, input, where, extended,
                                            header, fs):
    """!Print univariate statistics for a space time raster or raster3d dataset

       @param type Must be "strds" or "str3ds"
       @param input The name of the space time dataset
       @param where A temporal database where statement
       @param extended If True compute extended statistics
       @param header If True print column names as header
       @param fs Field separator
    """

    # We need a database interface
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = open_old_space_time_dataset(input, type, dbif)

    rows = sp.get_registered_maps(
        "id,start_time,end_time", where, "start_time", dbif)

    if not rows:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': input})

    if header == True:
        string = ""
        string += "id" + fs + "start" + fs + "end" + fs + "mean" + fs
        string += "min" + fs + "max" + fs
        string += "mean_of_abs" + fs + "stddev" + fs + "variance" + fs
        string += "coeff_var" + fs + "sum" + fs + "null_cells" + fs + "cells"
        if extended == True:
            string += fs + "first_quartile" + fs + "median" + fs
            string += "third_quartile" + fs + "percentile_90"
        print string

    for row in rows:
        string = ""
        id = row["id"]
        start = row["start_time"]
        end = row["end_time"]

        # The -g flag requests shell style output, -e adds extended statistics
        flag = "g"
        if extended == True:
            flag += "e"

        if type == "strds":
            stats = core.parse_command("r.univar", map=id, flags=flag)
        elif type == "str3ds":
            stats = core.parse_command("r3.univar", map=id, flags=flag)

        string += str(id) + fs + str(start) + fs + str(end)
        string += fs + str(stats["mean"]) + fs + str(stats["min"])
        string += fs + str(stats["max"]) + fs + str(stats["mean_of_abs"])
        string += fs + str(stats["stddev"]) + fs + str(stats["variance"])
        string += fs + str(stats["coeff_var"]) + fs + str(stats["sum"])
        string += fs + str(stats["null_cells"]) + fs + str(stats["cells"])

        if extended == True:
            string += fs + str(stats["first_quartile"]) + \
                fs + str(stats["median"])
            string += fs + str(stats["third_quartile"]) + \
                fs + str(stats["percentile_90"])

        print string

    dbif.close()
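
# A minimal usage sketch: it assumes a running GRASS session with an
# initialized temporal database and an existing space time raster dataset;
# the dataset name "temperature@climate" is a hypothetical placeholder.
#
#   import grass.temporal as tgis
#   tgis.print_gridded_dataset_univar_statistics(
#       type="strds", input="temperature@climate", where=None,
#       extended=True, header=True, fs="|")
#
# This prints a header line followed by one "|"-separated line of r.univar
# statistics per registered raster map.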
###############################################################################

def print_vector_dataset_univar_statistics(input, twhere, layer, type, column,
                                           where, extended, header, fs):
    """!Print univariate statistics for a space time vector dataset

       @param input The name of the space time dataset
       @param twhere A temporal database where statement
       @param layer The layer number used in case no layer is present
              in the temporal dataset
       @param type options: point,line,boundary,centroid,area
       @param column The name of the attribute column
       @param where An attribute database where statement passed to v.univar
       @param extended If True compute extended statistics
       @param header If True print column names as header
       @param fs Field separator
    """

    # We need a database interface
    dbif = SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    sp = dataset_factory("stvds", id)

    if sp.is_in_db(dbif) == False:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> not found") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})
    sp.select(dbif)

    rows = sp.get_registered_maps("id,name,mapset,start_time,end_time,layer",
                                  twhere, "start_time", dbif)

    if not rows:
        dbif.close()
        core.fatal(_("Space time %(sp)s dataset <%(i)s> is empty") % {
            'sp': sp.get_new_map_instance(None).get_type(), 'i': id})

    string = ""
    if header == True:
        string += "id" + fs + "start" + fs + "end" + fs + "n" + \
            fs + "nmissing" + fs + "nnull" + fs
        string += "min" + fs + "max" + fs + "range"
        if type == "point" or type == "centroid":
            string += fs + "mean" + fs + "mean_abs" + fs + "population_stddev" + \
                fs + "population_variance" + fs
            string += "population_coeff_variation" + fs + \
                "sample_stddev" + fs + "sample_variance" + fs
            string += "kurtosis" + fs + "skewness"
        if extended == True:
            string += fs + "first_quartile" + fs + "median" + fs + \
                "third_quartile" + fs + "percentile_90"
        print string

    for row in rows:
        id = row["name"] + "@" + row["mapset"]
        start = row["start_time"]
        end = row["end_time"]
        mylayer = row["layer"]

        # The -g flag requests shell style output, -e adds extended statistics
        flags = "g"
        if extended == True:
            flags += "e"

        # Fall back to the layer given as argument if the map has none
        if not mylayer:
            mylayer = layer

        stats = core.parse_command("v.univar", map=id, where=where,
                                   column=column, layer=mylayer,
                                   type=type, flags=flags)

        string = ""

        if stats:
            string += str(id) + fs + str(start) + fs + str(end)
            string += fs + str(stats["n"]) + fs + str(stats[
                "nmissing"]) + fs + str(stats["nnull"])
            if "min" in stats:
                string += fs + str(stats["min"]) + fs + str(
                    stats["max"]) + fs + str(stats["range"])
            else:
                # Keep the column layout even if v.univar returned no values
                string += fs + fs + fs

            if type == "point" or type == "centroid":
                if "mean" in stats:
                    string += fs + str(stats["mean"]) + fs + \
                        str(stats["mean_abs"]) + fs + \
                        str(stats["population_stddev"]) + fs + \
                        str(stats["population_variance"])
                    string += fs + str(stats["population_coeff_variation"]) + \
                        fs + str(stats["sample_stddev"]) + fs + \
                        str(stats["sample_variance"])
                    string += fs + str(stats["kurtosis"]) + fs + \
                        str(stats["skewness"])
                else:
                    string += fs + fs + fs + fs + fs + fs + fs + fs + fs

            if extended == True:
                if "first_quartile" in stats:
                    string += fs + str(stats["first_quartile"]) + fs + \
                        str(stats["median"]) + fs + \
                        str(stats["third_quartile"]) + fs + \
                        str(stats["percentile_90"])
                else:
                    string += fs + fs + fs + fs

            print string

    dbif.close()
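
###############################################################################

if __name__ == "__main__":
    # Minimal example run, assuming a running GRASS session with an
    # initialized temporal database; the space time vector dataset name
    # "precipitation_stations" and the attribute column "value" are
    # hypothetical placeholders.
    print_vector_dataset_univar_statistics(
        input="precipitation_stations", twhere=None, layer=1, type="point",
        column="value", where=None, extended=False, header=True, fs="|")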