/**********************************************************************
 *
 * flat area beautification after Garbrecht and Martz (1997)
 *
 * Garbrecht, J. & Martz, L. W. (1997)
 * The assignment of drainage direction over flat surfaces in raster
 * digital elevation models. J. Hydrol 193, 204–213.
 *
 * the method is modified for speed, only one pass is necessary to get
 * the gradient away from higher terrain
 *
 *********************************************************************/
  13. #include <limits.h>
  14. #include <assert.h>
  15. #include <grass/gis.h>
  16. #include <grass/glocale.h>
  17. #include <grass/rbtree.h>
  18. #include "Gwater.h"
  19. #include "do_astar.h"
/* node of the simple FIFO queue below */
struct pq_node
{
    int idx;              /* cell index; -1 marks the always-empty dummy tail */
    struct pq_node *next; /* next node in the singly linked list, NULL at the tail */
};
/* FIFO queue: "first" is the head, "last" always points at an
 * allocated dummy node with idx == -1 (see pq_add/pq_drop) */
struct pq
{
    struct pq_node *first, *last;
    int size;             /* number of real entries, dummy tail not counted */
};
  30. struct pq *pq_create(void)
  31. {
  32. struct pq *q = G_malloc(sizeof(struct pq));
  33. q->first = G_malloc(sizeof(struct pq_node));
  34. q->first->next = NULL;
  35. q->first->idx = -1;
  36. q->last = q->first;
  37. q->size = 0;
  38. return q;
  39. }
  40. /* dummy end must always be allocated and empty */
  41. int pq_add(int idx, struct pq *q)
  42. {
  43. assert(q->last);
  44. assert(q->last->idx == -1);
  45. q->last->idx = idx;
  46. if (q->last->next != NULL) {
  47. G_fatal_error(_("Beautify flat areas: priority queue error"));
  48. }
  49. struct pq_node *n = (struct pq_node *) G_malloc(sizeof(struct pq_node));
  50. n->next = NULL;
  51. n->idx = -1;
  52. q->last->next = n;
  53. q->last = q->last->next;
  54. assert(q->last != q->last->next);
  55. assert(q->first != q->last);
  56. q->size++;
  57. return 0;
  58. }
  59. int pq_drop(struct pq *q)
  60. {
  61. int idx = q->first->idx;
  62. struct pq_node *n = q->first;
  63. q->size--;
  64. q->first = q->first->next;
  65. assert(q->first);
  66. assert(q->first != q->first->next);
  67. assert(n != q->first);
  68. G_free(n);
  69. return idx;
  70. }
  71. int pq_destroy(struct pq *q)
  72. {
  73. struct pq_node *delme;
  74. while (q->first) {
  75. delme = q->first;
  76. q->first = q->first->next;
  77. G_free(delme);
  78. }
  79. G_free(q);
  80. return 0;
  81. }
  82. struct orders {
  83. int index, uphill, downhill;
  84. char flag;
  85. };
  86. int cmp_orders(const void *a, const void *b)
  87. {
  88. struct orders *oa = (struct orders *)a;
  89. struct orders *ob = (struct orders *)b;
  90. return (oa->index < ob->index ? -1 : (oa->index > ob->index));
  91. }
/*
 * beautify one flat area of elevation "ele" starting at cell "index",
 * assigning a gradient away from higher and towards lower terrain
 * (Garbrecht & Martz 1997, single-pass variant)
 *
 * index   - seg index of the seed cell inside the flat area
 * ele     - elevation value shared by all cells of the flat area
 * alt_org - original elevations (read only)
 * alt_new - elevations to be adjusted in place
 *
 * NOTE(review): alt_seg, flat_done, in_list, worked, nextdr, nextdc,
 * sides, nrows, ncols appear to be module globals from Gwater.h —
 * confirm against the rest of r.watershed.
 *
 * return 0 if nothing was modified
 * return 1 if elevation was modified
 */
int do_flatarea(int index, CELL ele, CELL *alt_org, CELL *alt_new)
{
    int upr, upc, r, c, ct_dir;
    CELL is_in_list, is_worked, this_in_list;
    int index_doer, index_up;
    int n_flat_cells = 0, counter;
    CELL ele_nbr, min_ele_diff;
    int uphill_order, downhill_order, max_uphill_order, max_downhill_order;
    int last_order;
    struct pq *up_pq = pq_create();
    struct pq *down_pq = pq_create();
    struct orders inc_order, *order_found, *nbr_order_found;
    /* one tree entry per flat cell, keyed by cell index (cmp_orders) */
    struct RB_TREE *order_tree = rbtree_create(cmp_orders, sizeof(struct orders));

    pq_add(index, down_pq);
    pq_add(index, up_pq);
    inc_order.downhill = -1;
    inc_order.uphill = 0;
    inc_order.index = index;
    inc_order.flag = 0;
    rbtree_insert(order_tree, &inc_order);
    n_flat_cells = 1;
    /* NOTE(review): min_ele_diff is updated below but never read
     * afterwards — looks like dead code kept for debugging */
    min_ele_diff = INT_MAX;
    max_uphill_order = max_downhill_order = 0;

    /* phase 1: flood the flat area once to collect all its cells in
     * order_tree and find the uphill start points (cells in in_list) */
    /* get uphill start points */
    G_debug(2, "get uphill start points");
    counter = 0;
    while (down_pq->size) {
        if ((index_doer = pq_drop(down_pq)) == -1)
            G_fatal_error("get start points: no more points in down queue");

        seg_index_rc(alt_seg, index_doer, &r, &c);
        FLAG_SET(flat_done, r, c);

        /* check all neighbours, breadth first search */
        for (ct_dir = 0; ct_dir < sides; ct_dir++) {
            /* get r, c (upr, upc) for this neighbour */
            upr = r + nextdr[ct_dir];
            upc = c + nextdc[ct_dir];
            /* check if r, c are within region */
            if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
                index_up = SEG_INDEX(alt_seg, upr, upc);
                is_in_list = FLAG_GET(in_list, upr, upc);
                is_worked = FLAG_GET(worked, upr, upc);
                ele_nbr = alt_org[index_up];

                /* neighbour belongs to the same flat area */
                if (ele_nbr == ele && !is_worked) {
                    inc_order.downhill = -1;
                    inc_order.uphill = -1;
                    inc_order.index = index_up;
                    inc_order.flag = 0;
                    /* not yet added to queue */
                    if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL) {
                        n_flat_cells++;
                        /* add to down queue if not yet in there */
                        pq_add(index_up, down_pq);
                        /* add to up queue if not yet in there */
                        if (is_in_list) {
                            pq_add(index_up, up_pq);
                            /* set uphill order to 0 */
                            inc_order.uphill = 0;
                            counter++;
                        }
                        rbtree_insert(order_tree, &inc_order);
                    }
                }
            }
        }
    }

    /* flat area too small, not worth the effort */
    if (n_flat_cells < 5) {
        /* clean up */
        pq_destroy(up_pq);
        pq_destroy(down_pq);
        rbtree_destroy(order_tree);

        return 0;
    }

    G_debug(2, "%d flat cells, %d cells in tree, %d start cells",
            n_flat_cells, (int)order_tree->count, counter);

    pq_destroy(down_pq);
    down_pq = pq_create();

    /* phase 2: BFS from the uphill start points, assigning each flat
     * cell its distance (uphill order) from higher terrain; cells that
     * touch non-flat terrain become downhill start points (order 0) */
    /* got uphill start points, do uphill correction */
    G_debug(2, "got uphill start points, do uphill correction");
    counter = 0;
    uphill_order = 1;
    while (up_pq->size) {
        int is_in_down_queue = 0;

        if ((index_doer = pq_drop(up_pq)) == -1)
            G_fatal_error("uphill order: no more points in up queue");

        seg_index_rc(alt_seg, index_doer, &r, &c);
        this_in_list = FLAG_GET(in_list, r, c);

        /* get uphill order for this point */
        inc_order.index = index_doer;
        if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL)
            G_fatal_error(_("flat cell escaped for uphill correction"));

        /* BFS invariant: orders must come off the queue non-decreasing */
        last_order = uphill_order - 1;
        uphill_order = order_found->uphill;
        if (last_order > uphill_order)
            G_warning(_("queue error: last uphill order %d > current uphill order %d"),
                      last_order, uphill_order);

        /* debug */
        if (uphill_order == -1)
            G_fatal_error(_("uphill order not set"));

        if (max_uphill_order < uphill_order)
            max_uphill_order = uphill_order;
        uphill_order++;
        counter++;

        /* check all neighbours, breadth first search */
        for (ct_dir = 0; ct_dir < sides; ct_dir++) {
            /* get r, c (upr, upc) for this neighbour */
            upr = r + nextdr[ct_dir];
            upc = c + nextdc[ct_dir];
            /* check if r, c are within region */
            if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
                index_up = SEG_INDEX(alt_seg, upr, upc);
                is_in_list = FLAG_GET(in_list, upr, upc);
                is_worked = FLAG_GET(worked, upr, upc);
                ele_nbr = alt_org[index_up];

                /* all cells that are in_list should have been added
                 * previously as uphill start points */
                if (ele_nbr == ele && !is_worked) {
                    inc_order.index = index_up;
                    if ((nbr_order_found = rbtree_find(order_tree, &inc_order)) == NULL) {
                        G_fatal_error(_("flat cell escaped in uphill correction"));
                    }
                    /* not yet added to queue */
                    if (nbr_order_found->uphill == -1) {
                        if (is_in_list)
                            G_warning("cell should be in queue");

                        /* add to up queue */
                        pq_add(index_up, up_pq);
                        /* set nbr uphill order = current uphill order + 1 */
                        nbr_order_found->uphill = uphill_order;
                    }
                }
                /* focus cell borders non-flat terrain:
                 * add focus cell to down queue as downhill start point */
                if (!this_in_list && !is_in_down_queue &&
                    ele_nbr != ele && !is_in_list && !is_worked) {
                    pq_add(index_doer, down_pq);
                    /* set downhill order to 0 */
                    order_found->downhill = 0;
                    is_in_down_queue = 1;
                }
                /* smallest positive step to higher terrain (unused, see above) */
                if (ele_nbr > ele && min_ele_diff > ele_nbr - ele)
                    min_ele_diff = ele_nbr - ele;
            }
        }
    }

    /* debug: all flags should be set to 0 */

    pq_destroy(up_pq);
    up_pq = pq_create();

    /* phase 3: BFS from the downhill start points, assigning each flat
     * cell its distance (downhill order) from lower terrain; cells in
     * in_list are re-queued (flag = 1) for the adjustment phase */
    /* got downhill start points, do downhill correction */
    G_debug(2, "got downhill start points, do downhill correction");
    downhill_order = 1;
    while (down_pq->size) {
        if ((index_doer = pq_drop(down_pq)) == -1)
            G_fatal_error(_("downhill order: no more points in down queue"));

        seg_index_rc(alt_seg, index_doer, &r, &c);
        this_in_list = FLAG_GET(in_list, r, c);

        /* get downhill order for this point */
        inc_order.index = index_doer;
        if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL)
            G_fatal_error(_("flat cell escaped for downhill correction"));

        /* BFS invariant: orders must come off the queue non-decreasing */
        last_order = downhill_order - 1;
        downhill_order = order_found->downhill;
        if (last_order > downhill_order)
            G_warning(_("queue error: last downhill order %d > current downhill order %d"),
                      last_order, downhill_order);

        /* debug */
        if (downhill_order == -1)
            G_fatal_error(_("downhill order: downhill order not set"));

        if (max_downhill_order < downhill_order)
            max_downhill_order = downhill_order;
        downhill_order++;

        /* check all neighbours, breadth first search */
        for (ct_dir = 0; ct_dir < sides; ct_dir++) {
            /* get r, c (upr, upc) for this neighbour */
            upr = r + nextdr[ct_dir];
            upc = c + nextdc[ct_dir];
            /* check if r, c are within region */
            if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
                index_up = SEG_INDEX(alt_seg, upr, upc);
                is_in_list = FLAG_GET(in_list, upr, upc);
                is_worked = FLAG_GET(worked, upr, upc);
                ele_nbr = alt_org[index_up];

                if (ele_nbr == ele && !is_worked) {
                    inc_order.index = index_up;
                    if ((nbr_order_found = rbtree_find(order_tree, &inc_order)) == NULL)
                        G_fatal_error(_("flat cell escaped in downhill correction"));

                    /* not yet added to queue */
                    if (nbr_order_found->downhill == -1) {
                        /* add to down queue */
                        pq_add(index_up, down_pq);
                        /* set nbr downhill order = current downhill order + 1 */
                        nbr_order_found->downhill = downhill_order;
                        /* add to up queue */
                        if (is_in_list) {
                            pq_add(index_up, up_pq);
                            /* set flag */
                            nbr_order_found->flag = 1;
                        }
                    }
                }
            }
        }
    }

    /* phase 4: combine both orders into an elevation increment so the
     * flat surface drains from high to low terrain */
    /* got uphill and downhill order, adjust ele */

    /* increment: ele += uphill_order + max_downhill_order - downhill_order */
    /* decrement: ele += uphill_order - max_uphill_order - downhill_order */

    G_debug(2, "adjust ele");
    while (up_pq->size) {
        if ((index_doer = pq_drop(up_pq)) == -1)
            G_fatal_error("no more points in up queue");

        seg_index_rc(alt_seg, index_doer, &r, &c);
        this_in_list = FLAG_GET(in_list, r, c);

        /* get uphill and downhill order for this point */
        inc_order.index = index_doer;
        if ((order_found = rbtree_find(order_tree, &inc_order)) == NULL)
            G_fatal_error(_("flat cell escaped for adjustment"));

        uphill_order = order_found->uphill;
        downhill_order = order_found->downhill;

        /* debug */
        if (uphill_order == -1)
            G_fatal_error(_("adjustment: uphill order not set"));
        if (!this_in_list && downhill_order == -1)
            G_fatal_error(_("adjustment: downhill order not set"));

        /* increment */
        if (this_in_list) {
            downhill_order = max_downhill_order;
            uphill_order = 0;
        }
        /* NOTE(review): double expression is implicitly truncated to
         * CELL (int) on assignment — presumably intended rounding via
         * the +0.5 terms; confirm */
        alt_new[index_doer] +=
            (uphill_order + (double)(max_downhill_order - downhill_order) / 2.0 + 0.5) / 2.0 + 0.5;

        /* check all neighbours, breadth first search */
        for (ct_dir = 0; ct_dir < sides; ct_dir++) {
            /* get r, c (upr, upc) for this neighbour */
            upr = r + nextdr[ct_dir];
            upc = c + nextdc[ct_dir];
            /* check if r, c are within region */
            if (upr >= 0 && upr < nrows && upc >= 0 && upc < ncols) {
                index_up = SEG_INDEX(alt_seg, upr, upc);
                is_in_list = FLAG_GET(in_list, upr, upc);
                is_worked = FLAG_GET(worked, upr, upc);
                ele_nbr = alt_org[index_up];

                if (ele_nbr == ele && !is_worked) {
                    inc_order.index = index_up;
                    if ((nbr_order_found = rbtree_find(order_tree, &inc_order)) == NULL)
                        G_fatal_error(_("flat cell escaped in adjustment"));

                    /* not yet added to queue */
                    if (nbr_order_found->flag == 0) {
                        if (is_in_list)
                            G_warning("adjustment: in_list cell should be in queue");

                        /* add to up queue */
                        pq_add(index_up, up_pq);
                        nbr_order_found->flag = 1;
                    }
                }
            }
        }
    }

    /* clean up */
    pq_destroy(up_pq);
    pq_destroy(down_pq);
    rbtree_destroy(order_tree);

    return 1;
}