  1. """Test of gis library tokenizing of text
  2. @author Vaclav Petras
  3. """
  4. from grass.gunittest.case import TestCase
  5. from grass.gunittest.main import test
  6. import grass.lib.gis as libgis
  7. # TODO: add tests for content (now testing only number of items)
  8. class TokenizeTestCase(TestCase):
  9. """Test C function G_tokenize() from gis library"""
  10. def test_tokenize_comma(self):
  11. """Test G_tokenize with comma as delim"""
  12. tokens = libgis.G_tokenize("a,b,c,d", ",")
  13. num_of_tokens = libgis.G_number_of_tokens(tokens)
  14. self.assertEqual(num_of_tokens, 4, msg="Got wrong number of tokens")

    def test_tokenize_alternative_delim(self):
        """Test G_tokenize with semicolon as delim"""
        tokens = libgis.G_tokenize("a;b;c", ";")
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(num_of_tokens, 3, msg="Got wrong number of tokens")

    def test_tokenize_with_text_delim(self):
        """Test G_tokenize with comma as delim and single quote text delim

        Expecting the 'wrong' number of tokens here.
        """
        tokens = libgis.G_tokenize("a,'b,c',d", ",")
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(
            num_of_tokens, 4,
            msg="Got wrong number of tokens (expecting that the text"
                " delimiter is ignored)")
        # alternatively this can be done using a test with expected failure


class Tokenize2TestCase(TestCase):
    """Test C function G_tokenize2() from gis library"""

    def test_tokenize2_comma(self):
        """Test G_tokenize2 without any text delim"""
        tokens = libgis.G_tokenize2("a,b,c,d", ",", "'")
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(num_of_tokens, 4, msg="Got wrong number of tokens")

    def test_tokenize2_with_text_delim(self):
        """Test G_tokenize2 with , as delim and single quote text delim"""
        tokens = libgis.G_tokenize2("a,'b,c',d", ",", "'")
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(num_of_tokens, 3, msg="Got wrong number of tokens")

    def test_tokenize2_with_alternative_text_delim(self):
        """Test G_tokenize2 with ; as delim and double quote text delim"""
        tokens = libgis.G_tokenize2('a;"b;c";d', ';', '"')
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(num_of_tokens, 3, msg="Got wrong number of tokens")

    def test_tokenize2_with_text_delim_more_text_tokens(self):
        """Test G_tokenize2 with comma as delim and hash as text delim"""
        tokens = libgis.G_tokenize2("a,#b,c#,#5,d#,#7,2#", ",", "#")
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(num_of_tokens, 4, msg="Got wrong number of tokens")

    def test_tokenize2_with_real_text(self):
        """Test G_tokenize2 with real world text"""
        tokens = libgis.G_tokenize2(
            '440,617722.81,3464034.494,951.987,'
            '"Low Erosion (1,5)","High Deposition (8,6)"',
            ',', '"')
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        self.assertEqual(num_of_tokens, 6, msg="Got wrong number of tokens")
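

# The TODO at the top asks for tests of token content, not just counts.
# The class below is a possible, non-authoritative sketch of such a test:
# it assumes the ctypes bindings expose the char ** returned by G_tokenize
# so that each item can be read as bytes (directly, or via a cast to
# c_char_p), and it relies on G_free_tokens() from the gis library to
# release the token array. TokenContentTestCase and its test method are
# illustrative names, not part of the original testsuite.
class TokenContentTestCase(TestCase):
    """Sketch of a test checking token values (see TODO above)"""

    def test_tokenize_content(self):
        """Check the actual strings produced by G_tokenize"""
        import ctypes  # local import to keep the sketch self-contained

        tokens = libgis.G_tokenize("a,b,c", ",")
        num_of_tokens = libgis.G_number_of_tokens(tokens)
        values = []
        for i in range(num_of_tokens):
            item = tokens[i]
            # Depending on how the bindings map char **, the item may
            # already be bytes or may need an explicit cast (assumption).
            if not isinstance(item, bytes):
                item = ctypes.cast(item, ctypes.c_char_p).value
            values.append(item)
        libgis.G_free_tokens(tokens)
        self.assertEqual(
            values, [b"a", b"b", b"c"], msg="Got wrong token values")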


if __name__ == '__main__':
    test()