#!/usr/bin/env python3

############################################################################
#
# MODULE:    Builds manual pages
# AUTHOR(S): Markus Neteler
#            Glynn Clements
#            Martin Landa <landa.martin gmail.com>
# PURPOSE:   Create HTML manual page snippets
# COPYRIGHT: (C) 2007-2022 by Glynn Clements
#            and the GRASS Development Team
#
#            This program is free software under the GNU General
#            Public License (>=v2). Read the file COPYING that
#            comes with GRASS for details.
#
#############################################################################

import http
import sys
import os
import string
import re
from datetime import datetime
import locale
import json
import pathlib
import shutil
import subprocess
import time

try:
    # Python 2 import
    from HTMLParser import HTMLParser
except ImportError:
    # Python 3 import
    from html.parser import HTMLParser

from six.moves.urllib import request as urlrequest
from six.moves.urllib.error import HTTPError, URLError

try:
    import urlparse
except ImportError:
    import urllib.parse as urlparse

try:
    import grass.script as gs
except ImportError:
    # During compilation of GRASS GIS
    gs = None

HEADERS = {
    "User-Agent": "Mozilla/5.0",
}
HTTP_STATUS_CODES = list(http.HTTPStatus)

if sys.version_info[0] == 2:
    PY2 = True
else:
    PY2 = False

if not PY2:
    unicode = str

grass_version = os.getenv("VERSION_NUMBER", "unknown")
trunk_url = ""
addons_url = ""
grass_git_branch = "main"
if grass_version != "unknown":
    major, minor, patch = grass_version.split(".")
    base_url = "https://github.com/OSGeo"
    trunk_url = "{base_url}/grass/tree/{branch}/".format(
        base_url=base_url, branch=grass_git_branch
    )
    addons_url = "{base_url}/grass-addons/tree/grass{major}/".format(
        base_url=base_url, major=major
    )


def _get_encoding():
    encoding = locale.getdefaultlocale()[1]
    if not encoding:
        encoding = "UTF-8"
    return encoding


def decode(bytes_):
    """Decode bytes with default locale and return (unicode) string

    No-op if parameter is not bytes (assumed unicode string).

    :param bytes bytes_: the bytes to decode
    """
    if isinstance(bytes_, unicode):
        return bytes_
    if isinstance(bytes_, bytes):
        enc = _get_encoding()
        return bytes_.decode(enc)
    return unicode(bytes_)


def urlopen(url, *args, **kwargs):
    """Wrapper around urlopen. Same function as 'urlopen', but with the
    ability to define headers.
    """
    request = urlrequest.Request(url, headers=HEADERS)
    return urlrequest.urlopen(request, *args, **kwargs)


def set_proxy():
    """Set proxy"""
    proxy = os.getenv("GRASS_PROXY")
    if proxy:
        proxies = {}
        for ptype, purl in (p.split("=") for p in proxy.split(",")):
            proxies[ptype] = purl
        urlrequest.install_opener(
            urlrequest.build_opener(urlrequest.ProxyHandler(proxies))
        )


set_proxy()
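
# Illustrative note (not from the original source): judging from the parsing
# in set_proxy(), GRASS_PROXY is expected as comma-separated "type=url"
# pairs, e.g.:
#   GRASS_PROXY="http=http://proxy.example.com:3128,https=http://proxy.example.com:3128"
# (the hostname and port above are placeholders).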


def download_git_commit(url, response_format, *args, **kwargs):
    """Download module/addon last commit from GitHub API

    :param str url: url address
    :param str response_format: content type

    :return urllib.request.urlopen or None response: response object or None
    """
    try:
        response = urlopen(url, *args, **kwargs)
        if not response.code == 200:
            index = HTTP_STATUS_CODES.index(response.code)
            desc = HTTP_STATUS_CODES[index].description
            gs.fatal(
                _(
                    "Download commit from <{url}>, return status code "
                    "{code}, {desc}".format(
                        url=url,
                        code=response.code,
                        desc=desc,
                    ),
                ),
            )
        if response_format not in response.getheader("Content-Type"):
            gs.fatal(
                _(
                    "Wrong downloaded commit file format. "
                    "Check url <{url}>. Allowed file format is "
                    "{response_format}.".format(
                        url=url,
                        response_format=response_format,
                    ),
                ),
            )
        return response
    except HTTPError as err:
        gs.warning(
            _(
                "The download of the commit from the GitHub API "
                "server wasn't successful, <{}>. Commit and commit "
                "date will not be included in the <{}> addon html manual "
                "page.".format(err.msg, pgm)
            ),
        )
    except URLError:
        gs.warning(
            _(
                "Download file from <{url}>, failed. Check internet "
                "connection. Commit and commit date will not be included "
                "in the <{pgm}> addon manual page.".format(url=url, pgm=pgm)
            ),
        )


def get_last_git_commit(src_dir, is_addon, addon_path):
    """Get last module/addon git commit

    :param str src_dir: module/addon source dir
    :param bool is_addon: True if it is an addon
    :param str addon_path: addon path

    :return dict git_log: dict with keys "commit" and "date"; if the commit
                          cannot be obtained locally or downloaded from the
                          GitHub API server, both values are "unknown"
    """
    unknown = "unknown"
    git_log = {"commit": unknown, "date": unknown}
    datetime_format = "%A %b %d %H:%M:%S %Y"  # e.g. Sun Jan 16 23:09:35 2022
    if is_addon:
        grass_addons_url = (
            "https://api.github.com/repos/osgeo/grass-addons/commits?path={path}"
            "&page=1&per_page=1&sha=grass{major}".format(
                path=addon_path,
                major=major,
            )
        )  # sha=git_branch_name
    else:
        core_module_path = os.path.join(
            *(set(src_dir.split(os.path.sep)) ^ set(topdir.split(os.path.sep)))
        )
        grass_modules_url = (
            "https://api.github.com/repos/osgeo/grass/commits?path={path}"
            "&page=1&per_page=1&sha={branch}".format(
                branch=grass_git_branch,
                path=core_module_path,
            )
        )  # sha=git_branch_name

    if shutil.which("git"):
        if os.path.exists(src_dir):
            git_log["date"] = time.ctime(os.path.getmtime(src_dir))
        stdout, stderr = subprocess.Popen(
            args=["git", "log", "-1", src_dir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        ).communicate()
        stdout = decode(stdout)
        stderr = decode(stderr)

        if stderr and "fatal: not a git repository" in stderr:
            response = download_git_commit(
                url=grass_addons_url if is_addon else grass_modules_url,
                response_format="application/json",
            )
            if response:
                commit = json.loads(response.read())
                if commit:
                    git_log["commit"] = commit[0]["sha"]
                    git_log["date"] = datetime.strptime(
                        commit[0]["commit"]["author"]["date"],
                        "%Y-%m-%dT%H:%M:%SZ",
                    ).strftime(datetime_format)
        else:
            if stdout:
                commit = stdout.splitlines()
                git_log["commit"] = commit[0].split(" ")[-1]
                commit_date = commit[2].lstrip("Date:").strip()
                git_log["date"] = commit_date.rsplit(" ", 1)[0]
    return git_log
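
# Illustrative note (not from the original source): get_last_git_commit()
# returns a dict of the shape {"commit": "<sha>", "date": "<formatted date>"};
# both values stay "unknown" when neither "git log" nor the GitHub API lookup
# succeeds.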


html_page_footer_pages_path = (
    os.getenv("HTML_PAGE_FOOTER_PAGES_PATH")
    if os.getenv("HTML_PAGE_FOOTER_PAGES_PATH")
    else ""
)

pgm = sys.argv[1]

src_file = "%s.html" % pgm
tmp_file = "%s.tmp.html" % pgm
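
# Illustrative note (not from the original source): the script reads
# <pgm>.html (and <pgm>.tmp.html, if present) from the current directory and
# writes the assembled manual page to stdout, so it is typically run with
# output redirection, e.g.:
#   python3 mkhtml.py r.slope.aspect > r.slope.aspect.html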

header_base = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<title>${PGM} - GRASS GIS Manual</title>
<meta name="Author" content="GRASS Development Team">
<meta name="description" content="${PGM}: ${PGM_DESC}">
<link rel="stylesheet" href="grassdocs.css" type="text/css">
</head>
<body bgcolor="white">
<div id="container">
<a href="index.html"><img src="grass_logo.png" alt="GRASS logo"></a>
<hr class="header">
"""

header_nopgm = """<h2>${PGM}</h2>
"""

header_pgm = """<h2>NAME</h2>
<em><b>${PGM}</b></em>
"""

header_pgm_desc = """<h2>NAME</h2>
<em><b>${PGM}</b></em> - ${PGM_DESC}
"""

sourcecode = string.Template(
    """<h2>SOURCE CODE</h2>
<p>
  Available at:
  <a href="${URL_SOURCE}">${PGM} source code</a>
  (<a href="${URL_LOG}">history</a>)
</p>
<p>
  ${DATE_TAG}
</p>
"""
)

footer_index = string.Template(
    """<hr class="header">
<p>
<a href="index.html">Main index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}${INDEXNAME}.html">${INDEXNAMECAP} index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}topics.html">Topics index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}keywords.html">Keywords index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}graphical_index.html">Graphical index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}full_index.html">Full index</a>
</p>
<p>
&copy; 2003-${YEAR}
<a href="https://grass.osgeo.org">GRASS Development Team</a>,
GRASS GIS ${GRASS_VERSION} Reference Manual
</p>
</div>
</body>
</html>
"""
)

footer_noindex = string.Template(
    """<hr class="header">
<p>
<a href="index.html">Main index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}topics.html">Topics index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}keywords.html">Keywords index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}graphical_index.html">Graphical index</a> |
<a href="${HTML_PAGE_FOOTER_PAGES_PATH}full_index.html">Full index</a>
</p>
<p>
&copy; 2003-${YEAR}
<a href="https://grass.osgeo.org">GRASS Development Team</a>,
GRASS GIS ${GRASS_VERSION} Reference Manual
</p>
</div>
</body>
</html>
"""
)


def read_file(name):
    try:
        f = open(name, "rb")
        s = f.read()
        f.close()
        if PY2:
            return s
        else:
            return decode(s)
    except IOError:
        return ""


def create_toc(src_data):
    class MyHTMLParser(HTMLParser):
        def __init__(self):
            HTMLParser.__init__(self)
            self.reset()
            self.idx = 1
            self.tag_curr = ""
            self.tag_last = ""
            self.process_text = False
            self.data = []
            self.tags_allowed = ("h1", "h2", "h3")
            self.tags_ignored = "img"
            self.text = ""

        def handle_starttag(self, tag, attrs):
            if tag in self.tags_allowed:
                self.process_text = True
            self.tag_last = self.tag_curr
            self.tag_curr = tag

        def handle_endtag(self, tag):
            if tag in self.tags_allowed:
                self.data.append((tag, "%s_%d" % (tag, self.idx), self.text))
                self.idx += 1
                self.process_text = False
                self.text = ""
            self.tag_curr = self.tag_last

        def handle_data(self, data):
            if not self.process_text:
                return
            if self.tag_curr in self.tags_allowed or self.tag_curr in self.tags_ignored:
                self.text += data
            else:
                self.text += "<%s>%s</%s>" % (self.tag_curr, data, self.tag_curr)

    # instantiate the parser and feed it some HTML
    parser = MyHTMLParser()
    parser.feed(src_data)

    return parser.data


def escape_href(label):
    # remove html tags
    label = re.sub("<[^<]+?>", "", label)
    # fix &nbsp;
    label = label.replace("&nbsp;", "")
    # fix "
    label = label.replace('"', "")
    # replace space with hyphen + lower
    return label.replace(" ", "-").lower()
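
# Illustrative note (not from the original source): escape_href() turns a
# heading such as "SOURCE CODE" into the anchor name "source-code".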


def write_toc(data):
    if not data:
        return

    fd = sys.stdout
    fd.write('<div class="toc">\n')
    fd.write('<h4 class="toc">Table of contents</h4>\n')
    fd.write('<ul class="toc">\n')
    first = True
    has_h2 = False
    in_h3 = False
    indent = 4
    for tag, href, text in data:
        if tag == "h3" and not in_h3 and has_h2:
            fd.write('\n%s<ul class="toc">\n' % (" " * indent))
            indent += 4
            in_h3 = True
        elif not first:
            fd.write("</li>\n")

        if tag == "h2":
            has_h2 = True
            if in_h3:
                indent -= 4
                fd.write("%s</ul></li>\n" % (" " * indent))
                in_h3 = False

        text = text.replace("\xa0", " ")
        fd.write(
            '%s<li class="toc"><a href="#%s" class="toc">%s</a>'
            % (" " * indent, escape_href(text), text)
        )
        first = False

    fd.write("</li>\n</ul>\n")
    fd.write("</div>\n")


def update_toc(data):
    ret_data = []
    pat = re.compile(r"(<(h[2|3])>)(.+)(</h[2|3]>)")
    idx = 1
    for line in data.splitlines():
        if pat.search(line):
            xline = pat.split(line)
            line = (
                xline[1]
                + '<a name="%s">' % escape_href(xline[3])
                + xline[3]
                + "</a>"
                + xline[4]
            )
            idx += 1
        ret_data.append(line)

    return "\n".join(ret_data)
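
# Illustrative note (not from the original source): update_toc() rewrites a
# heading line such as
#   <h2>DESCRIPTION</h2>
# into
#   <h2><a name="description">DESCRIPTION</a></h2>
# so the TOC links produced by write_toc() have matching anchors.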


def get_addon_path():
    """Check if pgm is in the addons list and get addon path

    return: pgm path if pgm is addon else None
    """
    addon_base = os.getenv("GRASS_ADDON_BASE")
    if addon_base:
        # addons_paths.json is a file created during extension installation;
        # see the get_addons_paths() function in the g.extension.py file
        addons_file = "addons_paths.json"
        addons_paths = os.path.join(addon_base, addons_file)
        if not os.path.exists(addons_paths):
            # A compiled addon has its own dir, e.g. ~/.grass8/addons/db.join/
            # with bin/, docs/, etc/, scripts/ subdirs, as required for
            # compiling addons on the osgeo lxd container server and for
            # generating the modules.xml file (build-xml.py script); in that
            # case the addons_paths.json file is stored one dir level up
            addons_paths = os.path.join(
                os.path.abspath(os.path.join(addon_base, "..")),
                addons_file,
            )
            if not os.path.exists(addons_paths):
                return
        with open(addons_paths) as f:
            addons_paths = json.load(f)
        for addon in addons_paths["tree"]:
            if pgm == pathlib.Path(addon["path"]).name:
                return addon["path"]


# process header
src_data = read_file(src_file)
name = re.search("(<!-- meta page name:)(.*)(-->)", src_data, re.IGNORECASE)
pgm_desc = "GRASS GIS Reference Manual"
if name:
    pgm = name.group(2).strip().split("-", 1)[0].strip()
    name_desc = re.search(
        "(<!-- meta page name description:)(.*)(-->)", src_data, re.IGNORECASE
    )
    if name_desc:
        pgm_desc = name_desc.group(2).strip()
desc = re.search("(<!-- meta page description:)(.*)(-->)", src_data, re.IGNORECASE)
if desc:
    pgm = desc.group(2).strip()
    header_tmpl = string.Template(header_base + header_nopgm)
else:
    if not pgm_desc:
        header_tmpl = string.Template(header_base + header_pgm)
    else:
        header_tmpl = string.Template(header_base + header_pgm_desc)

if not re.search("<html>", src_data, re.IGNORECASE):
    tmp_data = read_file(tmp_file)
    # Adjust the keyword HTML page paths if the add-on HTML man page is
    # stored on the server
    if html_page_footer_pages_path:
        new_keywords_paths = []
        orig_keywords_paths = re.search(
            r"<h[1-9]>KEYWORDS</h[1-9]>(.*?)<h[1-9]>",
            tmp_data,
            re.DOTALL,
        )
        if orig_keywords_paths:
            search_txt = 'href="'
            for i in orig_keywords_paths.group(1).split(","):
                if search_txt in i:
                    index = i.index(search_txt) + len(search_txt)
                    new_keywords_paths.append(
                        i[:index] + html_page_footer_pages_path + i[index:],
                    )
        if new_keywords_paths:
            tmp_data = tmp_data.replace(
                orig_keywords_paths.group(1),
                ",".join(new_keywords_paths),
            )
    if not re.search("<html>", tmp_data, re.IGNORECASE):
        sys.stdout.write(header_tmpl.substitute(PGM=pgm, PGM_DESC=pgm_desc))

    if tmp_data:
        for line in tmp_data.splitlines(True):
            if not re.search("</body>|</html>", line, re.IGNORECASE):
                sys.stdout.write(line)

# create TOC
write_toc(create_toc(src_data))

# process body
sys.stdout.write(update_toc(src_data))

# if </html> is found, suppose a complete html is provided.
# otherwise, generate module class reference:
if re.search("</html>", src_data, re.IGNORECASE):
    sys.exit()

index_names = {
    "d": "display",
    "db": "database",
    "g": "general",
    "i": "imagery",
    "m": "miscellaneous",
    "ps": "postscript",
    "p": "paint",
    "r": "raster",
    "r3": "raster3d",
    "s": "sites",
    "t": "temporal",
    "v": "vector",
}


def to_title(name):
    """Convert name of command class/family to form suitable for title"""
    if name == "raster3d":
        return "3D raster"
    elif name == "postscript":
        return "PostScript"
    else:
        return name.capitalize()
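
# Illustrative note (not from the original source):
#   to_title("raster3d")   -> "3D raster"
#   to_title("postscript") -> "PostScript"
#   to_title("database")   -> "Database"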


index_titles = {}
for key, name in index_names.items():
    index_titles[key] = to_title(name)

# process footer
index = re.search("(<!-- meta page index:)(.*)(-->)", src_data, re.IGNORECASE)
if index:
    index_name = index.group(2).strip()
    if "|" in index_name:
        index_name, index_name_cap = index_name.split("|", 1)
    else:
        index_name_cap = to_title(index_name)
else:
    mod_class = pgm.split(".", 1)[0]
    index_name = index_names.get(mod_class, "")
    index_name_cap = index_titles.get(mod_class, "")

year = os.getenv("VERSION_DATE")
if not year:
    year = str(datetime.now().year)

# check the names of scripts to assign the right folder
topdir = os.path.abspath(os.getenv("MODULE_TOPDIR"))
curdir = os.path.abspath(os.path.curdir)
if curdir.startswith(topdir + os.path.sep):
    source_url = trunk_url
    pgmdir = curdir.replace(topdir, "").lstrip(os.path.sep)
else:
    # addons
    source_url = addons_url
    pgmdir = os.path.sep.join(curdir.split(os.path.sep)[-3:])

url_source = ""
addon_path = None
if os.getenv("SOURCE_URL", ""):
    addon_path = get_addon_path()
    if addon_path:
        # Addon is installed from the local dir
        if os.path.exists(os.getenv("SOURCE_URL")):
            url_source = urlparse.urljoin(
                addons_url,
                addon_path,
            )
        else:
            url_source = urlparse.urljoin(
                os.environ["SOURCE_URL"].split("src")[0],
                addon_path,
            )
else:
    url_source = urlparse.urljoin(source_url, pgmdir)
if sys.platform == "win32":
    url_source = url_source.replace(os.path.sep, "/")

if index_name:
    branches = "branches"
    tree = "tree"
    commits = "commits"

    if branches in url_source:
        url_log = url_source.replace(branches, commits)
        url_source = url_source.replace(branches, tree)
    else:
        url_log = url_source.replace(tree, commits)

    git_commit = get_last_git_commit(
        src_dir=curdir,
        addon_path=addon_path if addon_path else None,
        is_addon=True if addon_path else False,
    )
    if git_commit["commit"] == "unknown":
        date_tag = "Accessed: {date}".format(date=git_commit["date"])
    else:
        date_tag = "Latest change: {date} in commit: {commit}".format(
            date=git_commit["date"], commit=git_commit["commit"]
        )
    sys.stdout.write(
        sourcecode.substitute(
            URL_SOURCE=url_source,
            PGM=pgm,
            URL_LOG=url_log,
            DATE_TAG=date_tag,
        )
    )
    sys.stdout.write(
        footer_index.substitute(
            INDEXNAME=index_name,
            INDEXNAMECAP=index_name_cap,
            YEAR=year,
            GRASS_VERSION=grass_version,
            HTML_PAGE_FOOTER_PAGES_PATH=html_page_footer_pages_path,
        ),
    )
else:
    sys.stdout.write(
        footer_noindex.substitute(
            YEAR=year,
            GRASS_VERSION=grass_version,
            HTML_PAGE_FOOTER_PAGES_PATH=html_page_footer_pages_path,
        ),
    )