
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url>
<loc>http://llama.meta.com/</loc>
</url>
<url>
<loc>http://llama.meta.com/use-policy/</loc>
</url>
<url>
<loc>http://llama.meta.com/responsible-use-guide/</loc>
</url>
<url>
<loc>http://llama.meta.com/llama2/</loc>
</url>
<url>
<loc>http://llama.meta.com/llama2/license/</loc>
</url>
<url>
<loc>http://llama.meta.com/llama2/use-policy/</loc>
</url>
<url>
<loc>http://llama.meta.com/license/</loc>
</url>
<url>
<loc>http://llama.meta.com/code-llama/</loc>
</url>
<url>
<loc>http://llama.meta.com/llama3/</loc>
</url>
<url>
<loc>http://llama.meta.com/llama3/license/</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/model-cards-and-prompt-formats/meta-llama-3</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/model-cards-and-prompt-formats/meta-llama-guard-2</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/model-cards-and-prompt-formats/meta-code-llama-70b</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/model-cards-and-prompt-formats/meta-llama-guard-1</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/model-cards-and-prompt-formats/meta-code-llama</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/model-cards-and-prompt-formats/meta-llama-2</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/getting_the_models</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/getting-the-models/hugging-face</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/getting-the-models/kaggle</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/llama-everywhere</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/llama-everywhere/running-meta-llama-on-linux/</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/llama-everywhere/running-meta-llama-on-windows/</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/llama-everywhere/running-meta-llama-on-mac/</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/llama-everywhere/running-meta-llama-in-the-cloud/</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/how-to-guides/fine-tuning</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/how-to-guides/quantization</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/how-to-guides/prompting</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/how-to-guides/validation</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/integration-guides/meta-code-llama</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/integration-guides/langchain</loc>
</url>
<url>
<loc>http://llama.meta.com/docs/integration-guides/llamaindex</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/llama-recipes/main/README.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/llama/main/MODEL_CARD.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/llama/main/README.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/llama3/main/MODEL_CARD.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/llama3/main/README.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/codellama/main/MODEL_CARD.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/codellama/main/README.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/PurpleLlama/main/README.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/PurpleLlama/main/Llama-Guard2/MODEL_CARD.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/PurpleLlama/main/Llama-Guard2/README.md</loc>
</url>
<url>
<loc>http://raw.githubusercontent.com/meta-llama/PurpleLlama/main/Llama-Guard/MODEL_CARD.md</loc>
</url>
<url>
<loc>https://hamel.dev/notes/llm/inference/03_inference.html</loc>
</url>
<url>
<loc>https://www.anyscale.com/blog/continuous-batching-llm-inference</loc>
</url>
<url>
<loc>https://github.com/huggingface/peft</loc>
</url>
<url>
<loc>https://github.com/facebookresearch/llama-recipes/blob/main/docs/LLM_finetuning.md</loc>
</url>
<url>
<loc>https://github.com/meta-llama/llama-recipes/blob/main/recipes/finetuning/datasets/README.md</loc>
</url>
<url>
<loc>https://www.databricks.com/blog/efficient-fine-tuning-lora-guide-llms</loc>
</url>
<url>
<loc>https://www.wandb.courses/courses/training-fine-tuning-LLMs</loc>
</url>
<url>
<loc>https://www.snowflake.com/blog/meta-code-llama-testing/</loc>
</url>
<url>
<loc>https://www.phind.com/blog/code-llama-beats-gpt4</loc>
</url>
<url>
<loc>https://www.anyscale.com/blog/llama-2-is-about-as-factually-accurate-as-gpt-4-for-summaries-and-is-30x-cheaper</loc>
</url>
<url>
<loc>https://ragntune.com/blog/gpt3.5-vs-llama2-finetuning</loc>
</url>
<url>
<loc>https://deci.ai/blog/fine-tune-llama-2-with-lora-for-question-answering/</loc>
</url>
<url>
<loc>https://replicate.com/blog/fine-tune-translation-model-axolotl</loc>
</url>
<url>
<loc>https://huyenchip.com/2023/04/11/llm-engineering.html</loc>
</url>
</urlset>