# llama_messenger.py — Flask webhook that answers Messenger messages with
# Llama 2 (13B chat) hosted on Replicate.
# Standard library
import json
import os

# Third-party
import langchain
import requests
from flask import Flask
from flask import request
from langchain.llms import Replicate
  8. os.environ["REPLICATE_API_TOKEN"] = "<your replicate api token>"
  9. llama2_13b_chat = "meta/llama-2-13b-chat:f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d"
  10. llm = Replicate(
  11. model=llama2_13b_chat,
  12. model_kwargs={"temperature": 0.01, "top_p": 1, "max_new_tokens":500}
  13. )
  14. app = Flask(__name__)
  15. @app.route('/msgrcvd_pager', methods=['POST', 'GET'])
  16. def msgrcvd_pager():
  17. message = request.args.get('message')
  18. sender = request.args.get('sender')
  19. recipient = request.args.get('recipient')
  20. answer = llm(message)
  21. print(message)
  22. print(answer)
  23. url = f"https://graph.facebook.com/v18.0/{recipient}/messages"
  24. params = {
  25. 'recipient': '{"id": ' + sender + '}',
  26. 'message': json.dumps({'text': answer}),
  27. 'messaging_type': 'RESPONSE',
  28. 'access_token': "<your page access token>"
  29. }
  30. headers = {
  31. 'Content-Type': 'application/json'
  32. }
  33. response = requests.post(url, params=params, headers=headers)
  34. print(response.status_code)
  35. print(response.text)
  36. return message + "<p/>" + answer