@@ -4217,7 +4217,6 @@
 "from langchain.chains.conversation.memory import ConversationBufferWindowMemory\n",
 "from langchain.llms import HuggingFacePipeline\n",
 "from langchain.schema import BaseOutputParser\n",
-"from torch import LongTensor\n",
 "from transformers import (\n",
 "    AutoModelForCausalLM,\n",
 "    AutoTokenizer,\n",
@@ -4912,7 +4911,7 @@
 {
 "cell_type": "code",
 "source": [
-"text_pipeline = pipeline(\n",
+"generation_pipeline = pipeline(\n",
 "    model=model,\n",
 "    tokenizer=tokenizer,\n",
 "    return_full_text=True,\n",
@@ -4921,7 +4920,7 @@
 "    generation_config=generation_config,\n",
 ")\n",
 "\n",
-"llm = HuggingFacePipeline(pipeline=text_pipeline)"
+"llm = HuggingFacePipeline(pipeline=generation_pipeline)"
 ],
 "metadata": {
 "colab": {