فهرست منبع

Shifted the import statement to the top

Himanshu Shukla 5 ماه پیش
والد
کامیت
3ba4b64e71
1 فایل‌های تغییر یافته به همراه 1 افزوده شده و 3 حذف شده
  1. 1 3
      recipes/quickstart/inference/local_inference/multi_modal_infer.py

+ 1 - 3
recipes/quickstart/inference/local_inference/multi_modal_infer.py

@@ -7,8 +7,7 @@ from PIL import Image as PIL_Image
 from transformers import MllamaForConditionalGeneration, MllamaProcessor
 from peft import PeftModel
 import gradio as gr
-from huggingface_hub import login
-
+from huggingface_hub import HfFolder
 # Initialize accelerator
 accelerator = Accelerator()
 device = accelerator.device
@@ -18,7 +17,6 @@ DEFAULT_MODEL = "meta-llama/Llama-3.2-11B-Vision-Instruct"
 MAX_OUTPUT_TOKENS = 2048
 MAX_IMAGE_SIZE = (1120, 1120)
 
-from huggingface_hub import HfFolder
 
 def get_hf_token():
     """Retrieve Hugging Face token from the cache or environment."""