
Commit dffd125: clarify print
kijai committed Jan 12, 2025
1 parent 47b5547
Showing 2 changed files with 3 additions and 3 deletions.
nodes.py: 4 changes (2 additions & 2 deletions)
@@ -96,7 +96,7 @@ def loadmodel(self, model, precision, attention, lora=None):
local_dir=model_path,
local_dir_use_symlinks=False)

print(f"using {attention} for attention")
print(f"Florence2 using {attention} for attention")
with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports): #workaround for unnecessary flash_attn requirement
model = AutoModelForCausalLM.from_pretrained(model_path, attn_implementation=attention, device_map=device, torch_dtype=dtype,trust_remote_code=True)
processor = AutoProcessor.from_pretrained(model_path, trust_remote_code=True)
@@ -172,7 +172,7 @@ def loadmodel(self, model, precision, attention, lora=None):
dtype = {"bf16": torch.bfloat16, "fp16": torch.float16, "fp32": torch.float32}[precision]
model_path = Path(folder_paths.models_dir, "LLM", model)
print(f"Loading model from {model_path}")
print(f"using {attention} for attention")
print(f"Florence2 using {attention} for attention")
with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports): #workaround for unnecessary flash_attn requirement
model = AutoModelForCausalLM.from_pretrained(model_path, attn_implementation=attention, device_map=device, torch_dtype=dtype,trust_remote_code=True)
processor = AutoProcessor.from_pretrained(model_path, trust_remote_code=True)
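For context, both changed print statements sit inside a `patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports)` block that works around Florence2's hard flash_attn import requirement. The repository's `fixed_get_imports` helper is not part of this diff; a minimal sketch of how such a patch typically works, assuming it simply filters `flash_attn` out of the imports that transformers detects, could look like this:

```python
# Hedged sketch only: the real fixed_get_imports lives elsewhere in this repo
# and is not shown in this commit. Assumption: it drops "flash_attn" from the
# imports transformers detects in the remote Florence2 modeling code, so the
# model loads even when flash-attention is not installed.
from unittest.mock import patch

from transformers.dynamic_module_utils import get_imports


def fixed_get_imports(filename):
    # Fall back to the stock behaviour, then strip the optional dependency.
    imports = get_imports(filename)
    return [imp for imp in imports if imp != "flash_attn"]


# Usage, mirroring the lines in the diff above:
# with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
#     model = AutoModelForCausalLM.from_pretrained(model_path, attn_implementation=attention,
#                                                  device_map=device, torch_dtype=dtype,
#                                                  trust_remote_code=True)
```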
pyproject.toml: 2 changes (1 addition & 1 deletion)
@@ -1,7 +1,7 @@
[project]
name = "comfyui-florence2"
description = "Nodes to use Florence2 VLM for image vision tasks: object detection, captioning, segmentation and ocr"
version = "1.0.2"
version = "1.0.3"
license = "MIT"
dependencies = ["transformers>=4.38.0"]

