
Commit c8e180a

optimize

ChrisLiu6 committed Aug 14, 2024
1 parent 03e2292
Showing 2 changed files with 4 additions and 3 deletions.
lumina_mgpt/data/item_processor.py (1 change: 0 additions & 1 deletion)

@@ -64,7 +64,6 @@ def __init__(
         tokenizer="Alpha-VLLM/Lumina-mGPT-7B-768",
         conv_template=Conversation,
         target_size=512,
-        with_decoder=False,
     ):
 
         super().__init__(
lumina_mgpt/inference_solver.py (6 changes: 4 additions & 2 deletions)

@@ -279,9 +279,11 @@ def __init__(self, model_path, precision, target_size=512):
         self.dtype = {"bf16": torch.bfloat16, "fp16": torch.float16, "fp32": torch.float32}[precision]
 
         self.model = ChameleonForConditionalGeneration.from_pretrained(
-            model_path, torch_dtype=self.dtype, device_map="cuda"
+            model_path,
+            torch_dtype=self.dtype,
+            device_map="cuda",
         )
-        self.item_processor = FlexARItemProcessor(with_decoder=True, target_size=target_size)
+        self.item_processor = FlexARItemProcessor(target_size=target_size)
 
     def get_streamer(self):
         return TextStreamer(self.item_processor.tokenizer)
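Taken together, the two hunks retire the with_decoder flag end to end: the parameter is dropped from the item processor's __init__, and the inference solver stops passing with_decoder=True when it builds the processor. A minimal sketch of what this means for calling code, assuming the import path below (it is not shown in this diff) and a hypothetical target size:

    # Sketch only: the import path and target_size value are assumptions, not part of this commit.
    from lumina_mgpt.data.item_processor import FlexARItemProcessor

    # After this commit the processor is constructed with target_size alone.
    item_processor = FlexARItemProcessor(target_size=512)

    # Passing the removed keyword would now raise a TypeError:
    # FlexARItemProcessor(with_decoder=True, target_size=512)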
