File tree: 1 file changed, +14 −0 lines changed
lines changed Original file line number Diff line number Diff line change @@ -157,6 +157,20 @@ def load_llama_from_model_settings(settings: ModelSettings) -> llama_cpp.Llama:
157
157
chat_handler = llama_cpp .llama_chat_format .Llama3VisionAlpha (
158
158
clip_model_path = settings .clip_model_path , verbose = settings .verbose
159
159
)
160
+ elif settings .chat_format == "minicpm-v-2.6" :
161
+ assert settings .clip_model_path is not None , "clip model not found"
162
+ if settings .hf_model_repo_id is not None :
163
+ chat_handler = (
164
+ llama_cpp .llama_chat_format .MiniCPMv26ChatHandler .from_pretrained (
165
+ repo_id = settings .hf_model_repo_id ,
166
+ filename = settings .clip_model_path ,
167
+ verbose = settings .verbose ,
168
+ )
169
+ )
170
+ else :
171
+ chat_handler = llama_cpp .llama_chat_format .MiniCPMv26ChatHandler (
172
+ clip_model_path = settings .clip_model_path , verbose = settings .verbose
173
+ )
160
174
elif settings .chat_format == "hf-autotokenizer" :
161
175
assert (
162
176
settings .hf_pretrained_model_name_or_path is not None
You can’t perform that action at this time.
0 commit comments