Internvl 微调后的模型,python代码在哪里加载自己模型的路径
# Inference script for a swift (ModelScope) multimodal chat model.
#
# To load YOUR fine-tuned checkpoint instead of the hub weights, pass the
# checkpoint directory via the `model_id_or_path` argument of
# get_model_tokenizer(), e.g.:
#
#   model, tokenizer = get_model_tokenizer(
#       model_type, torch.bfloat16,
#       model_kwargs={'device_map': 'auto'},
#       model_id_or_path='output/minicpm-v-v2-chat/vx-xxx/checkpoint-301')
#
# NOTE(review): the directory must contain config.json (full model weights).
# A raw LoRA checkpoint does not — merge it first (e.g. `swift export
# --merge_lora true`), otherwise loading fails with
# "does not appear to have a file named config.json".
import os

# Must be set before torch initializes CUDA to pin the script to GPU 0.
os.environ['CUDA_VISIBLE_DEVICES'] = '0'

import torch

from swift.llm import (
    ModelType,
    get_default_template_type,
    get_model_tokenizer,
    get_template,
    inference,
    inference_stream,
)
from swift.utils import seed_everything

# Select the model family and its matching chat template.
model_type = ModelType.minicpm_v_v2_chat
template_type = get_default_template_type(model_type)
print(f'template_type: {template_type}')

# Load model + tokenizer (add model_id_or_path=... here for a fine-tuned
# checkpoint, see header comment).
model, tokenizer = get_model_tokenizer(
    model_type,
    torch.bfloat16,
    model_kwargs={'device_map': 'auto'},
)
model.generation_config.max_new_tokens = 256
template = get_template(template_type, tokenizer)
seed_everything(42)  # deterministic sampling across runs

# Single-turn multimodal query: image URL + Chinese question.
images = ['http://modelscope-open.oss-cn-hangzhou.aliyuncs.com/images/road.png']
query = '距离各城市多远?'
response, history = inference(model, template, query, images=images)
print(f'query: {query}')
print(f'response: {response}')
参考这里改一下好了
https://github.com/modelscope/swift/blob/main/docs/source/LLM/LLM%E5%BE%AE%E8%B0%83%E6%96%87%E6%A1%A3.md#%E6%8E%A8%E7%90%86
@LiJunY get_model_tokenizer 函数里指定你训练后的位置给model_id_or_path 参数
@LiJunY
get_model_tokenizer函数里指定你训练后的位置给model_id_or_path参数
报错信息:"checkpoint-301 does not appear to have a file named config.json. Checkout 'https://huggingface.co///checkpoint-301/tree/None' for available files." —— 不行啊,会出错。
查看一下checkpoint-301 里面的内容
@LiJunY
在 get_model_tokenizer 函数里指定你训练后的位置给 model_id_or_path 参数。报错信息:"checkpoint-301 does not appear to have a file named config.json. Checkout 'https://huggingface.co///checkpoint-301/tree/None' for available files." —— 不行啊,会出错。