Model virtual environment under Docker -- qwen2.5-omni launch error
System Info / 系統信息
```
2025-04-20 18:07:09,455 xinference.api.restful_api 1 ERROR [address=0.0.0.0:33499, pid=11611] cannot import name 'Qwen2_5OmniForConditionalGeneration' from 'transformers' (/usr/local/lib/python3.10/dist-packages/transformers/__init__.py)
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/xinference/api/restful_api.py", line 1022, in launch_model
    model_uid = await (await self._get_supervisor_ref()).launch_builtin_model(
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 262, in send
    return self._process_result_message(result)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 111, in _process_result_message
    raise message.as_instanceof_cause()
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 689, in send
    result = await self._run_coro(message.message_id, coro)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 389, in _run_coro
    return await coro
  File "/usr/local/lib/python3.10/dist-packages/xoscar/api.py", line 384, in on_receive
    return await super().on_receive(message)  # type: ignore
  File "xoscar/core.pyx", line 564, in on_receive
    raise ex
  File "xoscar/core.pyx", line 526, in xoscar.core._BaseActor.on_receive
    async with self._lock:
  File "xoscar/core.pyx", line 527, in xoscar.core._BaseActor.on_receive
    with debug_async_timeout('actor_lock_timeout',
  File "xoscar/core.pyx", line 532, in xoscar.core._BaseActor.on_receive
    result = await result
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/supervisor.py", line 1199, in launch_builtin_model
    await _launch_model()
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/supervisor.py", line 1134, in _launch_model
    subpool_address = await _launch_one_model(
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/supervisor.py", line 1088, in _launch_one_model
    subpool_address = await worker_ref.launch_builtin_model(
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 262, in send
    return self._process_result_message(result)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 111, in _process_result_message
    raise message.as_instanceof_cause()
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 689, in send
    result = await self._run_coro(message.message_id, coro)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 389, in _run_coro
    return await coro
  File "/usr/local/lib/python3.10/dist-packages/xoscar/api.py", line 384, in on_receive
    return await super().on_receive(message)  # type: ignore
  File "xoscar/core.pyx", line 564, in on_receive
    raise ex
  File "xoscar/core.pyx", line 526, in xoscar.core._BaseActor.on_receive
    async with self._lock:
  File "xoscar/core.pyx", line 527, in xoscar.core._BaseActor.on_receive
    with debug_async_timeout('actor_lock_timeout',
  File "xoscar/core.pyx", line 532, in xoscar.core._BaseActor.on_receive
    result = await result
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/utils.py", line 93, in wrapped
    ret = await func(*args, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/worker.py", line 1135, in launch_builtin_model
    await model_ref.load()
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 262, in send
    return self._process_result_message(result)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 111, in _process_result_message
    raise message.as_instanceof_cause()
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 689, in send
    result = await self._run_coro(message.message_id, coro)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 389, in _run_coro
    return await coro
  File "/usr/local/lib/python3.10/dist-packages/xoscar/api.py", line 384, in on_receive
    return await super().on_receive(message)  # type: ignore
  File "xoscar/core.pyx", line 564, in on_receive
    raise ex
  File "xoscar/core.pyx", line 526, in xoscar.core._BaseActor.on_receive
    async with self._lock:
  File "xoscar/core.pyx", line 527, in xoscar.core._BaseActor.on_receive
    with debug_async_timeout('actor_lock_timeout',
  File "xoscar/core.pyx", line 532, in xoscar.core._BaseActor.on_receive
    result = await result
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/model.py", line 471, in load
    await asyncio.to_thread(self._model.load)
  File "/usr/lib/python3.10/asyncio/threads.py", line 25, in to_thread
    return await loop.run_in_executor(None, func_call)
  File "/usr/lib/python3.10/concurrent/futures/thread.py", line 58, in run
    result = self.fn(*self.args, **self.kwargs)
  File "/usr/local/lib/python3.10/dist-packages/xinference/model/llm/transformers/qwen-omni.py", line 70, in load
    from transformers import (
ImportError: [address=0.0.0.0:33499, pid=11611] cannot import name 'Qwen2_5OmniForConditionalGeneration' from 'transformers' (/usr/local/lib/python3.10/dist-packages/transformers/__init__.py)
```
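For context, `Qwen2_5OmniForConditionalGeneration` only exists in sufficiently recent transformers builds, so the transformers bundled in this image appears to predate Qwen2.5-Omni support. A minimal diagnostic sketch (not part of the original report) that can be run inside the container to confirm this:

```python
# Diagnostic sketch: check whether the transformers build in this image
# ships the Qwen2.5-Omni class that xinference tries to import.
import transformers

print("transformers version:", transformers.__version__)
try:
    from transformers import Qwen2_5OmniForConditionalGeneration  # noqa: F401
    print("Qwen2.5-Omni support: available")
except ImportError:
    print("Qwen2.5-Omni support: missing -- a newer transformers build "
          "that includes Qwen2_5OmniForConditionalGeneration is required")
```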
Running Xinference with Docker? / 是否使用 Docker 运行 Xinference?
- [x] docker / docker
- [ ] pip install / 通过 pip install 安装
- [ ] installation from source / 从源码安装
Version info / 版本信息
Docker -- v1.5.0.post1
The command used to start Xinference / 用以启动 xinference 的命令
Started via Docker Compose; docker-compose.yaml:
```yaml
version: '3.3'
services:
  xinference:
    container_name: xinference
    #image: registry.cn-hangzhou.aliyuncs.com/xprobe_xinference/xinference:v1.3.1.post1
    image: registry.cn-hangzhou.aliyuncs.com/xxx_ai/xinference:v1.5.0.post1
    ports:
      - 9997:9997
    restart: always
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    command: xinference-local -H 0.0.0.0
    environment:
      - XINFERENCE_MODEL_SRC=modelscope
      - XINFERENCE_HOME=/root/.xinference
      - XINFERENCE_ENABLE_VIRTUAL_ENV=1
    volumes:
      - /home/XXXXX/project/xinference/xinfer:/root/.xinference
      - /home/XXXXX/project/xinference/xinfer/modelscope/:/root/.cache/modelscope
    networks:
      - xinference_net
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9997"]
      interval: 30s
      timeout: 10s
      retries: 3
    shm_size: 128g
networks:
  xinference_net:
    driver: bridge
```
Reproduction / 复现过程
Find qwen2.5-omni on the web UI and click launch; the launch fails with the error above.
Expected behavior / 期待表现
1. The model launches normally and the virtual environment is installed.
2. A pip index (mirror) source can be configured (see the sketch below).
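On point 2, a hedged sketch: if the `XINFERENCE_ENABLE_VIRTUAL_ENV` bootstrap delegates installs to pip/uv (both are present in the image, per the version output below), their standard index environment variables should redirect installs to a mirror. Whether this xinference version honors them is an assumption to verify against the docs; the mirror URL is only an example:

```yaml
    # Hypothetical addition to the compose file above -- assumes the
    # virtual-env bootstrap shells out to pip/uv, which honor these
    # standard index variables.
    environment:
      - XINFERENCE_MODEL_SRC=modelscope
      - XINFERENCE_HOME=/root/.xinference
      - XINFERENCE_ENABLE_VIRTUAL_ENV=1
      - PIP_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple  # pip mirror (example URL)
      - UV_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple   # uv mirror (example URL)
```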
Can you check the versions of these two?

```
pip show xinference
pip show uv
```
```
$ pip show xinference
Name: xinference
Version: 1.5.0.post1
Summary: Model Serving Made Easy
Home-page: https://github.com/xorbitsai/inference
Author: Qin Xuye
Author-email: [email protected]
License: Apache License 2.0
Location: /usr/local/lib/python3.10/dist-packages
Requires: aioprometheus, async-timeout, click, fastapi, gradio, huggingface-hub, modelscope, nvidia-ml-py, openai, passlib, peft, pillow, pydantic, pynvml, python-jose, requests, setproctitle, sse_starlette, tabulate, timm, torch, tqdm, typing_extensions, uvicorn, xoscar
Required-by:

$ pip show uv
Name: uv
Version: 0.6.14
Summary: An extremely fast Python package and project manager, written in Rust.
Home-page: https://pypi.org/project/uv/
Author: uv
Author-email: "Astral Software Inc." [email protected]
License: MIT OR Apache-2.0
Location: /usr/local/lib/python3.10/dist-packages
Requires:
Required-by:
```
With the virtual env feature enabled, the log should contain messages about it; can you share that part of your log?
```
2025-04-20 22:08:44,801 xinference.api.restful_api 1 INFO Starting Xinference at endpoint: http://0.0.0.0:9997
2025-04-20 22:08:44,891 uvicorn.error 1 INFO Uvicorn running on http://0.0.0.0:9997 (Press CTRL+C to quit)
2025-04-20 22:08:45,129 xinference.core.supervisor 142 DEBUG [request b343f2f6-1e6e-11f0-a30d-0242ac1d0002] Enter list_model_registrations, args: <xinference.core.supervisor.SupervisorActor object at 0x733d7f916520>,LLM, kwargs: detailed=True
2025-04-20 22:08:45,287 xinference.core.supervisor 142 DEBUG [request b343f2f6-1e6e-11f0-a30d-0242ac1d0002] Leave list_model_registrations, elapsed time: 0 s
2025-04-20 22:09:16,487 xinference.core.supervisor 142 DEBUG [request c5f4bc78-1e6e-11f0-a30d-0242ac1d0002] Enter query_engines_by_model_name, args: <xinference.core.supervisor.SupervisorActor object at 0x733d7f916520>,qwen2-moe-instruct, kwargs:
2025-04-20 22:09:16,488 xinference.core.worker 142 DEBUG [request c5f4e6f8-1e6e-11f0-a30d-0242ac1d0002] Enter query_engines_by_model_name, args: <xinference.core.worker.WorkerActor object at 0x733d7f208680>,qwen2-moe-instruct, kwargs:
2025-04-20 22:09:16,488 xinference.core.worker 142 DEBUG [request c5f4e6f8-1e6e-11f0-a30d-0242ac1d0002] Leave query_engines_by_model_name, elapsed time: 0 s
2025-04-20 22:09:16,489 xinference.core.supervisor 142 DEBUG [request c5f4bc78-1e6e-11f0-a30d-0242ac1d0002] Leave query_engines_by_model_name, elapsed time: 0 s
2025-04-20 22:09:25,397 xinference.core.supervisor 142 DEBUG [request cb44481a-1e6e-11f0-a30d-0242ac1d0002] Enter query_engines_by_model_name, args: <xinference.core.supervisor.SupervisorActor object at 0x733d7f916520>,qwen2.5-omni, kwargs:
2025-04-20 22:09:25,398 xinference.core.worker 142 DEBUG [request cb4470c4-1e6e-11f0-a30d-0242ac1d0002] Enter query_engines_by_model_name, args: <xinference.core.worker.WorkerActor object at 0x733d7f208680>,qwen2.5-omni, kwargs:
2025-04-20 22:09:25,398 xinference.core.worker 142 DEBUG [request cb4470c4-1e6e-11f0-a30d-0242ac1d0002] Leave query_engines_by_model_name, elapsed time: 0 s
2025-04-20 22:09:25,399 xinference.core.supervisor 142 DEBUG [request cb44481a-1e6e-11f0-a30d-0242ac1d0002] Leave query_engines_by_model_name, elapsed time: 0 s
2025-04-20 22:09:32,940 xinference.core.supervisor 142 DEBUG Enter launch_builtin_model, model_uid: qwen2.5-omni, model_name: qwen2.5-omni, model_size: 7, model_format: pytorch, quantization: none, replica: 1, enable_xavier: False, kwargs: {}
2025-04-20 22:09:32,942 xinference.core.worker 142 DEBUG Enter get_model_count, args: <xinference.core.worker.WorkerActor object at 0x733d7f208680>, kwargs:
2025-04-20 22:09:32,942 xinference.core.worker 142 DEBUG Leave get_model_count, elapsed time: 0 s
2025-04-20 22:09:32,943 xinference.core.worker 142 INFO [request cfc3bec0-1e6e-11f0-a30d-0242ac1d0002] Enter launch_builtin_model, args: <xinference.core.worker.WorkerActor object at 0x733d7f208680>, kwargs: model_uid=qwen2.5-omni-0,model_name=qwen2.5-omni,model_size_in_billions=7,model_format=pytorch,quantization=none,model_engine=Transformers,model_type=LLM,n_gpu=auto,request_limits=None,peft_model_config=None,gpu_idx=None,download_hub=None,model_path=None,xavier_config=None
2025-04-20 22:09:32,945 xinference.core.worker 142 DEBUG GPU selected: [0] for model qwen2.5-omni-0
2025-04-20 22:09:33,687 xinference.model.llm.core 142 DEBUG Launching qwen2.5-omni-0 with Qwen2_5OmniChatModel
2025-04-20 22:09:33,688 xinference.core.progress_tracker 142 DEBUG Setting progress, request id: launching-qwen2.5-omni-0, progress: 0.0
2025-04-20 22:09:33,688 xinference.model.llm.llm_family 142 INFO Caching from Modelscope: Qwen/Qwen2.5-Omni-7B
2025-04-20 22:09:33,733 transformers.utils.import_utils 142 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:33,734 transformers.utils.import_utils 142 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:33,742 xinference.model.llm.llm_family 142 INFO Cache /root/.xinference/cache/qwen2_5-omni-pytorch-7b exists
2025-04-20 22:09:33,744 xinference.core.progress_tracker 142 DEBUG Setting progress, request id: launching-qwen2.5-omni-0, progress: 0.8
2025-04-20 22:09:33,747 xinference.core.progress_tracker 142 DEBUG Setting progress, request id: launching-qwen2.5-omni-0, progress: 0.8
2025-04-20 22:09:35,621 transformers.utils.import_utils 175 DEBUG Detected accelerate version: 0.34.0
2025-04-20 22:09:35,622 transformers.utils.import_utils 175 DEBUG Detected bitsandbytes version: 0.45.5
2025-04-20 22:09:35,624 transformers.utils.import_utils 175 DEBUG Detected coloredlogs version: 15.0.1
2025-04-20 22:09:35,625 transformers.utils.import_utils 175 DEBUG Detected datasets version: 2.21.0
2025-04-20 22:09:35,625 transformers.utils.import_utils 175 DEBUG Detected g2p_en version: 2.1.0
2025-04-20 22:09:35,626 transformers.utils.import_utils 175 DEBUG Detected jieba version: 0.42.1
2025-04-20 22:09:35,626 transformers.utils.import_utils 175 DEBUG Detected jinja2 version: 3.1.6
2025-04-20 22:09:35,627 transformers.utils.import_utils 175 DEBUG Detected librosa version: 0.11.0
2025-04-20 22:09:35,627 transformers.utils.import_utils 175 DEBUG Detected nltk version: 3.9.1
2025-04-20 22:09:35,628 transformers.utils.import_utils 175 DEBUG Detected openai version: 1.75.0
2025-04-20 22:09:35,629 transformers.utils.import_utils 175 DEBUG Detected optimum version: 1.24.0
2025-04-20 22:09:35,630 transformers.utils.import_utils 175 DEBUG Detected pandas version: 2.2.2
2025-04-20 22:09:35,631 transformers.utils.import_utils 175 DEBUG Detected peft version: 0.15.2
2025-04-20 22:09:35,631 transformers.utils.import_utils 175 DEBUG Detected phonemizer version: N/A
2025-04-20 22:09:35,632 transformers.utils.import_utils 175 DEBUG Detected psutil version: 7.0.0
2025-04-20 22:09:35,632 transformers.utils.import_utils 175 DEBUG Detected pygments version: 2.19.1
2025-04-20 22:09:35,633 transformers.utils.import_utils 175 DEBUG Detected sacremoses version: 0.1.1
2025-04-20 22:09:35,633 transformers.utils.import_utils 175 DEBUG Detected safetensors version: 0.4.4
2025-04-20 22:09:35,636 transformers.utils.import_utils 175 DEBUG Detected scipy version: 1.15.2
2025-04-20 22:09:35,636 transformers.utils.import_utils 175 DEBUG Detected sentencepiece version: 0.2.0
2025-04-20 22:09:35,637 transformers.utils.import_utils 175 DEBUG Detected gguf version: 0.16.2
2025-04-20 22:09:35,638 transformers.utils.import_utils 175 DEBUG Detected soundfile version: 0.13.1
2025-04-20 22:09:35,639 transformers.utils.import_utils 175 DEBUG Detected spacy version: 3.8.5
2025-04-20 22:09:35,640 transformers.utils.import_utils 175 DEBUG Detected timm version: 1.0.15
2025-04-20 22:09:35,640 transformers.utils.import_utils 175 DEBUG Detected tokenizers version: 0.21.1
2025-04-20 22:09:35,641 transformers.utils.import_utils 175 DEBUG Detected torchaudio version: 2.6.0
2025-04-20 22:09:35,641 transformers.utils.import_utils 175 DEBUG Detected torchvision version: 0.21.0
2025-04-20 22:09:35,642 transformers.utils.import_utils 175 DEBUG Detected num2words version: 0.5.14
2025-04-20 22:09:35,642 transformers.utils.import_utils 175 DEBUG Detected tiktoken version: 0.7.0
2025-04-20 22:09:35,643 transformers.utils.import_utils 175 DEBUG Detected triton version: 3.2.0
2025-04-20 22:09:35,644 transformers.utils.import_utils 175 DEBUG Detected rich version: 13.9.4
2025-04-20 22:09:35,644 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,647 transformers.utils.import_utils 175 DEBUG Detected PIL version 10.4.0
2025-04-20 22:09:35,665 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,667 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,668 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,669 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,670 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,671 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,672 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,673 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:35,674 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
2025-04-20 22:09:36,472 transformers.utils.import_utils 175 DEBUG Detected torch version: 2.6.0
INFO 04-20 22:09:37 [__init__.py:239] Automatically detected platform cuda.
2025-04-20 22:09:39,097 xinference.core.model 175 DEBUG Starting ModelActor at 0.0.0.0:39069, uid: b'qwen2.5-omni-0'
2025-04-20 22:09:39,097 xinference.core.model 175 WARNING Currently for multimodal models, xinference only supports qwen-vl-chat, cogvlm2, glm-4v, MiniCPM-V-2.6 for batching. Your model qwen2.5-omni with model family None is disqualified.
2025-04-20 22:09:39,098 xinference.core.model 175 INFO Start requests handler.
2025-04-20 22:09:39,105 xinference.core.worker 142 ERROR Failed to load model qwen2.5-omni-0
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/worker.py", line 1135, in launch_builtin_model
    await model_ref.load()
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 262, in send
    return self._process_result_message(result)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/context.py", line 111, in _process_result_message
    raise message.as_instanceof_cause()
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 689, in send
    result = await self._run_coro(message.message_id, coro)
  File "/usr/local/lib/python3.10/dist-packages/xoscar/backends/pool.py", line 389, in _run_coro
    return await coro
  File "/usr/local/lib/python3.10/dist-packages/xoscar/api.py", line 384, in on_receive
    return await super().on_receive(message)  # type: ignore
  File "xoscar/core.pyx", line 564, in on_receive
    raise ex
  File "xoscar/core.pyx", line 526, in xoscar.core._BaseActor.on_receive
    async with self._lock:
  File "xoscar/core.pyx", line 527, in xoscar.core._BaseActor.on_receive
    with debug_async_timeout('actor_lock_timeout',
  File "xoscar/core.pyx", line 532, in xoscar.core._BaseActor.on_receive
    result = await result
  File "/usr/local/lib/python3.10/dist-packages/xinference/core/model.py", line 471, in load
    await asyncio.to_thread(self._model.load)
  File "/usr/lib/python3.10/asyncio/threads.py", line 25, in to_thread
    return await loop.run_in_executor(None, func_call)
  File "/usr/lib/python3.10/concurrent/futures/thread.py", line 58, in run
    result = self.fn(*self.args, **self.kwargs)
  File "/usr/local/lib/python3.10/dist-packages/xinference/model/llm/transformers/qwen-omni.py", line 70, in load
    from transformers import (
ImportError: [address=0.0.0.0:39069, pid=175] cannot import name 'Qwen2_5OmniForConditionalGeneration' from 'transformers' (/usr/local/lib/python3.10/dist-packages/transformers/__init__.py)
```
Judging from this debug log, it seems the virtual environment was never created...
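As a quick on-disk check (a hedged sketch, since the exact virtual-env layout under XINFERENCE_HOME is version-dependent; this only reports whatever venv-style directories actually exist):

```python
# Hedged diagnostic: look for per-model virtual environments under
# XINFERENCE_HOME by finding directories that contain a pyvenv.cfg.
import os

home = os.environ.get("XINFERENCE_HOME", os.path.expanduser("~/.xinference"))
venvs = []
for root, dirs, files in os.walk(home):
    if "pyvenv.cfg" in files:
        venvs.append(root)
        dirs[:] = []  # found a venv; no need to descend into it

print(f"Scanned {home}; virtual envs found: {venvs or 'none'}")
```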
Try starting with --log-level debug added.
The log above was already captured with --log-level debug 😂
This issue is stale because it has been open for 7 days with no activity.
This issue was closed because it has been inactive for 5 days since being marked as stale.