level 6
蜜恋甜柔
楼主
我另外新建了一个用其他重绘模型的工作流，发现没有这个好用。这个工作流我按原来的节点重新搭了一遍，结果还是卡在内部模型节点上；LIB 节点是正常的。我把报错信息贴在下面，请大佬们帮我分析一下。
To see the GUI go to: http://127.0.0.1:8188
FETCH DATA from: E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\extension-node-map.json [DONE]
got prompt
Using xformers attention in VAE
Using xformers attention in VAE
VAE load device: cuda:0, offload device: cpu, dtype: torch.float16
CLIP/text encoder model load device: cuda:0, offload device: cpu, current: cpu, dtype: torch.float16
clip missing: ['text_projection.weight']
Requested to load FluxClipModel_
loaded completely 5726.8 4777.53759765625 True
Requested to load AutoencodingEngine
0 models unloaded.
loaded partially 128.0 127.999755859375 0
Exception in thread Thread-11 (<lambda>):
Traceback (most recent call last):
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 347, in _wait
await waiter
asyncio.exceptions.CancelledError
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\ComfyUI-aki-v1.6\python\Lib\threading.py", line 1045, in _bootstrap_inner
self.run()
File "<enhanced_experience vendors.sentry_sdk.integrations.threading>", line 99, in run
File "<enhanced_experience vendors.sentry_sdk.integrations.threading>", line 94, in _run_old_run_func
File "<enhanced_experience vendors.sentry_sdk.utils>", line 1649, in reraise
File "<enhanced_experience vendors.sentry_sdk.integrations.threading>", line 92, in _run_old_run_func
File "E:\ComfyUI-aki-v1.6\python\Lib\threading.py", line 982, in run
self._target(*self._args, **self._kwargs)
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_server.py", line 1616, in <lambda>
threading.Thread(target=lambda: asyncio.run(default_cache_update())).start()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\asyncio\runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\asyncio\runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\asyncio\base_events.py", line 654, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_server.py", line 1602, in default_cache_update
await asyncio.gather(a, b, c, d, e)
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_server.py", line 1589, in get_cache
json_obj = await manager_util.get_data(uri, True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_util.py", line 127, in get_data
json_text = await resp.text()
^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\client_reqrep.py", line 728, in text
await self.read()
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\client_reqrep.py", line 686, in read
self._body = await self.content.read()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 418, in read
block = await self.readany()
^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 440, in readany
await self._wait("readany")
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 346, in _wait
with self._timer:
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\helpers.py", line 685, in __exit__
raise asyncio.TimeoutError from exc_val
TimeoutError
2025年07月16日 03点07分
1
To see the GUI go to: http://127.0.0.1:8188
FETCH DATA from: E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\extension-node-map.json [DONE]
got prompt
Using xformers attention in VAE
Using xformers attention in VAE
VAE load device: cuda:0, offload device: cpu, dtype: torch.float16
CLIP/text encoder model load device: cuda:0, offload device: cpu, current: cpu, dtype: torch.float16
clip missing: ['text_projection.weight']
Requested to load FluxClipModel_
loaded completely 5726.8 4777.53759765625 True
Requested to load AutoencodingEngine
0 models unloaded.
loaded partially 128.0 127.999755859375 0
Exception in thread Thread-11 (<lambda>):
Traceback (most recent call last):
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 347, in _wait
await waiter
asyncio.exceptions.CancelledError
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\ComfyUI-aki-v1.6\python\Lib\threading.py", line 1045, in _bootstrap_inner
self.run()
File "<enhanced_experience vendors.sentry_sdk.integrations.threading>", line 99, in run
File "<enhanced_experience vendors.sentry_sdk.integrations.threading>", line 94, in _run_old_run_func
File "<enhanced_experience vendors.sentry_sdk.utils>", line 1649, in reraise
File "<enhanced_experience vendors.sentry_sdk.integrations.threading>", line 92, in _run_old_run_func
File "E:\ComfyUI-aki-v1.6\python\Lib\threading.py", line 982, in run
self._target(*self._args, **self._kwargs)
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_server.py", line 1616, in <lambda>
threading.Thread(target=lambda: asyncio.run(default_cache_update())).start()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\asyncio\runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\asyncio\runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\asyncio\base_events.py", line 654, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_server.py", line 1602, in default_cache_update
await asyncio.gather(a, b, c, d, e)
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_server.py", line 1589, in get_cache
json_obj = await manager_util.get_data(uri, True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\ComfyUI\custom_nodes\comfyui-manager\glob\manager_util.py", line 127, in get_data
json_text = await resp.text()
^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\client_reqrep.py", line 728, in text
await self.read()
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\client_reqrep.py", line 686, in read
self._body = await self.content.read()
^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 418, in read
block = await self.readany()
^^^^^^^^^^^^^^^^^^^^
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 440, in readany
await self._wait("readany")
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\streams.py", line 346, in _wait
with self._timer:
File "E:\ComfyUI-aki-v1.6\python\Lib\site-packages\aiohttp\helpers.py", line 685, in __exit__
raise asyncio.TimeoutError from exc_val
TimeoutError
