Description
Hi,
I encountered an error while running the Gradio demo (demo.py). Here is the traceback:
```
Traceback (most recent call last):
  File "/path/to/gradio/queueing.py", line 624, in process_events
    response = await route_utils.call_process_api(
  File "/path/to/gradio/route_utils.py", line 323, in call_process_api
    output = await app.get_blocks().process_api(
  File "/path/to/gradio/blocks.py", line 2015, in process_api
    result = await self.call_function(
  File "/path/to/gradio/blocks.py", line 1562, in call_function
    prediction = await anyio.to_thread.run_sync(  # type: ignore
  File "/path/to/anyio/to_thread.py", line 56, in run_sync
    return await get_async_backend().run_sync_in_worker_thread(
  File "/path/to/anyio/_backends/_asyncio.py", line 2441, in run_sync_in_worker_thread
    return await future
  File "/path/to/anyio/_backends/_asyncio.py", line 943, in run
    result = context.run(func, *args)
  File "/path/to/gradio/utils.py", line 865, in wrapper
    response = f(*args, **kwargs)
  File "/path/to/demo.py", line 174, in main_run
    pred_images = model.novel_view_sample(batch, 4)
  File "/path/to/torch/utils/_contextlib.py", line 116, in decorate_context
    return func(*args, **kwargs)
  File "/path/to/nvsadapter.py", line 398, in novel_view_sample
    c, uc = self.conditioner.get_unconditional_conditioning(batch)
  File "/path/to/conditioner.py", line 69, in get_unconditional_conditioning
    c, uc = conditioner.get_unconditional_conditioning(
  File "/path/to/modules.py", line 184, in get_unconditional_conditioning
    c = self(batch_c)
  File "/path/to/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/path/to/torch/nn/modules/module.py", line 1747, in _call_impl
    return forward_call(*args, **kwargs)
  File "/path/to/modules.py", line 141, in forward
    emb_out = embedder(batch[embedder.input_key])
  File "/path/to/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/path/to/torch/nn/modules/module.py", line 1747, in _call_impl
    return forward_call(*args, **kwargs)
  File "/path/to/modules.py", line 553, in forward
    z = self.encode_with_transformer(tokens.to(self.device))
  File "/path/to/modules.py", line 560, in encode_with_transformer
    x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask)
  File "/path/to/modules.py", line 575, in text_transformer_forward
    x = r(x, attn_mask=attn_mask)
  File "/path/to/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/path/to/torch/nn/modules/module.py", line 1747, in _call_impl
    return forward_call(*args, **kwargs)
  File "/path/to/open_clip/transformer.py", line 263, in forward
    x = q_x + self.ls_1(self.attention(q_x=self.ln_1(q_x), k_x=k_x, v_x=v_x, attn_mask=attn_mask))
  File "/path/to/open_clip/transformer.py", line 250, in attention
    return self.attn(
  File "/path/to/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/path/to/torch/nn/modules/module.py", line 1747, in _call_impl
    return forward_call(*args, **kwargs)
  File "/path/to/torch/nn/modules/activation.py", line 1368, in forward
    attn_output, attn_output_weights = F.multi_head_attention_forward(
  File "/path/to/torch/nn/functional.py", line 6131, in multi_head_attention_forward
    raise RuntimeError(
RuntimeError: The shape of the 2D attn_mask is torch.Size([77, 77]), but should be (1, 1).
```
There seems to be a shape mismatch with attn_mask inside open_clip's text transformer: the 77x77 mask matches CLIP's 77-token text context, but the attention layer expects a (1, 1) mask, as if it were reading the input with the batch and sequence dimensions swapped. Could you please provide guidance on how to resolve this issue?
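In case it helps narrow things down, here is a minimal sketch of what I think is happening (my own assumption about the mechanism, not taken from this repo; the dimensions are illustrative): a torch.nn.MultiheadAttention built with batch_first=True, when fed a sequence-first (seq, batch, dim) tensor together with the usual (77, 77) causal mask, fails with exactly this message.

```python
import torch
import torch.nn as nn

# Illustrative values: 77 is CLIP's text context length;
# embed dim and head count are assumptions, not the repo's config.
seq_len, batch, dim, heads = 77, 1, 1024, 16

# A layer that reads its input as (batch, seq, dim).
attn = nn.MultiheadAttention(dim, heads, batch_first=True)

# Causal mask of shape (77, 77), like open_clip's model.attn_mask.
mask = torch.full((seq_len, seq_len), float("-inf")).triu_(1)

# Sequence-first (seq, batch, dim) tensor, the layout produced if the
# caller permutes to LND before the transformer blocks. A batch-first
# layer misreads it as batch=77, seq=1, so the expected 2D mask
# shape becomes (1, 1).
x = torch.randn(seq_len, batch, dim)

try:
    attn(x, x, x, attn_mask=mask)
except RuntimeError as e:
    print(e)
    # The shape of the 2D attn_mask is torch.Size([77, 77]),
    # but should be (1, 1).
```

If that is indeed the cause, it would point to a disagreement between the installed open_clip_torch version's expected input layout and the layout the conditioning code produces, but I have not verified this.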
Thank you!