Upload folder using huggingface_hub
Browse files
modeling_midashenglm.py +2 -3
modeling_midashenglm.py
CHANGED
@@ -16,8 +16,7 @@ from transformers.models.qwen2_5_omni.configuration_qwen2_5_omni import (
|
|
16 |
from transformers.models.qwen2_5_omni.modeling_qwen2_5_omni import (
|
17 |
Qwen2_5OmniThinkerTextModel,
|
18 |
)
|
19 |
- from transformers.utils import
|
20 |
- from typing_extensions import Unpack
|
21 |
|
22 |
from .configuration_midashenglm import DashengConfig, MiDashengLMConfig
|
23 |
|
@@ -425,7 +424,7 @@ class Qwen25OmniThinkerTextOnlyDecoder(PreTrainedModel, GenerationMixin):
|
|
425 |
output_hidden_states: Optional[bool] = None,
|
426 |
cache_position: Optional[torch.LongTensor] = None,
|
427 |
labels: Optional[torch.Tensor] = None,
|
428 |
- **kwargs
|
429 |
) -> Union[Tuple, Qwen25OmniTextModelOutput]:
|
430 |
if attention_mask is not None and position_ids is None:
|
431 |
position_ids = (
|
|
|
16 |
from transformers.models.qwen2_5_omni.modeling_qwen2_5_omni import (
|
17 |
Qwen2_5OmniThinkerTextModel,
|
18 |
)
|
19 |
+ from transformers.utils import can_return_tuple
|
|
|
20 |
|
21 |
from .configuration_midashenglm import DashengConfig, MiDashengLMConfig
|
22 |
|
|
|
424 |
output_hidden_states: Optional[bool] = None,
|
425 |
cache_position: Optional[torch.LongTensor] = None,
|
426 |
labels: Optional[torch.Tensor] = None,
|
427 |
+ **kwargs,
|
428 |
) -> Union[Tuple, Qwen25OmniTextModelOutput]:
|
429 |
if attention_mask is not None and position_ids is None:
|
430 |
position_ids = (
|