
Commit 7d06a75

Fix bug in getting vocab_size and missing ccl in forward

Signed-off-by: quic-xiyushi <xiyushi@qti.qualcomm.com>

1 parent: c829a0f

File tree

2 files changed: +4 −1 lines

QEfficient/transformers/models/modeling_auto.py
1 addition, 1 deletion

@@ -1081,7 +1081,7 @@ def export(
             output_names=output_names["lang"],
             dynamic_axes=dynamic_axes["lang"],
             continuous_batching=self.continuous_batching,
-            vocab_size=self.config.vocab_size,
+            vocab_size=self.model.language_model.config.vocab_size,
             qaic_config=self.lang_model.model.qaic_config,
         )
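The gist of this first change, in a minimal self-contained sketch: for a multimodal wrapper, the top-level config may not carry a usable vocab_size, so the export path now reads it from the language sub-model's own config. The class and attribute names below are illustrative stand-ins, not QEfficient APIs, and the "before" behaviour shown in the comments is an assumption about the bug, not taken from the commit itself.

# Minimal sketch of the nested-config lookup this commit switches to.
# All names here are illustrative stand-ins, not actual QEfficient classes.
from dataclasses import dataclass, field

@dataclass
class LangConfig:
    vocab_size: int = 151_936          # only the language sub-model knows this

@dataclass
class CompositeConfig:
    text_config: LangConfig = field(default_factory=LangConfig)
    # note: no reliable top-level vocab_size attribute

@dataclass
class LanguageModel:
    config: LangConfig = field(default_factory=LangConfig)

@dataclass
class MultimodalModel:
    config: CompositeConfig = field(default_factory=CompositeConfig)
    language_model: LanguageModel = field(default_factory=LanguageModel)

model = MultimodalModel()
# Before the fix: self.config.vocab_size may be missing (or wrong) on a
# composite multimodal config. After the fix: read it from the language
# model's own config, as the diff above does.
vocab_size = model.language_model.config.vocab_size
print(vocab_size)  # 151936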

QEfficient/transformers/sampler/sampler.py
3 additions, 0 deletions

@@ -105,6 +105,7 @@ def sampler_forward(
     attention_mask: Optional[torch.Tensor] = None,
     position_ids: Optional[torch.LongTensor] = None,
     past_key_values: Optional[Union[Cache, List[torch.FloatTensor]]] = None,
+    comp_ctx_lengths: Optional[torch.LongTensor] = None,
     batch_index: Optional[torch.LongTensor] = None,
     inputs_embeds: Optional[torch.FloatTensor] = None,
     labels: Optional[torch.LongTensor] = None,
@@ -187,6 +188,7 @@ def sampler_forward(
         position_ids=position_ids,
         image_idx=image_idx,
         past_key_values=past_key_values,
+        comp_ctx_lengths=comp_ctx_lengths,
     )
     if batch_index is not None:
         forward_kwargs["batch_index"] = batch_index
@@ -201,6 +203,7 @@ def sampler_forward(
         attention_mask=attention_mask,
         position_ids=position_ids,
         past_key_values=past_key_values,
+        comp_ctx_lengths=comp_ctx_lengths,
         batch_index=batch_index,
         inputs_embeds=inputs_embeds,
         use_cache=use_cache,
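The sampler change is pure plumbing: sampler_forward gains a comp_ctx_lengths argument and forwards it to the wrapped model's forward, which previously never received it through this path. A minimal sketch of that pass-through pattern follows; the function bodies are illustrative stand-ins, not the actual sampler code.

# Minimal sketch of the kwarg pass-through added in sampler.py; the functions
# below are illustrative stand-ins, not the actual QEfficient sampler code.
from typing import Optional

import torch


def model_forward(
    input_ids: torch.LongTensor,
    comp_ctx_lengths: Optional[torch.LongTensor] = None,
):
    # The wrapped model is the real consumer of comp_ctx_lengths ("ccl").
    return {"batch": input_ids.shape[0], "ccl": comp_ctx_lengths}


def sampler_forward(
    input_ids: torch.LongTensor,
    comp_ctx_lengths: Optional[torch.LongTensor] = None,
):
    # Without the added parameter the wrapper could not accept the kwarg, so the
    # inner model always saw comp_ctx_lengths=None; it is now threaded through
    # explicitly, mirroring the call sites in the diff above.
    return model_forward(input_ids=input_ids, comp_ctx_lengths=comp_ctx_lengths)


out = sampler_forward(torch.tensor([[1, 2, 3]]), comp_ctx_lengths=torch.tensor([3]))
print(out["ccl"])  # tensor([3])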
