
Commit ff1c72b
Parent: 7f3e5f5

remove debug logs

2 files changed: 0 additions & 16 deletions

src/transformers/models/gpt2/modeling_gpt2_moreh.py (0 additions & 7 deletions)
@@ -1302,13 +1302,6 @@ class GPT2LMHeadModelMoreh(GPT2PreTrainedModel):
 
     def __init__(self, config):
         super().__init__(config)
-        print("GPT2LMHeadModelMoreh ##################################")
-        if config.moreh_config is not None:
-            print("config.moreh_config")
-            for key, value in config.moreh_config.items():
-                print(f"\t {key}, {value}")
-        else:
-            print("config.moreh_config is None")
         self.transformer = GPT2Model(config)
         self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
 
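This hunk (and the matching one in the Mistral model below) deletes an ad-hoc dump of config.moreh_config from __init__. If that diagnostic is ever wanted again, a minimal sketch of keeping it behind the transformers logging utilities instead of bare print calls; the helper name and logger wiring here are illustrative assumptions, not part of this commit:

    from transformers.utils import logging

    # Module-level logger, following the usual transformers convention
    # (assumption: this module does not already define one).
    logger = logging.get_logger(__name__)

    def _log_moreh_config(config):
        # Hypothetical helper: reproduces the removed prints at debug level,
        # so the dump only appears when debug verbosity is enabled.
        if config.moreh_config is not None:
            logger.debug("config.moreh_config")
            for key, value in config.moreh_config.items():
                logger.debug("\t %s, %s", key, value)
        else:
            logger.debug("config.moreh_config is None")

Called from __init__, this stays silent by default and surfaces under TRANSFORMERS_VERBOSITY=debug, which is usually preferable to deleting and re-adding prints.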

src/transformers/models/mistral/modeling_mistral_moreh.py (0 additions & 9 deletions)
@@ -1024,7 +1024,6 @@ def forward(
 
             hidden_states = layer_outputs[0]
             if layer_idx in self.moreh_pipeline_layers:
-                print(f"Set pipe in mistral L : {layer_idx}")
                 hidden_states = torch.moreh.pipeline_assign(hidden_states)
 
             if use_cache:
@@ -1165,14 +1164,6 @@ class MistralForCausalLMMoreh(MistralPreTrainedModel):
 
     def __init__(self, config):
         super().__init__(config)
-        print("MistralForCausalLMMoreh #########################################")
-        if config.moreh_config is not None:
-            print("config.moreh_config")
-            for key, value in config.moreh_config.items():
-                print(f"\t {key}, {value}")
-        else:
-            print("config.moreh_config is None")
-
         self.model = MistralModel(config)
         self.vocab_size = config.vocab_size
         self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
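The first hunk in this file also drops a per-layer trace marking where torch.moreh.pipeline_assign splits the model into pipeline stages. A hedged sketch of retaining that visibility at debug verbosity instead (assumes the logger wiring from the sketch above; self.moreh_pipeline_layers and torch.moreh.pipeline_assign are taken from this diff and appear to be Moreh-specific extensions):

    if layer_idx in self.moreh_pipeline_layers:
        # Not part of this commit: log the pipeline-stage boundary at debug
        # level rather than printing unconditionally.
        logger.debug("Set pipe in mistral L : %s", layer_idx)
        hidden_states = torch.moreh.pipeline_assign(hidden_states)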
