Commit

delete unused code

DesmonDay committed Dec 6, 2024
1 parent d3d5a7f commit 88ba45e
Showing 1 changed file with 0 additions and 16 deletions.
llm/run_embedding.py (16 changes: 0 additions & 16 deletions)
@@ -106,22 +106,6 @@ def main():
 
     model_config.seq_length = data_args.max_length
     model_config.embedding_negatives_cross_device = embedding_args.embedding_negatives_cross_device
-    # Config for model using long sequence strategy
-    if model_args.use_long_sequence_strategies:
-        data_args.scaled_max_length = int(data_args.max_length * model_args.rope_scaling_factor)
-        model_config.use_long_sequence_strategies = True
-        model_config.long_sequence_strategy_type = model_args.strategy_type
-        model_config.long_sequence_strategy_name = model_args.strategy_name
-        model_config.rope_scaling_factor = model_args.rope_scaling_factor
-        model_config.long_sequence_init_args = {
-            "dim": int(model_config.hidden_size / model_config.num_attention_heads),
-            "max_position_embeddings": data_args.scaled_max_length,  # extended context window
-            "base": model_config.rope_theta,
-            "scaling_factor": model_args.rope_scaling_factor,
-        }
-        if model_args.strategy_name == "YaRNScalingRotaryEmbedding":
-            model_config.long_sequence_init_args["original_max_position_embeddings"] = data_args.max_length
-
     logger.info(f"Final model config: {model_config}")
 
     model_class = Qwen2SentenceEmbedding
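For context, the deleted branch assembled the rotary-embedding init arguments used when a long-sequence strategy is enabled: it scaled the context window by the RoPE scaling factor and, for YaRN, also recorded the pre-scaling context length. Below is a minimal self-contained sketch of that construction; the helper name and the concrete numbers are illustrative assumptions, not code from this repository.

    # Sketch of the configuration logic the removed branch performed.
    # build_long_sequence_init_args and all concrete values are hypothetical.
    def build_long_sequence_init_args(
        hidden_size: int,
        num_attention_heads: int,
        rope_theta: float,
        max_length: int,
        rope_scaling_factor: float,
        strategy_name: str,
    ) -> dict:
        # Extended context window: original max length times the RoPE scaling factor.
        scaled_max_length = int(max_length * rope_scaling_factor)
        init_args = {
            "dim": hidden_size // num_attention_heads,  # per-head dimension
            "max_position_embeddings": scaled_max_length,
            "base": rope_theta,
            "scaling_factor": rope_scaling_factor,
        }
        # YaRN additionally needs the original (pre-scaling) context length.
        if strategy_name == "YaRNScalingRotaryEmbedding":
            init_args["original_max_position_embeddings"] = max_length
        return init_args

    # Example: a 4k-context model scaled 4x to a 16k window (illustrative numbers).
    print(build_long_sequence_init_args(
        hidden_size=3584, num_attention_heads=28, rope_theta=1000000.0,
        max_length=4096, rope_scaling_factor=4.0,
        strategy_name="YaRNScalingRotaryEmbedding",
    ))
    # -> {'dim': 128, 'max_position_embeddings': 16384, 'base': 1000000.0,
    #     'scaling_factor': 4.0, 'original_max_position_embeddings': 4096}

Since run_embedding.py never enabled this branch, removing it leaves the embedding fine-tuning path unchanged.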
