Description
[ INFO]: --- LOADING TASKS --- (pipeline.py:205)
[2025-02-19 19:35:26,400] [ WARNING]: If you want to use extended_tasks, make sure you installed their dependencies using pip install -e .[extended_tasks]. (registry.py:136)
[2025-02-19 19:35:26,402] [ ERROR]: extended|lcb:codegeneration not found in provided tasks (registry.py:108)
[rank0]: ╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮
[rank0]: │ /usr/local/lib/python3.12/dist-packages/lighteval/main_vllm.py:143 in vllm │
[rank0]: │ │
[rank0]: │ 140 │ │ model_args_dict: dict = {k.split("=")[0]: k.split("=")[1] if "=" in k else True │
[rank0]: │ 141 │ │ model_config = VLLMModelConfig(**model_args_dict) │
[rank0]: │ 142 │ │
[rank0]: │ ❱ 143 │ pipeline = Pipeline( │
[rank0]: │ 144 │ │ tasks=tasks, │
[rank0]: │ 145 │ │ pipeline_parameters=pipeline_params, │
[rank0]: │ 146 │ │ evaluation_tracker=evaluation_tracker, │
[rank0]: │ │
[rank0]: │ ╭─────────────────────────────────────────── locals ───────────────────────────────────────────╮ │
[rank0]: │ │ cache_dir = '/scratch' │ │
[rank0]: │ │ custom_tasks = None │ │
[rank0]: │ │ dataset_loading_processes = 1 │ │
[rank0]: │ │ env_config = EnvConfig(cache_dir='/scratch', token=None) │ │
[rank0]: │ │ evaluation_tracker = <lighteval.logging.evaluation_tracker.EvaluationTrack… │ │
[rank0]: │ │ object at 0x7ffff6d986b0> │ │
[rank0]: │ │ job_id = 0 │ │
[rank0]: │ │ load_responses_from_details_date_id = None │ │
[rank0]: │ │ max_samples = None │ │
[rank0]: │ │ model_args = 'pretrained=/workspace/wcp/LLaMA-Factory/outputs/r1_m… │ │
[rank0]: │ │ model_args_dict = { │ │
[rank0]: │ │ │ 'pretrained': │ │
[rank0]: │ │ '/workspace/wcp/LLaMA-Factory/outputs/r1_models/qm_r1… │ │
[rank0]: │ │ │ 'dtype': 'bfloat16', │ │
[rank0]: │ │ │ 'max_model_length': '32768', │ │
[rank0]: │ │ │ 'gpu_memory_utilisation': '0.8', │ │
[rank0]: │ │ │ 'tensor_parallel_size': '8' │ │
[rank0]: │ │ } │ │
[rank0]: │ │ model_config = VLLMModelConfig( │ │
[rank0]: │ │ │ │ │
[rank0]: │ │ pretrained='/workspace/wcp/LLaMA-Factory/outputs/r1_m… │ │
[rank0]: │ │ │ gpu_memory_utilisation='0.8', │ │
[rank0]: │ │ │ revision='main', │ │
[rank0]: │ │ │ dtype='bfloat16', │ │
[rank0]: │ │ │ tensor_parallel_size='8', │ │
[rank0]: │ │ │ pipeline_parallel_size=1, │ │
[rank0]: │ │ │ data_parallel_size=1, │ │
[rank0]: │ │ │ max_model_length='32768', │ │
[rank0]: │ │ │ swap_space=4, │ │
[rank0]: │ │ │ seed=1234, │ │
[rank0]: │ │ │ trust_remote_code=False, │ │
[rank0]: │ │ │ use_chat_template=False, │ │
[rank0]: │ │ │ add_special_tokens=True, │ │
[rank0]: │ │ │ multichoice_continuations_start_space=True, │ │
[rank0]: │ │ │ pairwise_tokenization=False, │ │
[rank0]: │ │ │ generation_parameters=GenerationParameters( │ │
[rank0]: │ │ │ │ early_stopping=None, │ │
[rank0]: │ │ │ │ repetition_penalty=None, │ │
[rank0]: │ │ │ │ frequency_penalty=None, │ │
[rank0]: │ │ │ │ length_penalty=None, │ │
[rank0]: │ │ │ │ presence_penalty=None, │ │
[rank0]: │ │ │ │ max_new_tokens=None, │ │
[rank0]: │ │ │ │ min_new_tokens=None, │ │
[rank0]: │ │ │ │ seed=None, │ │
[rank0]: │ │ │ │ stop_tokens=None, │ │
[rank0]: │ │ │ │ temperature=None, │ │
[rank0]: │ │ │ │ top_k=None, │ │
[rank0]: │ │ │ │ min_p=None, │ │
[rank0]: │ │ │ │ top_p=None, │ │
[rank0]: │ │ │ │ truncate_prompt=None │ │
[rank0]: │ │ │ ), │ │
[rank0]: │ │ │ subfolder=None │ │
[rank0]: │ │ ) │ │
[rank0]: │ │ num_fewshot_seeds = 1 │ │
[rank0]: │ │ output_dir = 'data/lcd//workspace/wcp/LLaMA-Factory/outputs/r1_mod… │ │
[rank0]: │ │ pipeline_params = PipelineParameters( │ │
[rank0]: │ │ │ launcher_type=<ParallelismManager.VLLM: 5>, │ │
[rank0]: │ │ │ env_config=EnvConfig( │ │
[rank0]: │ │ │ │ cache_dir='/scratch', │ │
[rank0]: │ │ │ │ token=None │ │
[rank0]: │ │ │ ), │ │
[rank0]: │ │ │ job_id=0, │ │
[rank0]: │ │ │ dataset_loading_processes=1, │ │
[rank0]: │ │ │ nanotron_checkpoint_path=None, │ │
[rank0]: │ │ │ custom_tasks_directory=None, │ │
[rank0]: │ │ │ override_batch_size=-1, │ │
[rank0]: │ │ │ num_fewshot_seeds=1, │ │
[rank0]: │ │ │ max_samples=None, │ │
[rank0]: │ │ │ use_chat_template=True, │ │
[rank0]: │ │ │ system_prompt=None, │ │
[rank0]: │ │ │ load_responses_from_details_date_id=None │ │
[rank0]: │ │ ) │ │
[rank0]: │ │ public_run = False │ │
[rank0]: │ │ push_to_hub = False │ │
[rank0]: │ │ push_to_tensorboard = False │ │
[rank0]: │ │ results_org = None │ │
[rank0]: │ │ save_details = False │ │
[rank0]: │ │ system_prompt = None │ │
[rank0]: │ │ tasks = 'extended|lcb:codegeneration|0|0' │ │
[rank0]: │ │ TOKEN = None │ │
[rank0]: │ │ use_chat_template = True │ │
[rank0]: │ │ yaml = <module 'yaml' from │ │
[rank0]: │ │ '/usr/local/lib/python3.12/dist-packages/yaml/_init… │ │
[rank0]: │ ╰──────────────────────────────────────────────────────────────────────────────────────────────╯ │
[rank0]: │ │
[rank0]: │ /usr/local/lib/python3.12/dist-packages/lighteval/pipeline.py:152 in __init__                      │
[rank0]: │ │
[rank0]: │ 149 │ │ self.model = self._init_model(model_config, model) │
[rank0]: │ 150 │ │ │
[rank0]: │ 151 │ │ self.evaluation_tracker.general_config_logger.log_model_info(self.model.model_in │
[rank0]: │ ❱ 152 │ │ self._init_tasks_and_requests(tasks=tasks) │
[rank0]: │ 153 │ │ self._init_random_seeds() │
[rank0]: │ 154 │ │ # Final results │
[rank0]: │ 155 │ │ self.final_dict: dict = None │
[rank0]: │ │
[rank0]: │ ╭─────────────────────────────────────────── locals ───────────────────────────────────────────╮ │
[rank0]: │ │ evaluation_tracker = <lighteval.logging.evaluation_tracker.EvaluationTracker object at │ │
[rank0]: │ │ 0x7ffff6d986b0> │ │
[rank0]: │ │ model = None │ │
[rank0]: │ │ model_config = VLLMModelConfig( │ │
[rank0]: │ │ │ │ │
[rank0]: │ │ pretrained='/workspace/wcp/LLaMA-Factory/outputs/r1_models/qm_r1zh110… │ │
[rank0]: │ │ │ gpu_memory_utilisation='0.8', │ │
[rank0]: │ │ │ revision='main', │ │
[rank0]: │ │ │ dtype='bfloat16', │ │
[rank0]: │ │ │ tensor_parallel_size='8', │ │
[rank0]: │ │ │ pipeline_parallel_size=1, │ │
[rank0]: │ │ │ data_parallel_size=1, │ │
[rank0]: │ │ │ max_model_length='32768', │ │
[rank0]: │ │ │ swap_space=4, │ │
[rank0]: │ │ │ seed=1234, │ │
[rank0]: │ │ │ trust_remote_code=False, │ │
[rank0]: │ │ │ use_chat_template=False, │ │
[rank0]: │ │ │ add_special_tokens=True, │ │
[rank0]: │ │ │ multichoice_continuations_start_space=True, │ │
[rank0]: │ │ │ pairwise_tokenization=False, │ │
[rank0]: │ │ │ generation_parameters=GenerationParameters( │ │
[rank0]: │ │ │ │ early_stopping=None, │ │
[rank0]: │ │ │ │ repetition_penalty=None, │ │
[rank0]: │ │ │ │ frequency_penalty=None, │ │
[rank0]: │ │ │ │ length_penalty=None, │ │
[rank0]: │ │ │ │ presence_penalty=None, │ │
[rank0]: │ │ │ │ max_new_tokens=None, │ │
[rank0]: │ │ │ │ min_new_tokens=None, │ │
[rank0]: │ │ │ │ seed=None, │ │
[rank0]: │ │ │ │ stop_tokens=None, │ │
[rank0]: │ │ │ │ temperature=None, │ │
[rank0]: │ │ │ │ top_k=None, │ │
[rank0]: │ │ │ │ min_p=None, │ │
[rank0]: │ │ │ │ top_p=None, │ │
[rank0]: │ │ │ │ truncate_prompt=None │ │
[rank0]: │ │ │ ), │ │
[rank0]: │ │ │ subfolder=None │ │
[rank0]: │ │ ) │ │
[rank0]: │ │ pipeline_parameters = PipelineParameters( │ │
[rank0]: │ │ │ launcher_type=<ParallelismManager.VLLM: 5>, │ │
[rank0]: │ │ │ env_config=EnvConfig(cache_dir='/scratch', token=None), │ │
[rank0]: │ │ │ job_id=0, │ │
[rank0]: │ │ │ dataset_loading_processes=1, │ │
[rank0]: │ │ │ nanotron_checkpoint_path=None, │ │
[rank0]: │ │ │ custom_tasks_directory=None, │ │
[rank0]: │ │ │ override_batch_size=-1, │ │
[rank0]: │ │ │ num_fewshot_seeds=1, │ │
[rank0]: │ │ │ max_samples=None, │ │
[rank0]: │ │ │ use_chat_template=True, │ │
[rank0]: │ │ │ system_prompt=None, │ │
[rank0]: │ │ │ load_responses_from_details_date_id=None │ │
[rank0]: │ │ ) │ │
[rank0]: │ │ self = <lighteval.pipeline.Pipeline object at 0x7ffb752f7800> │ │
[rank0]: │ │ tasks = 'extended|lcb:codegeneration|0|0' │ │
[rank0]: │ ╰──────────────────────────────────────────────────────────────────────────────────────────────╯ │
[rank0]: │ │
[rank0]: │ /usr/local/lib/python3.12/dist-packages/lighteval/pipeline.py:211 in _init_tasks_and_requests │
[rank0]: │ │
[rank0]: │ 208 │ │ │ │ custom_tasks=self.pipeline_parameters.custom_tasks_directory, │
[rank0]: │ 209 │ │ │ ) │
[rank0]: │ 210 │ │ │ task_names_list, fewshots_dict = taskinfo_selector(tasks, registry) │
[rank0]: │ ❱ 211 │ │ │ task_dict = registry.get_task_dict(task_names_list) │
[rank0]: │ 212 │ │ │ LightevalTask.load_datasets(list(task_dict.values()), self.pipeline_paramete │
[rank0]: │ 213 │ │ │ │
[rank0]: │ 214 │ │ │ self.evaluation_tracker.task_config_logger.log(task_dict) │
[rank0]: │ │
[rank0]: │ ╭──────────────────────────────────── locals ────────────────────────────────────╮ │
[rank0]: │ │ fewshots_dict = {'extended|lcb:codegeneration': [(0, False)]} │ │
[rank0]: │ │ registry = <lighteval.tasks.registry.Registry object at 0x7ffb501dcce0> │ │
[rank0]: │ │ self = <lighteval.pipeline.Pipeline object at 0x7ffb752f7800> │ │
[rank0]: │ │ task_names_list = ['extended|lcb:codegeneration'] │ │
[rank0]: │ │ tasks = 'extended|lcb:codegeneration|0|0' │ │
[rank0]: │ ╰────────────────────────────────────────────────────────────────────────────────╯ │
[rank0]: │ │
[rank0]: │ /usr/local/lib/python3.12/dist-packages/lighteval/tasks/registry.py:212 in get_task_dict │
[rank0]: │ │
[rank0]: │ 209 │ │ │ - Each task in the task_name_list will be instantiated with the correspondin │
[rank0]: │ 210 │ │ """ │
[rank0]: │ 211 │ │ # Select relevant tasks given the subset asked for by the user │
[rank0]: │ ❱ 212 │ │ return {task_name: self.get_task_instance(task_name) for task_name in task_names │
[rank0]: │ 213 │ │
[rank0]: │ 214 │ def expand_task_definition(self, task_definition: str): │
[rank0]: │ 215 │ │ """ │
[rank0]: │ │
[rank0]: │ ╭───────────────────────────────── locals ──────────────────────────────────╮ │
[rank0]: │ │ self = <lighteval.tasks.registry.Registry object at 0x7ffb501dcce0> │ │
[rank0]: │ │ task_names = ['extended|lcb:codegeneration'] │ │
[rank0]: │ ╰───────────────────────────────────────────────────────────────────────────╯ │
[rank0]: │ │
[rank0]: │ /usr/local/lib/python3.12/dist-packages/lighteval/tasks/registry.py:109 in get_task_instance │
[rank0]: │ │
[rank0]: │ 106 │ │ task_class = self.task_registry.get(task_name) │
[rank0]: │ 107 │ │ if task_class is None: │
[rank0]: │ 108 │ │ │ logger.error(f"{task_name} not found in provided tasks") │
[rank0]: │ ❱ 109 │ │ │ raise ValueError(f"Cannot find tasks {task_name} in task list or in custom t │
[rank0]: │ 110 │ │ │
[rank0]: │ 111 │ │ return task_class() │
[rank0]: │ 112 │
[rank0]: │ │
[rank0]: │ ╭───────────────────────────────── locals ──────────────────────────────────╮ │
[rank0]: │ │ self = <lighteval.tasks.registry.Registry object at 0x7ffb501dcce0> │ │
[rank0]: │ │ task_class = None │ │
[rank0]: │ │ task_name = 'extended|lcb:codegeneration' │ │
[rank0]: │ ╰───────────────────────────────────────────────────────────────────────────╯ │
[rank0]: ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
[rank0]: ValueError: Cannot find tasks extended|lcb:codegeneration in task list or in custom task registry)
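The WARNING near the top of the log points at the likely cause: the `extended` task suite (which contains `extended|lcb:codegeneration`) is only registered when its optional dependencies are present, e.g. after `pip install -e .[extended_tasks]` in a source checkout, as the warning itself suggests. Below is a minimal diagnostic sketch to confirm whether any extended tasks are registered at all; the `Registry` import path and its `task_registry` attribute are taken from the traceback above, while the constructor arguments are assumptions and may differ across lighteval versions.

```python
# Minimal diagnostic sketch: list the "extended|" tasks the registry knows about.
# An empty result means the extended-task dependencies are not installed, which
# matches the WARNING emitted from registry.py:136 above.
# NOTE: the constructor arguments below are assumptions; the import path and the
# `task_registry` attribute come from the traceback.
from lighteval.tasks.registry import Registry

registry = Registry(cache_dir="/scratch", custom_tasks=None)
extended_tasks = sorted(
    name for name in registry.task_registry if name.startswith("extended|")
)
print(extended_tasks or "no extended tasks registered - install the [extended_tasks] extras")
```

If the list is empty, installing the extras and rerunning the same `lighteval vllm` command with `extended|lcb:codegeneration|0|0` should let the task resolve.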