|
 
 import os
 import re
-import types
 from abc import abstractmethod
 from contextlib import contextmanager, nullcontext
 from typing import Any, Dict, List, Optional, Tuple, Type, Union
@@ -73,19 +72,6 @@ class AutoModelFactory(ModelFactory):
     def automodel_cls(self) -> Type[_BaseAutoModelClass]:
         """Get the AutoModel class for calling from_pretrained and from_config."""
 
-    @staticmethod
-    @abstractmethod
-    def _strict_forward(model: nn.Module, input_ids: torch.Tensor, position_ids: torch.Tensor):
-        """A strict (args-only) forward method for the model that precisely defines the signature.
-
-        The function should take input_ids and position_ids as positional arguments at a
-        minimum. Other arguments can be added as needed and must follow the correct order.
-        """
-
-    def _set_strict_forward(self, model: nn.Module):
-        """Set the strict (args-only) forward method for the model."""
-        model.forward = types.MethodType(self._strict_forward, model)
-
 
 @ModelFactoryRegistry.register("AutoModelForCausalLM")
 class AutoModelForCausalLMFactory(AutoModelFactory):
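
The removed `_set_strict_forward` hook patched an args-only forward onto the model instance at runtime. For readers of this commit, here is a minimal sketch of that `types.MethodType` idiom; `ToyModel` and all shapes below are illustrative stand-ins, not code from this repository:

```python
import types

import torch
import torch.nn as nn


class ToyModel(nn.Module):
    """Illustrative stand-in for an HF model with a kwarg-heavy forward."""

    def __init__(self):
        super().__init__()
        self.emb = nn.Embedding(100, 8)

    def forward(self, input_ids=None, position_ids=None, **kwargs):
        return self.emb(input_ids) + self.emb(position_ids)


def _strict_forward(model: nn.Module, input_ids: torch.Tensor, position_ids: torch.Tensor):
    # Args-only signature: positional order is fixed and no kwargs are accepted.
    return type(model).forward(model, input_ids=input_ids, position_ids=position_ids)


model = ToyModel()
# MethodType binds `model` as the first argument, so the function becomes an
# instance method that shadows the class-level forward on this instance only.
model.forward = types.MethodType(_strict_forward, model)

input_ids = torch.zeros(1, 4, dtype=torch.long)
position_ids = torch.arange(4).unsqueeze(0)
out = model.forward(input_ids, position_ids)  # shape: (1, 4, 8)
```

Because the binding is per-instance, other instances of the same class keep their original forward.
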
@@ -132,16 +118,6 @@ def __init__(self, *args, **kwargs):
     def automodel_cls(self) -> Type[_BaseAutoModelClass]:
         return AutoModelForCausalLM
 
-    @staticmethod
-    def _strict_forward(model: nn.Module, input_ids: torch.Tensor, position_ids: torch.Tensor):
-        """A strict (args-only) forward pass for the model to functionalize the args.
-
-        This follows the standard function signature as expected by factory.py. We do _not_ use the
-        model.forward method directly to create the patch. Instead we use the type of the model to
-        get the forward method to keep the patch composable with other forward patches.
-        """
-        return type(model).forward(model, input_ids=input_ids, position_ids=position_ids)
-
     def _recursive_update_config(
         self, config: PretrainedConfig, update_dict: Dict[str, Any]
     ) -> Tuple[PretrainedConfig, Dict[str, Any]]:
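
The deleted docstring's rationale, that dispatching through `type(model).forward` at call time (rather than capturing `model.forward` when the patch is created) keeps the strict patch composable with other forward patches, is easiest to see in a toy example. A self-contained sketch with invented names:

```python
import types

import torch.nn as nn


class Toy(nn.Module):
    def forward(self, x=None, **kwargs):
        return "base"


def strict_forward(model, x):
    # Look the forward up on the class at call time rather than capturing
    # model.forward at patch time; patches applied to the class afterwards
    # are then still honored.
    return type(model).forward(model, x=x)


model = Toy()
model.forward = types.MethodType(strict_forward, model)

# A second, class-level forward patch applied *after* the strict patch:
_orig = Toy.forward
Toy.forward = lambda self, x=None, **kw: "patched:" + _orig(self, x=x)

print(model.forward(0))  # -> "patched:base": the two patches compose
```

Had `strict_forward` captured the bound `model.forward` at patch time instead, the later class-level patch would be silently bypassed.
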
@@ -542,28 +518,6 @@ def init_processor(self) -> Optional[Any]:
             return None
         return AutoProcessor.from_pretrained(self.tokenizer, **self.tokenizer_kwargs)
 
-    # TODO: in theory the signature could be auto-derived, but it would probably require some hefty
-    # meta-programming to programmatically generate the functions and signature from something like
-    # the example inputs. And even with that we would still need to figure out how to automatically
-    # infer the dynamic shapes for the extra inputs.
-    # Alternatively, we could try to directly use the HF forward again, but I am not sure whether
-    # this would trigger some kind of kwarg handling inside the graph, which I would want to avoid.
-    @staticmethod
-    def _strict_forward(
-        model: nn.Module,
-        input_ids: torch.Tensor,
-        position_ids: torch.Tensor,
-        pixel_values: torch.Tensor,
-    ):
-        """A strict (args-only) forward pass for the model to functionalize the args.
-
-        It adds pixel_values as a positional argument, as expected by most
-        AutoModelForImageTextToText models, in addition to the required input_ids and position_ids.
-        """
-        return type(model).forward(
-            model, input_ids=input_ids, position_ids=position_ids, pixel_values=pixel_values
-        )
-
     def get_example_inputs(self) -> Dict[str, torch.Tensor]:
         """Return a dictionary of example inputs for the model."""
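
The removed TODO asks whether the strict signature could be auto-derived from the example inputs. One hypothetical shape such a derivation could take, sketched with `inspect`; the `make_strict_forward` helper is invented for illustration and does not exist in this codebase, and, as the TODO notes, dynamic-shape inference for the extra inputs would remain unsolved:

```python
import inspect
import types

import torch
import torch.nn as nn


def make_strict_forward(example_inputs):
    """Build an args-only forward whose positional order mirrors example_inputs.

    Hypothetical sketch only, not part of the repository.
    """
    names = list(example_inputs)  # e.g. ["input_ids", "position_ids", "pixel_values"]

    def strict_forward(model, *args):
        assert len(args) == len(names), "strict forward takes exactly the derived args"
        return type(model).forward(model, **dict(zip(names, args)))

    # Advertise a precise positional signature for downstream tooling.
    params = [inspect.Parameter("model", inspect.Parameter.POSITIONAL_ONLY)]
    params += [inspect.Parameter(n, inspect.Parameter.POSITIONAL_ONLY) for n in names]
    strict_forward.__signature__ = inspect.Signature(params)
    return strict_forward


class _Toy(nn.Module):
    def forward(self, input_ids=None, pixel_values=None, **kwargs):
        return input_ids.sum() + pixel_values.sum()


example = {"input_ids": torch.ones(1, 4), "pixel_values": torch.ones(1, 3)}
toy = _Toy()
toy.forward = types.MethodType(make_strict_forward(example), toy)
out = toy.forward(example["input_ids"], example["pixel_values"])  # tensor(7.)
```
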
|
|