[CI/Build] VLM Test Consolidation (#9372)

Signed-off-by: Alex-Brooks <Alex.Brooks@ibm.com>
This commit is contained in:
Alex Brooks
2024-10-30 10:32:17 -06:00
committed by GitHub
parent 211fe91aa8
commit cc98f1e079
38 changed files with 2381 additions and 3096 deletions

View File

@@ -259,8 +259,7 @@ class HfRunner:
is_sentence_transformer: bool = False,
skip_tokenizer_init: bool = False,
auto_cls: Type[_BaseAutoModelClass] = AutoModelForCausalLM,
postprocess_inputs: Callable[[BatchEncoding],
BatchEncoding] = identity,
postprocess_inputs: Callable[..., BatchEncoding] = identity,
) -> None:
torch_dtype = STR_DTYPE_TO_TORCH_DTYPE[dtype]
@@ -303,6 +302,7 @@ class HfRunner:
if skip_tokenizer_init:
self.tokenizer = self.processor.tokenizer
self.dtype = dtype
self.postprocess_inputs = postprocess_inputs
def get_inputs(
@@ -337,7 +337,7 @@ class HfRunner:
processor_kwargs["sampling_rate"] = sr
inputs = self.processor(**processor_kwargs)
inputs = self.postprocess_inputs(inputs)
inputs = self.postprocess_inputs(inputs, dtype=self.dtype)
all_inputs.append(inputs)