def patch_t5_tokenizer():
    """Force proper loading of the T5 tokenizer module.

    Works around ``transformers``' lazy module loading by importing the
    tokenizer submodule directly and verifying that the ``T5Tokenizer``
    class is actually exposed on it.

    Returns:
        bool: True if ``T5Tokenizer`` is available on the module,
        False if the import failed or the class is missing.
    """
    try:
        # Importing the submodule directly forces the lazy loader to
        # materialize it.
        import transformers.models.t5.tokenization_t5

        if hasattr(transformers.models.t5.tokenization_t5, 'T5Tokenizer'):
            print("T5Tokenizer successfully loaded")
            return True
        print("T5Tokenizer not found in module")
        return False
    except Exception as e:
        # Broad catch is deliberate: this is a best-effort patch and the
        # caller only needs a success flag, not the failure details.
        print(f"Failed to patch T5 tokenizer: {e}")
        return False
def patch_transformers():
    """Apply patches to fix lazy loading of the T5 tokenizer classes.

    Imports ``transformers`` and the concrete tokenizer classes, then
    re-attaches ``T5Tokenizer`` to the tokenizer submodule if the lazy
    loader left it missing.

    Returns:
        bool: True if patching completed, False if ``transformers`` (or
        the T5 tokenizer classes) could not be imported.
    """
    try:
        # Import transformers first so the package is initialized.
        import transformers  # noqa: F401

        # Force-load the tokenizer classes; T5TokenizerFast is imported
        # purely to make the lazy loader materialize the fast variant.
        from transformers import T5Tokenizer, T5TokenizerFast  # noqa: F401

        import transformers.models.t5.tokenization_t5 as t5_tokenizer_module

        # Re-expose the class on the module if the lazy loader hid it.
        if not hasattr(t5_tokenizer_module, 'T5Tokenizer'):
            t5_tokenizer_module.T5Tokenizer = T5Tokenizer
        print("Transformers patching complete")
        return True
    except Exception as e:
        # Mirror patch_t5_tokenizer(): report and return a failure flag
        # instead of crashing the caller when transformers is unavailable.
        print(f"Failed to patch transformers: {e}")
        return False
if __name__ == "__main__":
    # Run both patches so the T5 tokenizer classes are fully materialized
    # before any downstream code relies on them.
    patch_transformers()
    patch_t5_tokenizer()