fix(xtts): update streaming for transformers>=4.42.0 (#59)

* Fix Stream Generator on MacOS

* Make it work on mps

* Implement custom tensor.isin

* Fix for latest transformers release

* Comment out hack for now

* Remove unused code

* build: increase minimum transformers version

* style: fix

---------

Co-authored-by: Enno Hermann <Eginhard@users.noreply.github.com>
This commit is contained in:
Author: Daniel Walmsley · Date: 2024-07-25 07:24:10 -07:00 · Committed by: GitHub
parent 20583a496e
commit 20bbb411c2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 4 additions and 4 deletions

View File

@@ -376,7 +376,7 @@ class NewGenerationMixin(GenerationMixin):
 elif is_sample_gen_mode:
     # 11. prepare logits warper
-    logits_warper = self._get_logits_warper(generation_config)
+    logits_warper = self._get_logits_warper(generation_config, inputs_tensor.device)
     # 12. expand input_ids with `num_return_sequences` additional sequences per batch
     input_ids, model_kwargs = self._expand_inputs_for_generation(
@@ -401,7 +401,7 @@ class NewGenerationMixin(GenerationMixin):
 )
 elif is_sample_gen_stream_mode:
     # 11. prepare logits warper
-    logits_warper = self._get_logits_warper(generation_config)
+    logits_warper = self._get_logits_warper(generation_config, inputs_tensor.device)
     # 12. expand input_ids with `num_return_sequences` additional sequences per batch
     input_ids, model_kwargs = self._expand_inputs_for_generation(
@@ -463,7 +463,7 @@ class NewGenerationMixin(GenerationMixin):
 elif is_beam_sample_gen_mode:
     # 11. prepare logits warper
-    logits_warper = self._get_logits_warper(generation_config)
+    logits_warper = self._get_logits_warper(generation_config, inputs_tensor.device)
     if stopping_criteria.max_length is None:
         raise ValueError("`max_length` needs to be a stopping_criteria for now.")

View File

@@ -68,7 +68,7 @@ dependencies = [
     "gruut[de,es,fr]==2.2.3",
     # Tortoise
     "einops>=0.6.0",
-    "transformers>=4.41.1",
+    "transformers>=4.42.0",
     # Bark
     "encodec>=0.1.1",
     # XTTS