# Copyright 2025 ByteDance and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch

def mel2token_to_dur(mel2token, T_txt=None, max_dur=None):
    """Convert a frame-level alignment into per-token durations.

    mel2token holds, for each mel frame, the 1-based index of the text token
    that frame is aligned to (0 is treated as padding). The result has shape
    [B, T_txt] (or [T_txt] for un-batched input) and counts how many frames
    each token spans.
    """
    is_torch = isinstance(mel2token, torch.Tensor)
    has_batch_dim = True
    if not is_torch:
        mel2token = torch.LongTensor(mel2token)
    if T_txt is None:
        T_txt = mel2token.max()
    if len(mel2token.shape) == 1:
        mel2token = mel2token[None, ...]
        has_batch_dim = False
    B, _ = mel2token.shape
    # Count occurrences of each token index; slot 0 collects padding frames.
    dur = mel2token.new_zeros(B, T_txt + 1).scatter_add(1, mel2token, torch.ones_like(mel2token))
    dur = dur[:, 1:]  # drop the padding slot
    if max_dur is not None:
        dur = dur.clamp(max=max_dur)
    if not is_torch:
        dur = dur.numpy()
    if not has_batch_dim:
        dur = dur[0]
    return dur
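

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file; the alignment values
# below are purely illustrative): it shows how mel2token_to_dur turns a
# frame-to-token alignment into per-token durations.
if __name__ == "__main__":
    # One 1-based token id per mel frame; 0 would mark a padding frame.
    alignment = [1, 1, 1, 2, 2, 3, 3, 3, 3]
    durations = mel2token_to_dur(alignment, T_txt=4)
    print(durations)  # -> [3 2 4 0]: token 1 covers 3 frames, token 4 none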