Dev 2.3. (#242)

* Fix inputs of duration discriminator

* Add LSTM

* Update models.py

* Update tensorboard scalar

* Noise injection for minimizing modality gap

* Update infer.py

* support bf16 run

* del unused_para flag

* support bf16 config

* add grad clip

* fix(logger and grad): add dur grad, fix grad clip

* Update webui_preprocess.py

* Fix English G2P

* fix(bert_gen): add pass

* Pass SDP to DD

* Update webui_preprocess.py

* Update config.json

* Update webui.py

* Update chinese_bert.py

* Upload webui for deploy

* Update webui.py

* torch.save as pt not npy

* Update config.json

* add freeze emo vq

* Update webui_preprocess.py

* Fix tone_sandhi.py

* Comment out grad clip

* Fix in-place addition

* Add SLM discriminator

* Add DDP for WD

* Feat: Style text: make emotions and style similar to the style text by mixing BERT features (#240) (#241); see the sketch after this commit message

* fix(oldVersion210): load the emotion model on demand

* feat: update fastapi.py. Add more error log messages

* Switch pyopenjtalk to pyopenjtalk-prebuilt

* fix: update fastapi.py. Adapt 2.2 reference handling

* Update resample.py

* Fix ONNX export bug (#237)

* Add files via upload

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Add files via upload

* Add files via upload

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Delete attentions_onnx.py

* Delete models_onnx.py

* Add files via upload

* Add files via upload

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update __init__.py

* Update __init__.py

* Update __init__.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------



* Fix onnx

* Format export

* Feat: style-text and bert mixing (JA only)

* Ensure the same tensor shape

* Update

* update gradio version

* Fix

* Style text for chinese and english (ver 2.2)

* Style text for chinese and english (ver 2.1)

* Style text in FastAPI

* Translate style text description into Chinese

---------

Co-authored-by: litagin02 <[email protected]>
Co-authored-by: Sora <[email protected]>
Co-authored-by: Sihan Wang <[email protected]>
Co-authored-by: Ναρουσέ·μ·γιουμεμί·Χινακάννα <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>

* Remove CLAP

* Revert "Remove CLAP"

This reverts commit 62fd59b.

Revert

* Remove CLAP

* bf16 auto grad clip

* Update webui and infer utils

* Update webui.py

* Update webui.py

* Update webui_preprocess.py

* Update webui_preprocess.py

---------

Co-authored-by: Sihan Wang <[email protected]>
Co-authored-by: OedoSoldier <[email protected]>
Co-authored-by: litagin02 <[email protected]>
Co-authored-by: Sora <[email protected]>
Co-authored-by: Ναρουσέ·μ·γιουμεμί·Χινακάννα <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
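The style-text feature above (#240, #241) works by blending the BERT features of the input text with those of a separate style text, so the synthesized emotion and prosody drift toward the style text while the phoneme content stays fixed. A minimal sketch of that mixing, assuming a simple weighted interpolation; the names here (mix_style_bert, style_weight) are illustrative, not the repository's exact API:

import torch

def mix_style_bert(text_bert: torch.Tensor, style_bert: torch.Tensor, style_weight: float = 0.7) -> torch.Tensor:
    # text_bert:  [hidden, T] BERT features aligned to the input phonemes
    # style_bert: [hidden, T_style] BERT features of the style text
    # Collapse the style text to one vector and blend it into every frame,
    # leaving the phoneme alignment of the input untouched.
    style_vec = style_bert.mean(dim=1, keepdim=True)  # [hidden, 1]
    return text_bert * (1 - style_weight) + style_vec * style_weight

A larger style_weight pulls the output further toward the style text's emotion and style; 0 disables the mixing entirely.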
7 people authored Dec 19, 2023
1 parent 5479e90 commit 76653b5
Showing 34 changed files with 2,960 additions and 1,208 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -170,6 +170,7 @@ data/*
!/default_config.yml
/Web/
/emotional/*/*.bin
/slm/*/*.bin
/bert/*/*.bin
/bert/*/*.h5
/bert/*/*.model
41 changes: 24 additions & 17 deletions bert_gen.py
@@ -1,17 +1,16 @@
import argparse
from multiprocessing import Pool, cpu_count

import torch
import torch.multiprocessing as mp
from tqdm import tqdm

from multiprocessing import Pool
import commons
import utils
from tqdm import tqdm
from text import check_bert_models, cleaned_text_to_sequence, get_bert
import argparse
import torch.multiprocessing as mp
from config import config
from text import cleaned_text_to_sequence, get_bert


def process_line(line):
def process_line(x):
line, add_blank = x
device = config.bert_gen_config.device
if config.bert_gen_config.use_multi_device:
rank = mp.current_process()._identity
@@ -28,12 +27,13 @@ def process_line(line):
word2ph = [i for i in word2ph]
phone, tone, language = cleaned_text_to_sequence(phone, tone, language_str)

phone = commons.intersperse(phone, 0)
tone = commons.intersperse(tone, 0)
language = commons.intersperse(language, 0)
for i in range(len(word2ph)):
word2ph[i] = word2ph[i] * 2
word2ph[0] += 1
if add_blank:
phone = commons.intersperse(phone, 0)
tone = commons.intersperse(tone, 0)
language = commons.intersperse(language, 0)
for i in range(len(word2ph)):
word2ph[i] = word2ph[i] * 2
word2ph[0] += 1

bert_path = wav_path.replace(".WAV", ".wav").replace(".wav", ".bert.pt")

@@ -59,16 +59,23 @@ def process_line(line):
args, _ = parser.parse_known_args()
config_path = args.config
hps = utils.get_hparams_from_file(config_path)
check_bert_models()
lines = []
with open(hps.data.training_files, encoding="utf-8") as f:
lines.extend(f.readlines())

with open(hps.data.validation_files, encoding="utf-8") as f:
lines.extend(f.readlines())
add_blank = [hps.data.add_blank] * len(lines)

if len(lines) != 0:
num_processes = min(args.num_processes, cpu_count())
num_processes = args.num_processes
with Pool(processes=num_processes) as pool:
for _ in tqdm(pool.imap_unordered(process_line, lines), total=len(lines)):
pass
for _ in tqdm(
pool.imap_unordered(process_line, zip(lines, add_blank)),
total=len(lines),
):
# this indented block is the loop body
pass  # pass statement used as a placeholder

print(f"bert生成完毕!, 共有{len(lines)}个bert.pt生成!")
2 changes: 1 addition & 1 deletion clap_gen.py
@@ -27,7 +27,7 @@ def process_line(line):
device = torch.device("cpu")
wav_path, _, language_str, text, phones, tone, word2ph = line.strip().split("|")

clap_path = wav_path.replace(".WAV", ".wav").replace(".wav", ".emo.npy")
clap_path = wav_path.replace(".WAV", ".wav").replace(".wav", ".emo.pt")
if os.path.isfile(clap_path):
return

