Update the version from 1.0.0 to 1.0.1
Takaaki-Saeki committed Feb 15, 2024
1 parent 4baff9c commit cb2c382
Showing 4 changed files with 19 additions and 1 deletion.
6 changes: 6 additions & 0 deletions discrete_speech_metrics/speechbertscore.py
@@ -1,10 +1,16 @@
 # Copyright 2024 Takaaki Saeki
 # MIT LICENSE (https://opensource.org/license/mit/)
 
+import logging
 import torchaudio
 import torch
 from transformers import HubertModel, Wav2Vec2Model, WavLMModel
 
+# In PyTorch 2+, a warning for checkpoint mismatch is raised.
+# But it should be a false alarm according to the following issue.
+# https://github.com/huggingface/transformers/issues/26796
+# I have added the following line to suppress the warning.
+logging.getLogger("transformers").setLevel(logging.ERROR)
 
 def bert_score(v_generated, v_reference):
     """
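Note: the added call is the standard-library way to raise the logging threshold for one package's logger. A minimal, self-contained sketch of the same technique (the HuBERT checkpoint name below is illustrative, not necessarily the one this library loads):

    import logging
    from transformers import HubertModel

    # Drop WARNING-level messages from the "transformers" logger
    # (including the checkpoint-mismatch false alarm) while keeping
    # ERROR-level messages visible.
    logging.getLogger("transformers").setLevel(logging.ERROR)

    # Loading a pretrained encoder would otherwise emit the warning.
    model = HubertModel.from_pretrained("facebook/hubert-base-ls960")

transformers also ships its own helper with the same effect, transformers.logging.set_verbosity_error(); the plain stdlib call used in this commit achieves the same result without depending on that API.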
6 changes: 6 additions & 0 deletions discrete_speech_metrics/speechbleu.py
@@ -1,6 +1,7 @@
 # Copyright 2024 Takaaki Saeki
 # MIT LICENSE (https://opensource.org/license/mit/)
 
+import logging
 from transformers import HubertModel
 import os
 import pathlib
@@ -13,6 +14,11 @@
 from nltk.translate.bleu_score import sentence_bleu
 nltk.download('punkt')
 
+# In PyTorch 2+, a warning for checkpoint mismatch is raised.
+# But it should be a false alarm according to the following issue.
+# https://github.com/huggingface/transformers/issues/26796
+# I have added the following line to suppress the warning.
+logging.getLogger("transformers").setLevel(logging.ERROR)
 
 def int_array_to_chinese_unicode(arr):
     """
6 changes: 6 additions & 0 deletions discrete_speech_metrics/speechtokendistance.py
@@ -1,6 +1,7 @@
 # Copyright 2024 Takaaki Saeki
 # MIT LICENSE (https://opensource.org/license/mit/)
 
+import logging
 from transformers import HubertModel
 import os
 import pathlib
@@ -12,6 +13,11 @@
 from Levenshtein import distance as levenshtein_distance
 import jellyfish
 
+# In PyTorch 2+, a warning for checkpoint mismatch is raised.
+# But it should be a false alarm according to the following issue.
+# https://github.com/huggingface/transformers/issues/26796
+# I have added the following line to suppress the warning.
+logging.getLogger("transformers").setLevel(logging.ERROR)
 
 def int_array_to_chinese_unicode(arr):
     """
2 changes: 1 addition & 1 deletion setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name='discrete-speech-metrics',
-    version='1.0.0',
+    version='1.0.1',
     packages=find_packages(),
     install_requires=[
         'numpy>=1.20.3',
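Once the updated package is installed (e.g. via pip install --upgrade), the bump can be confirmed from the package metadata. A minimal check, assuming the distribution name declared in setup.py:

    from importlib.metadata import version

    # Expected to print 1.0.1 after the updated release is installed.
    print(version("discrete-speech-metrics"))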
