Update beets to 1.4.7

Also updates:
- colorama-0.4.1
- jellyfish-0.6.1
- munkres-1.0.12
- musicbrainzngs-0.6
- mutagen-1.41.1
- pyyaml-3.13
- six-1.12.0
- unidecode-1.0.23
Author: Labrys of Knossos
Date:   2018-12-15 00:52:11 -05:00
Commit: e854005ae1
193 changed files with 15896 additions and 6384 deletions

jellyfish/_jellyfish.py

@@ -1,6 +1,6 @@
 import unicodedata
 from collections import defaultdict
-from .compat import _range, _zip_longest, _no_bytes_err
+from .compat import _range, _zip_longest, IS_PY3
 from .porter import Stemmer
@@ -8,9 +8,16 @@ def _normalize(s):
     return unicodedata.normalize('NFKD', s)
 
 
+def _check_type(s):
+    if IS_PY3 and not isinstance(s, str):
+        raise TypeError('expected str or unicode, got %s' % type(s).__name__)
+    elif not IS_PY3 and not isinstance(s, unicode):
+        raise TypeError('expected unicode, got %s' % type(s).__name__)
+
+
 def levenshtein_distance(s1, s2):
-    if isinstance(s1, bytes) or isinstance(s2, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s1)
+    _check_type(s2)
 
     if s1 == s2:
         return 0
@@ -36,14 +43,14 @@ def levenshtein_distance(s1, s2):
 def _jaro_winkler(ying, yang, long_tolerance, winklerize):
-    if isinstance(ying, bytes) or isinstance(yang, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(ying)
+    _check_type(yang)
 
     ying_len = len(ying)
     yang_len = len(yang)
 
     if not ying_len or not yang_len:
-        return 0
+        return 0.0
 
     min_len = max(ying_len, yang_len)
     search_range = (min_len // 2) - 1
@@ -66,7 +73,7 @@ def _jaro_winkler(ying, yang, long_tolerance, winklerize):
     # short circuit if no characters match
     if not common_chars:
-        return 0
+        return 0.0
 
     # count transpositions
     k = trans_count = 0
@@ -106,8 +113,8 @@ def _jaro_winkler(ying, yang, long_tolerance, winklerize):
 def damerau_levenshtein_distance(s1, s2):
-    if isinstance(s1, bytes) or isinstance(s2, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s1)
+    _check_type(s2)
 
     len1 = len(s1)
     len2 = len(s2)
@@ -155,25 +162,27 @@ def jaro_winkler(s1, s2, long_tolerance=False):
 def soundex(s):
+    _check_type(s)
 
     if not s:
-        return s
-
-    if isinstance(s, bytes):
-        raise TypeError(_no_bytes_err)
+        return ''
 
     s = _normalize(s)
+    s = s.upper()
 
-    replacements = (('bfpv', '1'),
-                    ('cgjkqsxz', '2'),
-                    ('dt', '3'),
-                    ('l', '4'),
-                    ('mn', '5'),
-                    ('r', '6'))
+    replacements = (('BFPV', '1'),
+                    ('CGJKQSXZ', '2'),
+                    ('DT', '3'),
+                    ('L', '4'),
+                    ('MN', '5'),
+                    ('R', '6'))
     result = [s[0]]
     count = 1
 
     # find would-be replacment for first character
     for lset, sub in replacements:
-        if s[0].lower() in lset:
+        if s[0] in lset:
             last = sub
             break
     else:
@@ -181,7 +190,7 @@ def soundex(s):
     for letter in s[1:]:
         for lset, sub in replacements:
-            if letter.lower() in lset:
+            if letter in lset:
                 if sub != last:
                     result.append(sub)
                     count += 1
@@ -197,8 +206,8 @@ def soundex(s):
 def hamming_distance(s1, s2):
-    if isinstance(s1, bytes) or isinstance(s2, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s1)
+    _check_type(s2)
 
     # ensure length of s1 >= s2
     if len(s2) > len(s1):
@@ -214,8 +223,9 @@ def hamming_distance(s1, s2):
 def nysiis(s):
-    if isinstance(s, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s)
 
     if not s:
         return ''
@@ -303,8 +313,8 @@ def nysiis(s):
 def match_rating_codex(s):
-    if isinstance(s, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s)
 
     s = s.upper()
     codex = []
@@ -368,8 +378,7 @@ def match_rating_comparison(s1, s2):
 def metaphone(s):
-    if isinstance(s, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s)
 
     result = []
@@ -457,8 +466,9 @@ def metaphone(s):
         elif c == 'w':
             if i == 0 and next == 'h':
                 i += 1
-                next = s[i+1]
-            if next in 'aeiou':
+                if nextnext in 'aeiou' or nextnext == '*****':
+                    result.append('w')
+            elif next in 'aeiou' or next == '*****':
                 result.append('w')
         elif c == 'x':
             if i == 0:
@@ -484,6 +494,6 @@ def metaphone(s):
 def porter_stem(s):
-    if isinstance(s, bytes):
-        raise TypeError(_no_bytes_err)
+    _check_type(s)
 
     return Stemmer(s).stem()
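
The jellyfish changes above route input validation through the new _check_type helper rather than the old per-function bytes check. A minimal sketch of the resulting behaviour, assuming Python 3 and that the vendored pure-Python module jellyfish._jellyfish is importable:

from jellyfish import _jellyfish as jf

print(jf.soundex(u'beets'))     # unicode/str input works as before
try:
    jf.soundex(b'beets')        # non-str input is rejected by _check_type
except TypeError as exc:
    print(exc)                  # expected str or unicode, got bytes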