Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-19 21:03:21 -07:00)
Add tqdm v4.21.0
This commit is contained in:
commit fabced9942 (parent 8aa34321c9)
16 changed files with 4820 additions and 0 deletions
lib/tqdm/tests/tests_main.py (Normal file, 94 lines added)
@@ -0,0 +1,94 @@
import sys
import subprocess
from tqdm import main, TqdmKeyError, TqdmTypeError

from tests_tqdm import with_setup, pretest, posttest, _range, closing, \
    UnicodeIO, StringIO


def _sh(*cmd, **kwargs):
    return subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            **kwargs).communicate()[0].decode('utf-8')


# WARNING: this should be the last test as it messes with sys.stdin, argv
@with_setup(pretest, posttest)
def test_main():
    """Test command line pipes"""
    ls_out = _sh('ls').replace('\r\n', '\n')
    ls = subprocess.Popen('ls', stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    res = _sh(sys.executable, '-c', 'from tqdm import main; main()',
              stdin=ls.stdout, stderr=subprocess.STDOUT)
    ls.wait()

    # actual test:

    assert (ls_out in res.replace('\r\n', '\n'))

    # semi-fake test which gets coverage:
    _SYS = sys.stdin, sys.argv

    with closing(StringIO()) as sys.stdin:
        sys.argv = ['', '--desc', 'Test CLI-delims',
                    '--ascii', 'True', '--delim', r'\0', '--buf_size', '64']
        sys.stdin.write('\0'.join(map(str, _range(int(1e3)))))
        sys.stdin.seek(0)
        main()

    IN_DATA_LIST = map(str, _range(int(1e3)))
    sys.stdin = IN_DATA_LIST
    sys.argv = ['', '--desc', 'Test CLI pipes',
                '--ascii', 'True', '--unit_scale', 'True']
    import tqdm.__main__  # NOQA

    IN_DATA = '\0'.join(IN_DATA_LIST)
    with closing(StringIO()) as sys.stdin:
        sys.stdin.write(IN_DATA)
        sys.stdin.seek(0)
        sys.argv = ['', '--ascii', '--bytes', '--unit_scale', 'False']
        with closing(UnicodeIO()) as fp:
            main(fp=fp)
        assert (str(len(IN_DATA)) in fp.getvalue())

    sys.stdin = IN_DATA_LIST
    sys.argv = ['', '-ascii', '--unit_scale', 'False',
                '--desc', 'Test CLI errors']
    main()

    sys.argv = ['', '-ascii', '-unit_scale', '--bad_arg_u_ment', 'foo']
    try:
        main()
    except TqdmKeyError as e:
        if 'bad_arg_u_ment' not in str(e):
            raise
    else:
        raise TqdmKeyError('bad_arg_u_ment')

    sys.argv = ['', '-ascii', '-unit_scale', 'invalid_bool_value']
    try:
        main()
    except TqdmTypeError as e:
        if 'invalid_bool_value' not in str(e):
            raise
    else:
        raise TqdmTypeError('invalid_bool_value')

    sys.argv = ['', '-ascii', '--total', 'invalid_int_value']
    try:
        main()
    except TqdmTypeError as e:
        if 'invalid_int_value' not in str(e):
            raise
    else:
        raise TqdmTypeError('invalid_int_value')

    for i in ('-h', '--help', '-v', '--version'):
        sys.argv = ['', i]
        try:
            main()
        except SystemExit:
            pass

    # clean up
    sys.stdin, sys.argv = _SYS
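For context, tests_main.py drives tqdm's command-line interface. A minimal sketch of the piping behaviour it verifies, assuming only that `python -m tqdm` echoes stdin to stdout while drawing the bar on stderr (the `ls` input here is illustrative, not part of the diff):

import subprocess
import sys

# Pipe a command's output through `python -m tqdm`; stdout passes
# through unchanged, the progress bar goes to stderr (discarded here).
src = subprocess.Popen(('ls',), stdout=subprocess.PIPE)
out = subprocess.check_output(
    (sys.executable, '-m', 'tqdm', '--desc', 'demo', '--ascii', 'True'),
    stdin=src.stdout, stderr=subprocess.DEVNULL)
src.wait()
print(out.decode('utf-8'))  # same bytes that `ls` produced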
lib/tqdm/tests/tests_pandas.py (Normal file, 207 lines added)
@@ -0,0 +1,207 @@
from nose.plugins.skip import SkipTest

from tqdm import tqdm
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing


@with_setup(pretest, posttest)
def test_pandas_series():
    """Test pandas.Series.progress_apply and .progress_map"""
    try:
        from numpy.random import randint
        import pandas as pd
    except ImportError:
        raise SkipTest

    with closing(StringIO()) as our_file:
        tqdm.pandas(file=our_file, leave=True, ascii=True)

        series = pd.Series(randint(0, 50, (123,)))
        res1 = series.progress_apply(lambda x: x + 10)
        res2 = series.apply(lambda x: x + 10)
        assert res1.equals(res2)

        res3 = series.progress_map(lambda x: x + 10)
        res4 = series.map(lambda x: x + 10)
        assert res3.equals(res4)

        expects = ['100%', '123/123']
        for exres in expects:
            our_file.seek(0)
            if our_file.getvalue().count(exres) < 2:
                our_file.seek(0)
                raise AssertionError(
                    "\nExpected:\n{0}\nIn:\n{1}\n".format(
                        exres + " at least twice.", our_file.read()))


@with_setup(pretest, posttest)
def test_pandas_data_frame():
    """Test pandas.DataFrame.progress_apply and .progress_applymap"""
    try:
        from numpy.random import randint
        import pandas as pd
    except ImportError:
        raise SkipTest

    with closing(StringIO()) as our_file:
        tqdm.pandas(file=our_file, leave=True, ascii=True)
        df = pd.DataFrame(randint(0, 50, (100, 200)))

        def task_func(x):
            return x + 1

        # applymap
        res1 = df.progress_applymap(task_func)
        res2 = df.applymap(task_func)
        assert res1.equals(res2)

        # apply
        for axis in [0, 1]:
            res3 = df.progress_apply(task_func, axis=axis)
            res4 = df.apply(task_func, axis=axis)
            assert res3.equals(res4)

        our_file.seek(0)
        if our_file.read().count('100%') < 3:
            our_file.seek(0)
            raise AssertionError("\nExpected:\n{0}\nIn:\n{1}\n".format(
                '100% at least three times', our_file.read()))

        # apply_map, apply axis=0, apply axis=1
        expects = ['20000/20000', '200/200', '100/100']
        for exres in expects:
            our_file.seek(0)
            if our_file.getvalue().count(exres) < 1:
                our_file.seek(0)
                raise AssertionError(
                    "\nExpected:\n{0}\nIn:\n {1}\n".format(
                        exres + " at least once.", our_file.read()))


@with_setup(pretest, posttest)
def test_pandas_groupby_apply():
    """Test pandas.DataFrame.groupby(...).progress_apply"""
    try:
        from numpy.random import randint
        import pandas as pd
    except ImportError:
        raise SkipTest

    with closing(StringIO()) as our_file:
        tqdm.pandas(file=our_file, leave=False, ascii=True)

        df = pd.DataFrame(randint(0, 50, (500, 3)))
        df.groupby(0).progress_apply(lambda x: None)

        dfs = pd.DataFrame(randint(0, 50, (500, 3)), columns=list('abc'))
        dfs.groupby(['a']).progress_apply(lambda x: None)

        our_file.seek(0)

        # don't expect final output since no `leave` and
        # high dynamic `miniters`
        nexres = '100%|##########|'
        if nexres in our_file.read():
            our_file.seek(0)
            raise AssertionError("\nDid not expect:\n{0}\nIn:{1}\n".format(
                nexres, our_file.read()))

    with closing(StringIO()) as our_file:
        tqdm.pandas(file=our_file, leave=True, ascii=True)

        dfs = pd.DataFrame(randint(0, 50, (500, 3)), columns=list('abc'))
        dfs.loc[0] = [2, 1, 1]
        dfs['d'] = 100

        expects = ['500/500', '1/1', '4/4', '2/2']
        dfs.groupby(dfs.index).progress_apply(lambda x: None)
        dfs.groupby('d').progress_apply(lambda x: None)
        dfs.groupby(dfs.columns, axis=1).progress_apply(lambda x: None)
        dfs.groupby([2, 2, 1, 1], axis=1).progress_apply(lambda x: None)

        our_file.seek(0)
        if our_file.read().count('100%') < 4:
            our_file.seek(0)
            raise AssertionError("\nExpected:\n{0}\nIn:\n{1}\n".format(
                '100% at least four times', our_file.read()))

        for exres in expects:
            our_file.seek(0)
            if our_file.getvalue().count(exres) < 1:
                our_file.seek(0)
                raise AssertionError(
                    "\nExpected:\n{0}\nIn:\n {1}\n".format(
                        exres + " at least once.", our_file.read()))


@with_setup(pretest, posttest)
def test_pandas_leave():
    """Test pandas with `leave=True`"""
    try:
        from numpy.random import randint
        import pandas as pd
    except ImportError:
        raise SkipTest

    with closing(StringIO()) as our_file:
        df = pd.DataFrame(randint(0, 100, (1000, 6)))
        tqdm.pandas(file=our_file, leave=True, ascii=True)
        df.groupby(0).progress_apply(lambda x: None)

        our_file.seek(0)

        exres = '100%|##########| 100/100'
        if exres not in our_file.read():
            our_file.seek(0)
            raise AssertionError(
                "\nExpected:\n{0}\nIn:{1}\n".format(exres, our_file.read()))


@with_setup(pretest, posttest)
def test_pandas_apply_args_deprecation():
    """Test warning info in
    `pandas.Dataframe(Series).progress_apply(func, *args)`"""
    try:
        from numpy.random import randint
        from tqdm import tqdm_pandas
        import pandas as pd
    except ImportError:
        raise SkipTest

    with closing(StringIO()) as our_file:
        tqdm_pandas(tqdm(file=our_file, leave=False, ascii=True, ncols=20))
        df = pd.DataFrame(randint(0, 50, (500, 3)))
        df.progress_apply(lambda x: None, 1)  # 1 shall cause a warning
        # Check deprecation message
        res = our_file.getvalue()
        assert all([i in res for i in (
            "TqdmDeprecationWarning", "not supported",
            "keyword arguments instead")])


@with_setup(pretest, posttest)
def test_pandas_deprecation():
    """Test bar object instance as argument deprecation"""
    try:
        from numpy.random import randint
        from tqdm import tqdm_pandas
        import pandas as pd
    except ImportError:
        raise SkipTest

    with closing(StringIO()) as our_file:
        tqdm_pandas(tqdm(file=our_file, leave=False, ascii=True, ncols=20))
        df = pd.DataFrame(randint(0, 50, (500, 3)))
        df.groupby(0).progress_apply(lambda x: None)
        # Check deprecation message
        assert "TqdmDeprecationWarning" in our_file.getvalue()
        assert "instead of `tqdm_pandas(tqdm(...))`" in our_file.getvalue()

    with closing(StringIO()) as our_file:
        tqdm_pandas(tqdm, file=our_file, leave=False, ascii=True, ncols=20)
        df = pd.DataFrame(randint(0, 50, (500, 3)))
        df.groupby(0).progress_apply(lambda x: None)
        # Check deprecation message
        assert "TqdmDeprecationWarning" in our_file.getvalue()
        assert "instead of `tqdm_pandas(tqdm, ...)`" in our_file.getvalue()
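For reference, the pandas integration exercised above is registered once per session via tqdm.pandas(). A minimal usage sketch (the DataFrame shape and description string are illustrative):

import numpy as np
import pandas as pd
from tqdm import tqdm

tqdm.pandas(desc="apply")  # patches .progress_apply/.progress_map onto pandas
df = pd.DataFrame(np.random.randint(0, 50, (1000, 3)))
res = df.progress_apply(lambda col: col + 1)  # same result as df.apply,
                                              # plus a progress bar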
lib/tqdm/tests/tests_perf.py (Normal file, 336 lines added)
@@ -0,0 +1,336 @@
from __future__ import print_function, division

from nose.plugins.skip import SkipTest

from contextlib import contextmanager

import sys
from time import sleep, time

from tqdm import trange
from tqdm import tqdm

from tests_tqdm import with_setup, pretest, posttest, StringIO, closing, _range

# Use relative/cpu timer to have reliable timings when there is a sudden load
try:
    from time import process_time
except ImportError:
    from time import clock
    process_time = clock


def get_relative_time(prevtime=0):
    return process_time() - prevtime


def cpu_sleep(t):
    """Sleep the given amount of cpu time"""
    start = process_time()
    while (process_time() - start) < t:
        pass


def checkCpuTime(sleeptime=0.2):
    """Check if cpu time works correctly"""
    if checkCpuTime.passed:
        return True
    # First test that sleeping does not consume cputime
    start1 = process_time()
    sleep(sleeptime)
    t1 = process_time() - start1

    # secondly check by comparing to cpu_sleep (where we actually do something)
    start2 = process_time()
    cpu_sleep(sleeptime)
    t2 = process_time() - start2

    if abs(t1) < 0.0001 and (t1 < t2 / 10):
        return True
    raise SkipTest


checkCpuTime.passed = False


@contextmanager
def relative_timer():
    start = process_time()

    def elapser():
        return process_time() - start

    yield lambda: elapser()
    spent = process_time() - start

    def elapser():  # NOQA
        return spent


def retry_on_except(n=3):
    def wrapper(fn):
        def test_inner():
            for i in range(1, n + 1):
                try:
                    checkCpuTime()
                    fn()
                except SkipTest:
                    if i >= n:
                        raise
                else:
                    return

        test_inner.__doc__ = fn.__doc__
        return test_inner

    return wrapper


class MockIO(StringIO):
    """Wraps StringIO to mock a file with no I/O"""

    def write(self, data):
        return


def simple_progress(iterable=None, total=None, file=sys.stdout, desc='',
                    leave=False, miniters=1, mininterval=0.1, width=60):
    """Simple progress bar reproducing tqdm's major features"""
    n = [0]  # use a closure
    start_t = [time()]
    last_n = [0]
    last_t = [0]
    if iterable is not None:
        total = len(iterable)

    def format_interval(t):
        mins, s = divmod(int(t), 60)
        h, m = divmod(mins, 60)
        if h:
            return '{0:d}:{1:02d}:{2:02d}'.format(h, m, s)
        else:
            return '{0:02d}:{1:02d}'.format(m, s)

    def update_and_print(i=1):
        n[0] += i
        if (n[0] - last_n[0]) >= miniters:
            last_n[0] = n[0]

            if (time() - last_t[0]) >= mininterval:
                last_t[0] = time()  # last_t[0] == current time

                spent = last_t[0] - start_t[0]
                spent_fmt = format_interval(spent)
                rate = n[0] / spent if spent > 0 else 0
                if 0.0 < rate < 1.0:
                    rate_fmt = "%.2fs/it" % (1.0 / rate)
                else:
                    rate_fmt = "%.2fit/s" % rate

                frac = n[0] / total
                percentage = int(frac * 100)
                eta = (total - n[0]) / rate if rate > 0 else 0
                eta_fmt = format_interval(eta)

                # bar = "#" * int(frac * width)
                barfill = " " * int((1.0 - frac) * width)
                bar_length, frac_bar_length = divmod(int(frac * width * 10), 10)
                bar = '#' * bar_length
                frac_bar = chr(48 + frac_bar_length) if frac_bar_length \
                    else ' '

                file.write("\r%s %i%%|%s%s%s| %i/%i [%s<%s, %s]" %
                           (desc, percentage, bar, frac_bar, barfill, n[0],
                            total, spent_fmt, eta_fmt, rate_fmt))

                if n[0] == total and leave:
                    file.write("\n")
                file.flush()

    def update_and_yield():
        for elt in iterable:
            yield elt
            update_and_print()

    update_and_print(0)
    if iterable is not None:
        return update_and_yield()
    else:
        return update_and_print


@with_setup(pretest, posttest)
@retry_on_except()
def test_iter_overhead():
    """Test overhead of iteration based tqdm"""

    total = int(1e6)

    with closing(MockIO()) as our_file:
        a = 0
        with trange(total, file=our_file) as t:
            with relative_timer() as time_tqdm:
                for i in t:
                    a += i
        assert (a == (total * total - total) / 2.0)

        a = 0
        with relative_timer() as time_bench:
            for i in _range(total):
                a += i
                our_file.write(a)

    # Compute relative overhead of tqdm against native range()
    if time_tqdm() > 9 * time_bench():
        raise AssertionError('trange(%g): %f, range(%g): %f' %
                             (total, time_tqdm(), total, time_bench()))


@with_setup(pretest, posttest)
@retry_on_except()
def test_manual_overhead():
    """Test overhead of manual tqdm"""

    total = int(1e6)

    with closing(MockIO()) as our_file:
        with tqdm(total=total * 10, file=our_file, leave=True) as t:
            a = 0
            with relative_timer() as time_tqdm:
                for i in _range(total):
                    a += i
                    t.update(10)

        a = 0
        with relative_timer() as time_bench:
            for i in _range(total):
                a += i
                our_file.write(a)

    # Compute relative overhead of tqdm against native range()
    if time_tqdm() > 10 * time_bench():
        raise AssertionError('tqdm(%g): %f, range(%g): %f' %
                             (total, time_tqdm(), total, time_bench()))


@with_setup(pretest, posttest)
@retry_on_except()
def test_iter_overhead_hard():
    """Test overhead of iteration based tqdm (hard)"""

    total = int(1e5)

    with closing(MockIO()) as our_file:
        a = 0
        with trange(total, file=our_file, leave=True, miniters=1,
                    mininterval=0, maxinterval=0) as t:
            with relative_timer() as time_tqdm:
                for i in t:
                    a += i
        assert (a == (total * total - total) / 2.0)

        a = 0
        with relative_timer() as time_bench:
            for i in _range(total):
                a += i
                our_file.write(("%i" % a) * 40)

    # Compute relative overhead of tqdm against native range()
    try:
        assert (time_tqdm() < 60 * time_bench())
    except AssertionError:
        raise AssertionError('trange(%g): %f, range(%g): %f' %
                             (total, time_tqdm(), total, time_bench()))


@with_setup(pretest, posttest)
@retry_on_except()
def test_manual_overhead_hard():
    """Test overhead of manual tqdm (hard)"""

    total = int(1e5)

    with closing(MockIO()) as our_file:
        t = tqdm(total=total * 10, file=our_file, leave=True, miniters=1,
                 mininterval=0, maxinterval=0)
        a = 0
        with relative_timer() as time_tqdm:
            for i in _range(total):
                a += i
                t.update(10)

        a = 0
        with relative_timer() as time_bench:
            for i in _range(total):
                a += i
                our_file.write(("%i" % a) * 40)

    # Compute relative overhead of tqdm against native range()
    try:
        assert (time_tqdm() < 100 * time_bench())
    except AssertionError:
        raise AssertionError('tqdm(%g): %f, range(%g): %f' %
                             (total, time_tqdm(), total, time_bench()))


@with_setup(pretest, posttest)
@retry_on_except()
def test_iter_overhead_simplebar_hard():
    """Test overhead of iteration based tqdm vs simple progress bar (hard)"""

    total = int(1e4)

    with closing(MockIO()) as our_file:
        a = 0
        with trange(total, file=our_file, leave=True, miniters=1,
                    mininterval=0, maxinterval=0) as t:
            with relative_timer() as time_tqdm:
                for i in t:
                    a += i
        assert (a == (total * total - total) / 2.0)

        a = 0
        s = simple_progress(_range(total), file=our_file, leave=True,
                            miniters=1, mininterval=0)
        with relative_timer() as time_bench:
            for i in s:
                a += i

    # Compute relative overhead of tqdm against simple_progress()
    try:
        assert (time_tqdm() < 2.5 * time_bench())
    except AssertionError:
        raise AssertionError('trange(%g): %f, simple_progress(%g): %f' %
                             (total, time_tqdm(), total, time_bench()))


@with_setup(pretest, posttest)
@retry_on_except()
def test_manual_overhead_simplebar_hard():
    """Test overhead of manual tqdm vs simple progress bar (hard)"""

    total = int(1e4)

    with closing(MockIO()) as our_file:
        t = tqdm(total=total * 10, file=our_file, leave=True, miniters=1,
                 mininterval=0, maxinterval=0)
        a = 0
        with relative_timer() as time_tqdm:
            for i in _range(total):
                a += i
                t.update(10)

        simplebar_update = simple_progress(
            total=total, file=our_file, leave=True, miniters=1, mininterval=0)
        a = 0
        with relative_timer() as time_bench:
            for i in _range(total):
                a += i
                simplebar_update(10)

    # Compute relative overhead of tqdm against simple_progress()
    try:
        assert (time_tqdm() < 2.5 * time_bench())
    except AssertionError:
        raise AssertionError('tqdm(%g): %f, simple_progress(%g): %f' %
                             (total, time_tqdm(), total, time_bench()))
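The overhead tests above time tqdm against a bare loop over a write-discarding file. The pattern they measure, in isolation (the total and the use of os.devnull as a null sink are illustrative choices, not from the diff):

import os
from tqdm import trange

# Iteration-based form: trange wraps range() and updates the bar per item.
# Writing the bar to os.devnull keeps terminal I/O out of the measurement.
total = int(1e5)
with open(os.devnull, 'w') as nul:
    a = 0
    for i in trange(total, file=nul, miniters=1, mininterval=0):
        a += i
assert a == (total * total - total) / 2.0  # same check the tests use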
lib/tqdm/tests/tests_synchronisation.py (Normal file, 164 lines added)
@@ -0,0 +1,164 @@
from __future__ import division
from tqdm import tqdm
from tests_tqdm import with_setup, pretest, posttest, StringIO, closing
from tests_tqdm import DiscreteTimer, cpu_timify

from time import sleep
from threading import Event
from tqdm import TMonitor


class FakeSleep(object):
    """Wait until the discrete timer reaches the required time"""
    def __init__(self, dtimer):
        self.dtimer = dtimer

    def sleep(self, t):
        end = t + self.dtimer.t
        while self.dtimer.t < end:
            sleep(0.0000001)  # sleep a bit to interrupt (instead of pass)


class FakeTqdm(object):
    _instances = []


def make_create_fake_sleep_event(sleep):
    def wait(self, timeout=None):
        if timeout is not None:
            sleep(timeout)
        return self.is_set()

    def create_fake_sleep_event():
        event = Event()
        event.wait = wait
        return event

    return create_fake_sleep_event


@with_setup(pretest, posttest)
def test_monitor_thread():
    """Test dummy monitoring thread"""
    maxinterval = 10

    # Setup a discrete timer
    timer = DiscreteTimer()
    TMonitor._time = timer.time
    # And a fake sleeper
    sleeper = FakeSleep(timer)
    TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)

    # Instantiate the monitor
    monitor = TMonitor(FakeTqdm, maxinterval)
    # Test if alive, then killed
    assert monitor.report()
    monitor.exit()
    timer.sleep(maxinterval * 2)  # need to go out of the sleep to die
    assert not monitor.report()
    # assert not monitor.is_alive()  # not working, dunno why, thread not killed
    del monitor


@with_setup(pretest, posttest)
def test_monitoring_and_cleanup():
    """Test for stalled tqdm instance and monitor deletion"""
    # Note: should fix miniters for these tests, else with dynamic_miniters
    # it's too complicated to handle with monitoring update and maxinterval...
    maxinterval = 2

    total = 1000
    # Setup a discrete timer
    timer = DiscreteTimer()
    # And a fake sleeper
    sleeper = FakeSleep(timer)
    # Setup TMonitor to use the timer
    TMonitor._time = timer.time
    TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
    # Set monitor interval
    tqdm.monitor_interval = maxinterval
    with closing(StringIO()) as our_file:
        with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
                  maxinterval=maxinterval) as t:
            cpu_timify(t, timer)
            # Do a lot of iterations in a small timeframe
            # (smaller than monitor interval)
            timer.sleep(maxinterval / 2)  # monitor won't wake up
            t.update(500)
            # check that our fixed miniters is still there
            assert t.miniters == 500
            # Then do 1 it after monitor interval, so that monitor kicks in
            timer.sleep(maxinterval * 2)
            t.update(1)
            # Wait for the monitor to get out of sleep's loop and update tqdm
            timeend = timer.time()
            while not (t.monitor.woken >= timeend and t.miniters == 1):
                timer.sleep(1)  # Force monitor to wake up if it woke too soon
                sleep(0.000001)  # sleep to allow interrupt (instead of pass)
            assert t.miniters == 1  # check that monitor corrected miniters
            # Note: at this point, there may be a race condition: monitor
            # saved the current woken time but timer.sleep() happened just
            # before the monitor sleep. To fix that, either sleep here or
            # increase time in a loop to ensure that the monitor wakes up
            # at some point.

            # Try again but already at miniters = 1 so nothing will be done
            timer.sleep(maxinterval * 2)
            t.update(2)
            timeend = timer.time()
            while not (t.monitor.woken >= timeend):
                timer.sleep(1)  # Force monitor to wake up if it woke too soon
                sleep(0.000001)
            # Wait for the monitor to get out of sleep's loop and update tqdm
            assert t.miniters == 1  # check that monitor corrected miniters

    # Check that class var monitor is deleted if no instance left
    tqdm.monitor_interval = 10
    assert tqdm.monitor is None


@with_setup(pretest, posttest)
def test_monitoring_multi():
    """Test on multiple bars, one not needing miniters adjustment"""
    # Note: should fix miniters for these tests, else with dynamic_miniters
    # it's too complicated to handle with monitoring update and maxinterval...
    maxinterval = 2

    total = 1000
    # Setup a discrete timer
    timer = DiscreteTimer()
    # And a fake sleeper
    sleeper = FakeSleep(timer)
    # Setup TMonitor to use the timer
    TMonitor._time = timer.time
    TMonitor._event = make_create_fake_sleep_event(sleeper.sleep)
    # Set monitor interval
    tqdm.monitor_interval = maxinterval
    with closing(StringIO()) as our_file:
        with tqdm(total=total, file=our_file, miniters=500, mininterval=0.1,
                  maxinterval=maxinterval) as t1:
            # Set high maxinterval for t2 so monitor does not need to adjust it
            with tqdm(total=total, file=our_file, miniters=500,
                      mininterval=0.1, maxinterval=1E5) as t2:
                cpu_timify(t1, timer)
                cpu_timify(t2, timer)
                # Do a lot of iterations in a small timeframe
                timer.sleep(maxinterval / 2)
                t1.update(500)
                t2.update(500)
                assert t1.miniters == 500
                assert t2.miniters == 500
                # Then do 1 it after monitor interval, so that monitor kicks in
                timer.sleep(maxinterval * 2)
                t1.update(1)
                t2.update(1)
                # Wait for the monitor to get out of sleep and update tqdm
                timeend = timer.time()
                while not (t1.monitor.woken >= timeend and t1.miniters == 1):
                    timer.sleep(1)
                    sleep(0.000001)
                assert t1.miniters == 1  # check that monitor corrected miniters
                assert t2.miniters == 500  # check that t2 was not adjusted

    # Check that class var monitor is deleted if no instance left
    tqdm.monitor_interval = 10
    assert tqdm.monitor is None
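The monitor thread these tests exercise is controlled by a single class attribute. A short sketch of that knob, per tqdm's documented behaviour (the loop body is illustrative):

from tqdm import tqdm

# The monitor wakes every `monitor_interval` seconds to relax miniters on
# bars that have stalled; setting it to 0 disables the thread entirely.
tqdm.monitor_interval = 0
for _ in tqdm(range(100)):
    pass  # do work here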
lib/tqdm/tests/tests_tqdm.py (Normal file, 1541 lines added)
File diff suppressed because it is too large
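tests_tqdm.py (suppressed above) covers the core progress bar. For orientation, the basic API it exercises (iterables and keyword values here are illustrative):

from tqdm import tqdm, trange

for i in trange(100, desc='demo', ascii=True):  # trange(n) = tqdm(range(n))
    pass  # do work here
for item in tqdm(['a', 'b', 'c'], unit='item'):  # any iterable works
    pass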
lib/tqdm/tests/tests_version.py (Normal file, 12 lines added)
@@ -0,0 +1,12 @@
import re


def test_version():
    """Test version string"""
    from tqdm import __version__
    version_parts = re.split('[.-]', __version__)
    assert 3 <= len(version_parts)  # must have at least Major.minor.patch
    try:
        map(int, version_parts[:3])
    except ValueError:
        raise TypeError('Version Major.minor.patch must be 3 integers')
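A quick interactive check mirroring test_version(); per the commit title, the vendored copy is expected to report 4.21.0:

import re
from tqdm import __version__

# Version must start with Major.minor.patch, as test_version() asserts.
assert re.match(r'\d+\.\d+\.\d+', __version__), __version__
print(__version__)  # expected: '4.21.0' for this vendored copy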