Removing python_toolbox test dependency

Ram Rachum 2019-06-14 18:04:13 +02:00
parent 43bde4b8bf
commit f793796ad3
8 changed files with 2391 additions and 28 deletions

View file

@@ -0,0 +1,255 @@
# Copyright 2019 Ram Rachum and collaborators.
# This program is distributed under the MIT license.
import tempfile
import shutil
import io
import sys
from . import pathlib
from . import contextlib
@contextlib.contextmanager
def BlankContextManager():
yield
@contextlib.contextmanager
def create_temp_folder(prefix=tempfile.template, suffix='',
parent_folder=None, chmod=None):
'''
Context manager that creates a temporary folder and deletes it after usage.
After the suite finishes, the temporary folder and all its files and
subfolders will be deleted.
Example:
with create_temp_folder() as temp_folder:
# We have a temporary folder!
assert temp_folder.is_dir()
# We can create files in it:
(temp_folder / 'my_file').open('w')
# The suite is finished, now it's all cleaned:
assert not temp_folder.exists()
Use the `prefix` and `suffix` string arguments to dictate a prefix and/or a
suffix to the temporary folder's name in the filesystem.
If you'd like to set the permissions of the temporary folder, pass them to
the optional `chmod` argument, like this:
create_temp_folder(chmod=0o550)
'''
temp_folder = pathlib.Path(tempfile.mkdtemp(prefix=prefix, suffix=suffix,
dir=parent_folder))
try:
if chmod is not None:
temp_folder.chmod(chmod)
yield temp_folder
finally:
shutil.rmtree(str(temp_folder))
class NotInDict:
'''Object signifying that the key was not found in the dict.'''
class TempValueSetter(object):
'''
Context manager for temporarily setting a value to a variable.
The value is set to the variable before the suite starts, and gets reset
back to the old value after the suite finishes.
'''
def __init__(self, variable, value, assert_no_fiddling=True):
'''
Construct the `TempValueSetter`.
`variable` may be either an `(object, attribute_string)` pair, a `(dict, key)` pair, or a `(getter, setter)` pair.
`value` is the temporary value to set to the variable.
'''
self.assert_no_fiddling = assert_no_fiddling
#######################################################################
# We let the user input either an `(object, attribute_string)`, a
# `(dict, key)` pair, or a `(getter, setter)` pair. So now it's our job
# to inspect `variable` and figure out which one of these options the
# user chose, and then obtain from that a `(getter, setter)` pair that
# we could use.
bad_input_exception = Exception(
'`variable` must be either an `(object, attribute_string)` pair, '
'a `(dict, key)` pair, or a `(getter, setter)` pair.'
)
try:
first, second = variable
except Exception:
raise bad_input_exception
if hasattr(first, '__getitem__') and hasattr(first, 'get') and \
hasattr(first, '__setitem__') and hasattr(first, '__delitem__'):
# `first` is a dictoid; so we were probably handed a `(dict, key)`
# pair.
self.getter = lambda: first.get(second, NotInDict)
self.setter = lambda value: (first.__setitem__(second, value) if
value is not NotInDict else
first.__delitem__(second))
### Finished handling the `(dict, key)` case. ###
elif callable(second):
# `second` is a callable; so we were probably handed a `(getter,
# setter)` pair.
if not callable(first):
raise bad_input_exception
self.getter, self.setter = first, second
### Finished handling the `(getter, setter)` case. ###
else:
# All that's left is the `(object, attribute_string)` case.
if not isinstance(second, str):
raise bad_input_exception
parent, attribute_name = first, second
self.getter = lambda: getattr(parent, attribute_name)
self.setter = lambda value: setattr(parent, attribute_name, value)
### Finished handling the `(object, attribute_string)` case. ###
#
#
### Finished obtaining a `(getter, setter)` pair from `variable`. #####
self.getter = self.getter
'''Getter for getting the current value of the variable.'''
self.setter = self.setter
'''Setter for setting the variable's value.'''
self.value = value
'''The value to temporarily set to the variable.'''
self.active = False
def __enter__(self):
self.active = True
self.old_value = self.getter()
'''The old value of the variable, before entering the suite.'''
self.setter(self.value)
# In `__exit__` we'll want to check if anyone changed the value of the
variable in the suite, which is not allowed. But we can't compare to
# `.value`, because sometimes when you set a value to a variable, some
# mechanism modifies that value for various reasons, resulting in a
# supposedly equivalent, but not identical, value. For example this
# happens when you set the current working directory on Mac OS.
#
# So here we record the value right after setting, and after any
# possible processing the system did to it:
self._value_right_after_setting = self.getter()
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
if self.assert_no_fiddling:
# Asserting no-one inside the suite changed our variable:
assert self.getter() == self._value_right_after_setting
self.setter(self.old_value)
self.active = False
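# Illustrative sketch (not part of the committed file): the three `variable`
# forms that `TempValueSetter` accepts. The names `_demo_temp_value_setter`,
# `config` and `Carrier` are hypothetical placeholders.
def _demo_temp_value_setter():
    # `(dict, key)` pair:
    config = {'verbose': False}
    with TempValueSetter((config, 'verbose'), True):
        assert config['verbose'] is True
    assert config['verbose'] is False

    # `(object, attribute_string)` pair:
    class Carrier:
        x = 1
    with TempValueSetter((Carrier, 'x'), 2):
        assert Carrier.x == 2
    assert Carrier.x == 1

    # `(getter, setter)` pair:
    stream = io.StringIO()
    with TempValueSetter((lambda: sys.stdout,
                          lambda value: setattr(sys, 'stdout', value)),
                         stream):
        print('hello')
    assert stream.getvalue() == 'hello\n'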
class OutputCapturer(object):
'''
Context manager for catching all system output generated during the suite.
Example:
with OutputCapturer() as output_capturer:
print('woo!')
assert output_capturer.output == 'woo!\n'
The boolean arguments `stdout` and `stderr` determine, respectively,
whether the standard-output and the standard-error streams will be
captured.
'''
def __init__(self, stdout=True, stderr=True):
self.string_io = io.StringIO()
if stdout:
self._stdout_temp_setter = \
TempValueSetter((sys, 'stdout'), self.string_io)
else: # not stdout
self._stdout_temp_setter = BlankContextManager()
if stderr:
self._stderr_temp_setter = \
TempValueSetter((sys, 'stderr'), self.string_io)
else: # not stderr
self._stderr_temp_setter = BlankContextManager()
def __enter__(self):
'''Manage the `OutputCapturer`'s context.'''
self._stdout_temp_setter.__enter__()
self._stderr_temp_setter.__enter__()
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
# Not doing exception swallowing anywhere here.
self._stderr_temp_setter.__exit__(exc_type, exc_value, exc_traceback)
self._stdout_temp_setter.__exit__(exc_type, exc_value, exc_traceback)
output = property(lambda self: self.string_io.getvalue(),
doc='''The string of output that was captured.''')
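# Illustrative sketch (not part of the committed file): capturing only stderr,
# which is how the pysnooper tests below use this class. The function name
# `_demo_output_capturer` is hypothetical.
def _demo_output_capturer():
    with OutputCapturer(stdout=False, stderr=True) as output_capturer:
        print('goes to the real stdout')  # not captured
        sys.stderr.write('captured\n')    # captured
    assert output_capturer.output == 'captured\n'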
class TempSysPathAdder(object):
'''
Context manager for temporarily adding paths to `sys.path`.
Removes the path(s) after the suite.
Example:
with TempSysPathAdder('path/to/fubar/package'):
import fubar
fubar.do_stuff()
'''
def __init__(self, addition):
self.addition = [str(addition)]
def __enter__(self):
self.entries_not_in_sys_path = [entry for entry in self.addition if
entry not in sys.path]
sys.path += self.entries_not_in_sys_path
return self
def __exit__(self, *args, **kwargs):
for entry in self.entries_not_in_sys_path:
# We don't allow anyone to remove it except for us:
assert entry in sys.path
sys.path.remove(entry)
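# Illustrative sketch (not part of the committed file): combining these helpers
# the way the test suite's test_unavailable_source does: write a module into a
# temporary folder, put that folder on sys.path, and import it. The module name
# `examplemod` is a hypothetical placeholder.
def _demo_temp_import():
    with create_temp_folder(prefix='pysnooper') as folder, \
            TempSysPathAdder(str(folder)):
        python_file_path = folder / 'examplemod.py'
        with python_file_path.open('w') as python_file:
            python_file.write(u'ANSWER = 42\n')
        module = __import__('examplemod')
        assert module.ANSWER == 42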

View file

@@ -0,0 +1,436 @@
"""contextlib2 - backports and enhancements to the contextlib module"""
import sys
import warnings
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack",
"redirect_stdout", "redirect_stderr", "suppress"]
# Backwards compatibility
__all__ += ["ContextStack"]
class ContextDecorator(object):
"A base class or mixin that enables context managers to work as decorators."
def refresh_cm(self):
"""Returns the context manager used to actually wrap the call to the
decorated function.
The default implementation just returns *self*.
Overriding this method allows otherwise one-shot context managers
like _GeneratorContextManager to support use as decorators via
implicit recreation.
DEPRECATED: refresh_cm was never added to the standard library's
ContextDecorator API
"""
warnings.warn("refresh_cm was never added to the standard library",
DeprecationWarning)
return self._recreate_cm()
def _recreate_cm(self):
"""Return a recreated instance of self.
Allows an otherwise one-shot context manager like
_GeneratorContextManager to support use as
a decorator via implicit recreation.
This is a private interface just for _GeneratorContextManager.
See issue #11647 for details.
"""
return self
def __call__(self, func):
@wraps(func)
def inner(*args, **kwds):
with self._recreate_cm():
return func(*args, **kwds)
return inner
class _GeneratorContextManager(ContextDecorator):
"""Helper for @contextmanager decorator."""
def __init__(self, func, args, kwds):
self.gen = func(*args, **kwds)
self.func, self.args, self.kwds = func, args, kwds
# Issue 19330: ensure context manager instances have good docstrings
doc = getattr(func, "__doc__", None)
if doc is None:
doc = type(self).__doc__
self.__doc__ = doc
# Unfortunately, this still doesn't provide good help output when
# inspecting the created context manager instances, since pydoc
# currently bypasses the instance docstring and shows the docstring
# for the class instead.
# See http://bugs.python.org/issue19404 for more details.
def _recreate_cm(self):
# _GCM instances are one-shot context managers, so the
# CM must be recreated each time a decorated function is
# called
return self.__class__(self.func, self.args, self.kwds)
def __enter__(self):
try:
return next(self.gen)
except StopIteration:
raise RuntimeError("generator didn't yield")
def __exit__(self, type, value, traceback):
if type is None:
try:
next(self.gen)
except StopIteration:
return
else:
raise RuntimeError("generator didn't stop")
else:
if value is None:
# Need to force instantiation so we can reliably
# tell if we get the same exception back
value = type()
try:
self.gen.throw(type, value, traceback)
raise RuntimeError("generator didn't stop after throw()")
except StopIteration as exc:
# Suppress StopIteration *unless* it's the same exception that
# was passed to throw(). This prevents a StopIteration
# raised inside the "with" statement from being suppressed.
return exc is not value
except RuntimeError as exc:
# Don't re-raise the passed in exception
if exc is value:
return False
# Likewise, avoid suppressing if a StopIteration exception
# was passed to throw() and later wrapped into a RuntimeError
# (see PEP 479).
if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value:
return False
raise
except:
# only re-raise if it's *not* the exception that was
# passed to throw(), because __exit__() must not raise
# an exception unless __exit__() itself failed. But throw()
# has to raise the exception to signal propagation, so this
# fixes the impedance mismatch between the throw() protocol
# and the __exit__() protocol.
#
if sys.exc_info()[1] is not value:
raise
def contextmanager(func):
"""@contextmanager decorator.
Typical usage:
@contextmanager
def some_generator(<arguments>):
<setup>
try:
yield <value>
finally:
<cleanup>
This makes this:
with some_generator(<arguments>) as <variable>:
<body>
equivalent to this:
<setup>
try:
<variable> = <value>
<body>
finally:
<cleanup>
"""
@wraps(func)
def helper(*args, **kwds):
return _GeneratorContextManager(func, args, kwds)
return helper
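# Illustrative sketch (not part of the committed file): a concrete
# @contextmanager example. Because _GeneratorContextManager inherits from
# ContextDecorator, the same object also works as a decorator. The names
# `announce` and `greet` are hypothetical.
def _demo_contextmanager():
    @contextmanager
    def announce(label):
        print('entering %s' % label)
        try:
            yield label
        finally:
            print('leaving %s' % label)

    # Used as a context manager:
    with announce('block') as label:
        assert label == 'block'

    # Used as a decorator (a fresh manager is recreated for each call):
    @announce('call')
    def greet():
        return 'hi'
    assert greet() == 'hi'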
class closing(object):
"""Context to automatically close something at the end of a block.
Code like this:
with closing(<module>.open(<arguments>)) as f:
<block>
is equivalent to this:
f = <module>.open(<arguments>)
try:
<block>
finally:
f.close()
"""
def __init__(self, thing):
self.thing = thing
def __enter__(self):
return self.thing
def __exit__(self, *exc_info):
self.thing.close()
class _RedirectStream(object):
_stream = None
def __init__(self, new_target):
self._new_target = new_target
# We use a list of old targets to make this CM re-entrant
self._old_targets = []
def __enter__(self):
self._old_targets.append(getattr(sys, self._stream))
setattr(sys, self._stream, self._new_target)
return self._new_target
def __exit__(self, exctype, excinst, exctb):
setattr(sys, self._stream, self._old_targets.pop())
class redirect_stdout(_RedirectStream):
"""Context manager for temporarily redirecting stdout to another file.
# How to send help() to stderr
with redirect_stdout(sys.stderr):
help(dir)
# How to write help() to a file
with open('help.txt', 'w') as f:
with redirect_stdout(f):
help(pow)
"""
_stream = "stdout"
class redirect_stderr(_RedirectStream):
"""Context manager for temporarily redirecting stderr to another file."""
_stream = "stderr"
class suppress(object):
"""Context manager to suppress specified exceptions
After the exception is suppressed, execution proceeds with the next
statement following the with statement.
with suppress(FileNotFoundError):
os.remove(somefile)
# Execution still resumes here if the file was already removed
"""
def __init__(self, *exceptions):
self._exceptions = exceptions
def __enter__(self):
pass
def __exit__(self, exctype, excinst, exctb):
# Unlike isinstance and issubclass, CPython exception handling
# currently only looks at the concrete type hierarchy (ignoring
# the instance and subclass checking hooks). While Guido considers
# that a bug rather than a feature, it's a fairly hard one to fix
# due to various internal implementation details. suppress provides
# the simpler issubclass based semantics, rather than trying to
# exactly reproduce the limitations of the CPython interpreter.
#
# See http://bugs.python.org/issue12029 for more details
return exctype is not None and issubclass(exctype, self._exceptions)
# Context manipulation is Python 3 only
_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3
if _HAVE_EXCEPTION_CHAINING:
def _make_context_fixer(frame_exc):
def _fix_exception_context(new_exc, old_exc):
# Context may not be correct, so find the end of the chain
while 1:
exc_context = new_exc.__context__
if exc_context is old_exc:
# Context is already set correctly (see issue 20317)
return
if exc_context is None or exc_context is frame_exc:
break
new_exc = exc_context
# Change the end of the chain to point to the exception
# we expect it to reference
new_exc.__context__ = old_exc
return _fix_exception_context
def _reraise_with_existing_context(exc_details):
try:
# bare "raise exc_details[1]" replaces our carefully
# set-up context
fixed_ctx = exc_details[1].__context__
raise exc_details[1]
except BaseException:
exc_details[1].__context__ = fixed_ctx
raise
else:
# No exception context in Python 2
def _make_context_fixer(frame_exc):
return lambda new_exc, old_exc: None
# Use 3 argument raise in Python 2,
# but use exec to avoid SyntaxError in Python 3
def _reraise_with_existing_context(exc_details):
exc_type, exc_value, exc_tb = exc_details
exec ("raise exc_type, exc_value, exc_tb")
# Handle old-style classes if they exist
try:
from types import InstanceType
except ImportError:
# Python 3 doesn't have old-style classes
_get_type = type
else:
# Need to handle old-style context managers on Python 2
def _get_type(obj):
obj_type = type(obj)
if obj_type is InstanceType:
return obj.__class__ # Old-style class
return obj_type # New-style class
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
"""Context manager for dynamic management of a stack of exit callbacks
For example:
with ExitStack() as stack:
files = [stack.enter_context(open(fname)) for fname in filenames]
# All opened files will automatically be closed at the end of
# the with statement, even if attempts to open files later
# in the list raise an exception
"""
def __init__(self):
self._exit_callbacks = deque()
def pop_all(self):
"""Preserve the context stack by transferring it to a new instance"""
new_stack = type(self)()
new_stack._exit_callbacks = self._exit_callbacks
self._exit_callbacks = deque()
return new_stack
def _push_cm_exit(self, cm, cm_exit):
"""Helper to correctly register callbacks to __exit__ methods"""
def _exit_wrapper(*exc_details):
return cm_exit(cm, *exc_details)
_exit_wrapper.__self__ = cm
self.push(_exit_wrapper)
def push(self, exit):
"""Registers a callback with the standard __exit__ method signature
Can suppress exceptions the same way __exit__ methods can.
Also accepts any object with an __exit__ method (registering a call
to the method instead of the object itself)
"""
# We use an unbound method rather than a bound method to follow
# the standard lookup behaviour for special methods
_cb_type = _get_type(exit)
try:
exit_method = _cb_type.__exit__
except AttributeError:
# Not a context manager, so assume it's a callable
self._exit_callbacks.append(exit)
else:
self._push_cm_exit(exit, exit_method)
return exit # Allow use as a decorator
def callback(self, callback, *args, **kwds):
"""Registers an arbitrary callback and arguments.
Cannot suppress exceptions.
"""
def _exit_wrapper(exc_type, exc, tb):
callback(*args, **kwds)
# We changed the signature, so using @wraps is not appropriate, but
# setting __wrapped__ may still help with introspection
_exit_wrapper.__wrapped__ = callback
self.push(_exit_wrapper)
return callback # Allow use as a decorator
def enter_context(self, cm):
"""Enters the supplied context manager
If successful, also pushes its __exit__ method as a callback and
returns the result of the __enter__ method.
"""
# We look up the special methods on the type to match the with statement
_cm_type = _get_type(cm)
_exit = _cm_type.__exit__
result = _cm_type.__enter__(cm)
self._push_cm_exit(cm, _exit)
return result
def close(self):
"""Immediately unwind the context stack"""
self.__exit__(None, None, None)
def __enter__(self):
return self
def __exit__(self, *exc_details):
received_exc = exc_details[0] is not None
# We manipulate the exception state so it behaves as though
# we were actually nesting multiple with statements
frame_exc = sys.exc_info()[1]
_fix_exception_context = _make_context_fixer(frame_exc)
# Callbacks are invoked in LIFO order to match the behaviour of
# nested context managers
suppressed_exc = False
pending_raise = False
while self._exit_callbacks:
cb = self._exit_callbacks.pop()
try:
if cb(*exc_details):
suppressed_exc = True
pending_raise = False
exc_details = (None, None, None)
except:
new_exc_details = sys.exc_info()
# simulate the stack of exceptions by setting the context
_fix_exception_context(new_exc_details[1], exc_details[1])
pending_raise = True
exc_details = new_exc_details
if pending_raise:
_reraise_with_existing_context(exc_details)
return received_exc and suppressed_exc
# Preserve backwards compatibility
class ContextStack(ExitStack):
"""Backwards compatibility alias for ExitStack"""
def __init__(self):
warnings.warn("ContextStack has been renamed to ExitStack",
DeprecationWarning)
super(ContextStack, self).__init__()
def register_exit(self, callback):
return self.push(callback)
def register(self, callback, *args, **kwds):
return self.callback(callback, *args, **kwds)
def preserve(self):
return self.pop_all()
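# Illustrative sketch (not part of the committed file): ExitStack in action.
# enter_context() registers a context manager's __exit__, callback() registers
# a plain function, and pop_all() hands the pending cleanup to a new stack.
# The name `_demo_exit_stack` and the `events` list are hypothetical.
def _demo_exit_stack():
    events = []
    with ExitStack() as stack:
        stack.callback(events.append, 'callback ran')
        stack.enter_context(suppress(KeyError))
        raise KeyError('missing')  # suppressed by the suppress() entered above
    assert events == ['callback ran']

    with ExitStack() as stack:
        stack.callback(events.append, 'deferred')
        saved = stack.pop_all()  # the original stack no longer owns the cleanup
    assert events == ['callback ran']  # nothing ran when the with block ended
    saved.close()
    assert events == ['callback ran', 'deferred']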

File diff suppressed because it is too large.

View file

@@ -9,7 +9,6 @@ import types
import sys
from pysnooper.utils import truncate
from python_toolbox import sys_tools, temp_file_tools
import pytest
import pysnooper
@@ -18,11 +17,12 @@ from pysnooper.variables import needs_parentheses
from .utils import (assert_output, assert_sample_output, VariableEntry,
CallEntry, LineEntry, ReturnEntry, OpcodeEntry,
ReturnValueEntry, ExceptionEntry)
from . import mini_toolbox
def test_chinese():
with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder:
with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder:
path = folder / 'foo.log'
@pysnooper.snoop(path)
def foo():

View file

@@ -8,7 +8,6 @@ import types
import sys
from pysnooper.utils import truncate
from python_toolbox import sys_tools, temp_file_tools
import pytest
import pysnooper
@@ -16,6 +15,7 @@ from pysnooper.variables import needs_parentheses
from .utils import (assert_output, assert_sample_output, VariableEntry,
CallEntry, LineEntry, ReturnEntry, OpcodeEntry,
ReturnValueEntry, ExceptionEntry)
from . import mini_toolbox
def test_string_io():
@@ -54,7 +54,7 @@ def test_thread_info():
y = 8
return y + x
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function('baba')
assert result == 15
@@ -83,7 +83,7 @@ def test_multi_thread_info():
y = 8
return y + x
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
my_function('baba')
t1 = threading.Thread(target=my_function, name="test123",args=['bubu'])
@@ -206,7 +206,7 @@ def test_watch():
for i in range(2):
foo.square()
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function()
assert result is None
@@ -252,7 +252,7 @@ def test_watch_explode():
lst = [7, 8, 9]
lst.append(10)
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function()
assert result is None
@@ -306,7 +306,7 @@ def test_variables_classes():
_s = WithSlots()
_lst = list(range(1000))
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function()
assert result is None
@@ -351,7 +351,7 @@ def test_single_watch_no_comma():
for i in range(2):
foo.square()
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function()
assert result is None
@@ -382,7 +382,7 @@ def test_long_variable():
foo = list(range(1000))
return foo
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function()
assert result == list(range(1000))
@@ -410,7 +410,7 @@ def test_repr_exception():
def my_function():
bad = Bad()
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = my_function()
assert result is None
@@ -500,7 +500,7 @@ def test_method_and_prefix():
baz = Baz()
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = baz.square()
assert result is baz
@@ -525,7 +525,7 @@ def test_method_and_prefix():
def test_file_output():
with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder:
with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder:
path = folder / 'foo.log'
@pysnooper.snoop(path)
@@ -615,8 +615,8 @@ def test_lambda():
def test_unavailable_source():
with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder, \
sys_tools.TempSysPathAdder(str(folder)):
with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder, \
mini_toolbox.TempSysPathAdder(str(folder)):
module_name = 'iaerojajsijf'
python_file_path = folder / ('%s.py' % (module_name,))
content = textwrap.dedent(u'''
@@ -629,7 +629,7 @@ def test_unavailable_source():
python_file.write(content)
module = __import__(module_name)
python_file_path.unlink()
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = getattr(module, 'f')(7)
assert result == 7
@@ -647,7 +647,7 @@ def test_unavailable_source():
def test_no_overwrite_by_default():
with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder:
with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder:
path = folder / 'foo.log'
with path.open('w') as output_file:
output_file.write(u'lala')
@@ -679,7 +679,7 @@ def test_no_overwrite_by_default():
def test_overwrite():
with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder:
with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder:
path = folder / 'foo.log'
with path.open('w') as output_file:
output_file.write(u'lala')
@@ -721,7 +721,7 @@ def test_overwrite():
def test_error_in_overwrite_argument():
with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder:
with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder:
with pytest.raises(Exception, match='can only be used when writing'):
@pysnooper.snoop(overwrite=True)
def my_function(foo):
@@ -783,7 +783,7 @@ def test_with_block():
def qux():
return 9 # not traced, mustn't show up
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
result = foo(2)
assert result == 2
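# Illustrative sketch (not part of this commit): the migrated pattern used
# throughout this test module, i.e. snooping into a file inside a mini_toolbox
# temporary folder instead of relying on python_toolbox. The names
# `_sketch_file_output` and `my_function` are hypothetical.
def _sketch_file_output():
    with mini_toolbox.create_temp_folder(prefix='pysnooper') as folder:
        path = folder / 'foo.log'

        @pysnooper.snoop(path)
        def my_function(foo):
            return foo

        assert my_function(7) == 7
        with path.open() as output_file:
            assert output_file.read()  # the trace was written to the file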

View file

@@ -10,7 +10,7 @@ try:
except ImportError:
from itertools import izip_longest as zip_longest
from python_toolbox import caching, sys_tools
from . import mini_toolbox
import pysnooper.pycompat
@@ -190,7 +190,7 @@ class _BaseEventEntry(_BaseEntry):
self.thread_info_regex = (None if thread_info_regex is None else
re.compile(thread_info_regex))
@caching.CachedProperty
@property
def event_name(self):
return re.match('^[A-Z][a-z_]*', type(self).__name__).group(0).lower()
@@ -270,7 +270,7 @@ def assert_output(output, expected_entries, prefix=None):
def assert_sample_output(module):
with sys_tools.OutputCapturer(stdout=False,
with mini_toolbox.OutputCapturer(stdout=False,
stderr=True) as output_capturer:
module.main()