diff --git a/docs/usage.rst b/docs/usage.rst
index 4c357e0ea..dd37b5cb4 100644
--- a/docs/usage.rst
+++ b/docs/usage.rst
@@ -29,6 +29,16 @@ Additional command line options
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Fail tests that render templates which make use of invalid template variables.
+
+``--querycount`` - show top N tests with most queries
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Show a list of top N tests which executed most queries. Use `--querycount=0`
+to display a list of all tests ordered by the number of queries executed.
+
+Using it in conjunction with `--setup-show` will display the number of
+queries executed by each fixture (when the number of queries executed by the
+fixture is greater than zero). Use `--noquerycount` to forcibly disable it.
+
 
 Running tests in parallel with pytest-xdist
 -------------------------------------------
 pytest-django supports running tests on multiple processes to speed up test
diff --git a/pytest_django/plugin.py b/pytest_django/plugin.py
index 5d4f323a7..86278821e 100644
--- a/pytest_django/plugin.py
+++ b/pytest_django/plugin.py
@@ -86,6 +86,16 @@ def pytest_addoption(parser):
                  'Fail for invalid variables in templates.',
                  type='bool', default=False)
+    group._addoption('--querycount',
+                     action='store', dest='querycount', type=int,
+                     default=None, metavar='N',
+                     help='Show top N tests with most queries '
+                          '(N=0 for all).')
+    group._addoption('--noquerycount', '--no-querycount',
+                     action='store_const', dest='querycount',
+                     const=None, default=None,
+                     help='Disable --querycount, when both are used.')
+
 
 
 def _exists(path, ignore=EnvironmentError):
     try:
@@ -337,6 +347,104 @@ def pytest_runtest_setup(item):
         _disable_class_methods(cls)
 
 
+@pytest.hookimpl(hookwrapper=True)
+def pytest_fixture_setup(fixturedef, request):
+    config = request.config
+
+    if config.option.querycount is None or not config.option.setupshow:
+        yield
+        return
+
+    from django.test.utils import CaptureQueriesContext
+    from django.db import connection
+
+    _blocking_manager.unblock()
+
+    try:
+        with CaptureQueriesContext(connection) as context:
+            yield
+    except Exception:
+        yield
+    else:
+        querycount = len(context.captured_queries)
+
+        if querycount:
+            capman = config.pluginmanager.getplugin('capturemanager')
+            capman.suspend_global_capture()
+
+            tw = config.get_terminal_writer()
+            tw.write(' (# of queries executed: {})'.format(querycount))
+
+            capman.resume_global_capture()
+    finally:
+        _blocking_manager.restore()
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_call(item):
+    count_parameter = item.config.option.querycount
+    if count_parameter is None:
+        yield
+        return
+
+    from django.test.utils import CaptureQueriesContext
+    from django.db import connection
+
+    with CaptureQueriesContext(connection) as context:
+        yield
+
+    item.add_report_section('call', 'queries', context.captured_queries)
+
+
+def pytest_terminal_summary(terminalreporter):
+    count_parameter = terminalreporter.config.option.querycount
+    if count_parameter is None:
+        return
+
+    if count_parameter:
+        header = 'top {} tests with most queries'.format(count_parameter)
+        reports_slice = slice(None, count_parameter)
+    else:
+        header = 'top tests with most queries'
+        reports_slice = slice(None, None)
+
+    terminalreporter.write_sep('=', header)
+
+    def get_query_count(report):
+        sections = dict(report.sections)
+        return len(sections.get('Captured queries call', []))
+
+    reports = (
+        terminalreporter.stats.get('failed', []) +
+        terminalreporter.stats.get('passed', [])
+    )
+
+    reports.sort(key=get_query_count)
+    reports.reverse()
+
+    for report in reports[reports_slice]:
+        count = get_query_count(report)
+        nodeid = report.nodeid.replace("::()::", "::")
+
+        terminalreporter.write_line('{count: <4} {when: <8} {nodeid}'.format(
+            count=count,
+            when=report.when,
+            nodeid=nodeid
+        ))
+
+    import django
+    major, minor = django.VERSION[0:2]
+
+    terminalreporter.write_line('')
+    terminalreporter.write_line(
+        '-- Docs: https://docs.djangoproject.com'
+        '/en/{major}.{minor}/topics/db/optimization/'.format(
+            major=major,
+            minor=minor
+        )
+    )
+
+
 @pytest.fixture(autouse=True, scope='session')
 def django_test_environment(request):
     """
diff --git a/tests/test_report.py b/tests/test_report.py
new file mode 100644
index 000000000..22a1fd51f
--- /dev/null
+++ b/tests/test_report.py
@@ -0,0 +1,147 @@
+class TestQueryCount(object):
+    """Test report generated by --querycount parameter"""
+
+    def test_querycount_report_header(self, django_testdir):
+        django_testdir.create_test_module('''
+            def test_zero_queries():
+                pass
+        ''')
+
+        result = django_testdir.runpytest_subprocess('--querycount=5')
+        result.stdout.fnmatch_lines([
+            '*== top 5 tests with most queries ==*'
+        ])
+
+    def test_header_not_set_without_parameter(self, django_testdir):
+        django_testdir.create_test_module('''
+            def test_zero_queries():
+                pass
+        ''')
+
+        result = django_testdir.runpytest_subprocess()
+        assert 'tests with most queries' not in result.stdout.str()
+
+    def test_disabled_when_noquerycount_is_also_used(self, django_testdir):
+        django_testdir.create_test_module('''
+            def test_zero_queries():
+                pass
+        ''')
+
+        result = django_testdir.runpytest_subprocess(
+            '--querycount=5 --noquerycount'
+        )
+        assert 'tests with most queries' not in result.stdout.str()
+
+    def test_query_optimization_tips_for_the_current_version_of_django(
+        self,
+        django_testdir
+    ):
+        django_testdir.create_test_module('''
+            def test_zero_queries():
+                pass
+        ''')
+
+        result = django_testdir.runpytest_subprocess('--querycount=5')
+
+        import django
+        major, minor = django.VERSION[0:2]
+
+        url = (
+            'https://docs.djangoproject.com'
+            '/en/{major}.{minor}/topics/db/optimization/'
+        ).format(
+            major=major,
+            minor=minor
+        )
+
+        assert url in result.stdout.str()
+
+    def test_querycount_report_lines(self, django_testdir):
+        django_testdir.create_test_module('''
+            import pytest
+            from django.db import connection
+
+            @pytest.mark.django_db
+            def test_one_query():
+                with connection.cursor() as cursor:
+                    cursor.execute('SELECT 1')
+
+                assert True
+
+            @pytest.mark.django_db
+            def test_two_queries():
+                with connection.cursor() as cursor:
+                    cursor.execute('SELECT 1')
+                    cursor.execute('SELECT 1')
+
+                assert True
+
+            @pytest.mark.django_db
+            def test_failed_one_query():
+                with connection.cursor() as cursor:
+                    cursor.execute('SELECT 1')
+
+                assert False
+
+            def test_zero_queries():
+                assert True
+        ''')
+
+        result = django_testdir.runpytest_subprocess('--querycount=4')
+        lines = result.stdout.get_lines_after(
+            '*top 4 tests with most queries*'
+        )
+        assert 'test_two_queries' in lines[0]
+        assert 'test_one_query' in lines[1]
+        assert 'test_failed' in lines[2]
+        assert 'test_zero_queries' in lines[3]
+
+    def test_report_all_lines_on_querycount_zero(self, django_testdir):
+        django_testdir.create_test_module('''
+            import pytest
+            from django.db import connection
+
+            @pytest.mark.django_db
+            def test_one_query():
+                with connection.cursor() as cursor:
+                    cursor.execute('SELECT 1')
+
+                assert True
+
+            @pytest.mark.django_db
+            def test_two_queries():
+                with connection.cursor() as cursor:
+                    cursor.execute('SELECT 1')
+                    cursor.execute('SELECT 1')
+
+                assert True
+        ''')
+
+        result = django_testdir.runpytest_subprocess('--querycount=0')
+        lines = result.stdout.get_lines_after(
+            '*top tests with most queries*'
+        )
+        assert 'test_two_queries' in lines[0]
+        assert 'test_one_query' in lines[1]
+
+    def test_should_report_fixture_queries(self, django_testdir):
+        django_testdir.create_test_module('''
+            import pytest
+            from django.db import connection
+
+            @pytest.fixture
+            def one_query():
+                with connection.cursor() as cursor:
+                    cursor.execute('SELECT 1')
+
+            @pytest.mark.django_db
+            def test_without_queries(one_query):
+                pass
+        ''')
+
+        result = django_testdir.runpytest_subprocess(
+            '--setup-show',
+            '--querycount=5'
+        )
+
+        assert '(# of queries executed: 1)' in result.stdout.str()