Group :: Development/Python3
RPM: python3-module-pytest-benchmark
Patch: python3-module-pytest-benchmark-4.0.0-alt.patch
 src/pytest_benchmark/compat.py |  1 +
 src/pytest_benchmark/utils.py  |  6 +++++-
 tests/test_benchmark.py        | 19 ++++++++-----------
 3 files changed, 14 insertions(+), 12 deletions(-)
diff --git a/src/pytest_benchmark/compat.py b/src/pytest_benchmark/compat.py
index 63d01bd..9afecf2 100644
--- a/src/pytest_benchmark/compat.py
+++ b/src/pytest_benchmark/compat.py
@@ -1,3 +1,4 @@
 import sys
 
 PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8
+PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
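For reference, the new PY311 flag follows the file's existing PY38 pattern. An equivalent, arguably more idiomatic spelling (a sketch for illustration, not part of the patch) compares sys.version_info as a tuple; on any CPython 3.x interpreter the two forms behave identically:

    import sys

    # Tuple comparison covers major and minor version in one check.
    PY38 = sys.version_info >= (3, 8)
    PY311 = sys.version_info >= (3, 11)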
diff --git a/src/pytest_benchmark/utils.py b/src/pytest_benchmark/utils.py
index c80352a..e28c04e 100644
--- a/src/pytest_benchmark/utils.py
+++ b/src/pytest_benchmark/utils.py
@@ -26,7 +26,7 @@ from urllib.parse import urlparse
 
 import genericpath
 
-from .compat import PY38
+from .compat import PY38, PY311
 
 # This is here (in the utils module) because it might be used by
 # various other modules.
@@ -521,6 +521,10 @@ def clonefunc(f):
             co.co_firstlineno, co.co_lnotab, co.co_freevars, co.co_cellvars]
     if PY38:
         args.insert(1, co.co_posonlyargcount)
+
+    if PY311:
+        args.insert(12, co.co_qualname)
+        args.insert(15, co.co_exceptiontable)
     co2 = types.CodeType(*args)
     #
     # then, we clone the function itself, using the new co2
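Background for the hunk above: Python 3.11 added co_qualname and co_exceptiontable to the positional signature of types.CodeType, at the slots the two args.insert() calls target, so constructing a code object positionally now needs both extra fields. Since Python 3.8, code objects also provide a replace() method that copies every field automatically; a minimal sketch of a version-independent clone built on it (illustration only, not what the patch does):

    import types

    def clonefunc_sketch(f):
        # CodeType.replace() with no arguments returns a new code object
        # with all fields copied, including co_qualname and
        # co_exceptiontable on 3.11+, so no positional bookkeeping is needed.
        co2 = f.__code__.replace()
        # Rebuild the function around the cloned code object, keeping
        # globals, name, defaults and closure intact.
        return types.FunctionType(co2, f.__globals__, f.__name__,
                                  f.__defaults__, f.__closure__)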
diff --git a/tests/test_benchmark.py b/tests/test_benchmark.py
index 280ce24..c40ce15 100644
--- a/tests/test_benchmark.py
+++ b/tests/test_benchmark.py
@@ -952,7 +952,6 @@ def test_ok(benchmark, bad_fixture):
         " def test_bad(benchmark):",
         "? @benchmark",
-        "? def result():",
         "test_abort_broken.py:*",
         "_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _*",
@@ -1001,12 +1000,6 @@ from functools import partial
 
 import pytest
 
 
-def test_fast(benchmark):
-    @benchmark
-    def result():
-        return time.sleep(0.000001)
-    assert result is None
-
 def test_slow(benchmark):
     assert benchmark(partial(time.sleep, 0.001)) is None
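Note on the removal above: BASIC_TEST defines test_fast twice (the next hunk's @@ context still shows def test_fast(benchmark):, and the expected "*collected 5 items" is unchanged below). Python rebinds a module-level name to its last definition, so only the surviving test_fast is ever collected; the deleted block was dead code. A minimal illustration:

    def test_fast():  # shadowed: the name is rebound below
        pass

    def test_fast():  # the last definition wins; pytest collects only this one
        pass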
@@ -1025,13 +1018,15 @@ def test_fast(benchmark):
 def test_basic(testdir):
     test = testdir.makepyfile(BASIC_TEST)
     result = testdir.runpytest_subprocess('-vv', '--doctest-modules', test)
-    result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines_random([
         "*collected 5 items",
         "test_basic.py::*test_basic PASSED*",
         "test_basic.py::test_fast PASSED*",
         "test_basic.py::test_slow PASSED*",
         "test_basic.py::test_slower PASSED*",
         "test_basic.py::test_xfast PASSED*",
+    ])
+    result.stdout.fnmatch_lines([
         "",
         "* benchmark: 4 tests *",
         "Name (time in ?s) * Min * Max * Mean * StdDev * Rounds * Iterations",
@@ -1049,7 +1044,7 @@ def test_basic(testdir):
 def test_skip(testdir):
     test = testdir.makepyfile(BASIC_TEST)
     result = testdir.runpytest_subprocess('-vv', '--doctest-modules', '--benchmark-skip', test)
-    result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines_random([
         "*collected 5 items",
         "test_skip.py::*test_skip PASSED*",
         "test_skip.py::test_fast SKIPPED*",
@@ -1063,7 +1058,7 @@ def test_skip(testdir):
 def test_disable(testdir):
     test = testdir.makepyfile(BASIC_TEST)
     result = testdir.runpytest_subprocess('-vv', '--doctest-modules', '--benchmark-disable', test)
-    result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines_random([
         "*collected 5 items",
         "test_disable.py::*test_disable PASSED*",
         "test_disable.py::test_fast PASSED*",
@@ -1092,13 +1087,15 @@ def test_mark_selection(testdir):
 def test_only_benchmarks(testdir):
     test = testdir.makepyfile(BASIC_TEST)
     result = testdir.runpytest_subprocess('-vv', '--doctest-modules', '--benchmark-only', test)
-    result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines_random([
         "*collected 5 items",
         "test_only_benchmarks.py::*test_only_benchmarks SKIPPED*",
         "test_only_benchmarks.py::test_fast PASSED*",
         "test_only_benchmarks.py::test_slow PASSED*",
         "test_only_benchmarks.py::test_slower PASSED*",
         "test_only_benchmarks.py::test_xfast PASSED*",
+    ])
+    result.stdout.fnmatch_lines([
         "* benchmark: 4 tests *",
         "Name (time in ?s) * Min * Max * Mean * StdDev * Rounds * Iterations",
         "------*",