|
1 | 1 | #!/usr/bin/env python3 |
2 | 2 | import os.path |
3 | | -import shutil |
4 | 3 | import subprocess |
5 | 4 | import sys |
6 | | -import tempfile |
7 | | - |
8 | | - |
9 | | -def run_cmd(cmd): |
10 | | - print("(runtests.py) Execute: %s" % ' '.join(cmd), flush=True) |
11 | | - proc = subprocess.Popen(cmd) |
12 | | - try: |
13 | | - proc.wait() |
14 | | - except: # noqa |
15 | | - proc.kill() |
16 | | - proc.wait() |
17 | | - raise |
18 | | - sys.stdout.flush() |
19 | | - exitcode = proc.returncode |
20 | | - if exitcode: |
21 | | - sys.exit(exitcode) |
22 | | - print("", flush=True) |
23 | | - |
24 | | - |
25 | | -def run_tests(venv): |
26 | | -    # Move to the root directory
27 | | - root = os.path.dirname(__file__) |
28 | | - if root: |
29 | | - os.chdir(root) |
30 | | - |
31 | | - python = sys.executable |
32 | | - script = 'pyperformance' |
33 | | - if os.name == "nt": |
34 | | - python_executable = os.path.basename(python) |
35 | | - venv_python = os.path.join(venv, 'Scripts', python_executable) |
36 | | - else: |
37 | | - venv_python = os.path.join(venv, 'bin', 'python') |
38 | | - |
39 | | - def run_bench(*cmd): |
40 | | - cmd = cmd + ('--venv', venv) |
41 | | - run_cmd(cmd) |
42 | | - |
43 | | - run_bench(python, '-u', script, 'venv', 'create', '-b', 'all') |
44 | | - |
45 | | - egg_info = "pyperformance.egg-info" |
46 | | - print("(runtests.py) Remove directory %s" % egg_info, flush=True) |
47 | | - try: |
48 | | - shutil.rmtree(egg_info) |
49 | | - except FileNotFoundError: |
50 | | - pass |
51 | | - |
52 | | - run_bench(python, '-u', script, 'venv', 'create') |
53 | | - |
54 | | - for filename in ( |
55 | | - os.path.join('pyperformance', 'tests', 'data', 'py36.json'), |
56 | | - os.path.join('pyperformance', 'tests', 'data', 'mem1.json'), |
57 | | - ): |
58 | | - run_cmd((python, script, 'show', filename)) |
59 | | - |
60 | | - run_bench(python, '-u', script, 'list') |
61 | | - run_bench(python, '-u', script, 'list_groups') |
62 | | - |
63 | | - json = os.path.join(venv, 'bench.json') |
64 | | - |
65 | | -    # -b all: check that *all* benchmarks work
66 | | - # |
67 | | - # --debug-single-value: benchmark results don't matter, we only |
68 | | - # check that running benchmarks don't fail. |
69 | | - run_bench(python, '-u', script, 'run', '-b', 'all', '--debug-single-value', |
70 | | - '-o', json) |
71 | | - |
72 | | - # Display slowest benchmarks |
73 | | - run_cmd((venv_python, '-u', '-m', 'pyperf', 'slowest', json)) |
74 | | - |
75 | | - run_bench(python, '-u', script, 'venv', 'remove') |
76 | 5 |
|
77 | 6 |
|
78 | 7 | def main(): |
79 | | - # Unit tests |
80 | | - cmd = [sys.executable, '-u', |
81 | | - os.path.join('pyperformance', 'tests', 'test_compare.py')] |
82 | | - run_cmd(cmd) |
83 | | - |
84 | | - # Functional tests |
85 | | - tmpdir = tempfile.mkdtemp() |
86 | | - try: |
87 | | - run_tests(tmpdir) |
88 | | - finally: |
89 | | - if os.path.exists(tmpdir): |
90 | | - shutil.rmtree(tmpdir) |
| 8 | + subprocess.run( |
| 9 | + [sys.executable, '-u', '-m', 'pyperformance.tests'], |
| 10 | + cwd=os.path.dirname(__file__) or None, |
| 11 | + ) |
91 | 12 |
|
92 | 13 |
|
93 | 14 | if __name__ == "__main__": |
|
0 commit comments