Skip to content

Commit f672c34

Browse files
committed
add some basic benchmarks
1 parent 38745dd commit f672c34

7 files changed

Lines changed: 396 additions & 0 deletions

File tree

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,3 +49,4 @@ wheelhouse
4949

5050
.cache
5151
compile_commands.json
52+
.asv

asv.conf.json

Lines changed: 184 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,184 @@
1+
{
2+
// The version of the config file format. Do not change, unless
3+
// you know what you are doing.
4+
"version": 1,
5+
6+
// The name of the project being benchmarked
7+
"project": "reticula",
8+
9+
// The project's homepage
10+
"project_url": "https://reticula.network/",
11+
12+
// The URL or local path of the source code repository for the
13+
// project being benchmarked
14+
"repo": ".",
15+
16+
// The Python project's subdirectory in your repo. If missing or
17+
// the empty string, the project is assumed to be located at the root
18+
// of the repository.
19+
// "repo_subdir": "",
20+
21+
// Customizable commands for building the project.
22+
// See asv.conf.json documentation.
23+
// To build the package using pyproject.toml (PEP518), uncomment the following lines
24+
"build_command": [
25+
"python -m pip wheel -v -w {build_cache_dir} {build_dir}"
26+
],
27+
28+
// Customizable commands for installing and uninstalling the project.
29+
// See asv.conf.json documentation.
30+
// "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}"],
31+
// "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
32+
33+
"branches": ["main"], // for git
34+
35+
// The DVCS being used. If not set, it will be automatically
36+
// determined from "repo" by looking at the protocol in the URL
37+
// (if remote), or by looking for special directories, such as
38+
// ".git" (if local).
39+
// "dvcs": "git",
40+
41+
// The tool to use to create environments. May be "conda",
42+
// "virtualenv", "mamba" (above 3.8)
43+
// or other value depending on the plugins in use.
44+
// If missing or the empty string, the tool will be automatically
45+
// determined by looking for tools on the PATH environment
46+
// variable.
47+
"environment_type": "virtualenv",
48+
49+
// timeout in seconds for installing any dependencies in environment
50+
// defaults to 10 min
51+
//"install_timeout": 600,
52+
53+
// the base URL to show a commit for the project.
54+
"show_commit_url": "http://github.com/reticula-network/reticula-python/commit/",
55+
56+
// The Pythons you'd like to test against. If not provided, defaults
57+
// to the current version of Python used to run `asv`.
58+
// "pythons": ["3.8", "3.12"],
59+
60+
// The list of conda channel names to be searched for benchmark
61+
// dependency packages in the specified order
62+
// "conda_channels": ["conda-forge", "defaults"],
63+
64+
// A conda environment file that is used for environment creation.
65+
// "conda_environment_file": "environment.yml",
66+
67+
// The matrix of dependencies to test. Each key of the "req"
68+
// requirements dictionary is the name of a package (in PyPI) and
69+
// the values are version numbers. An empty list or empty string
70+
// indicates to just test against the default (latest)
71+
// version. null indicates that the package is to not be
72+
// installed. If the package to be tested is only available from
73+
// PyPi, and the 'environment_type' is conda, then you can preface
74+
// the package name by 'pip+', and the package will be installed
75+
// via pip (with all the conda available packages installed first,
76+
// followed by the pip installed packages).
77+
//
78+
// The ``@env`` and ``@env_nobuild`` keys contain the matrix of
79+
// environment variables to pass to build and benchmark commands.
80+
// An environment will be created for every combination of the
81+
// cartesian product of the "@env" variables in this matrix.
82+
// Variables in "@env_nobuild" will be passed to every environment
83+
// during the benchmark phase, but will not trigger creation of
84+
// new environments. A value of ``null`` means that the variable
85+
// will not be set for the current combination.
86+
//
87+
// "matrix": {
88+
// "req": {
89+
// "numpy": ["1.6", "1.7"],
90+
// "six": ["", null], // test with and without six installed
91+
// "pip+emcee": [""] // emcee is only available for install with pip.
92+
// },
93+
// "env": {"ENV_VAR_1": ["val1", "val2"]},
94+
// "env_nobuild": {"ENV_VAR_2": ["val3", null]},
95+
// },
96+
97+
// Combinations of libraries/python versions can be excluded/included
98+
// from the set to test. Each entry is a dictionary containing additional
99+
// key-value pairs to include/exclude.
100+
//
101+
// An exclude entry excludes entries where all values match. The
102+
// values are regexps that should match the whole string.
103+
//
104+
// An include entry adds an environment. Only the packages listed
105+
// are installed. The 'python' key is required. The exclude rules
106+
// do not apply to includes.
107+
//
108+
// In addition to package names, the following keys are available:
109+
//
110+
// - python
111+
// Python version, as in the *pythons* variable above.
112+
// - environment_type
113+
// Environment type, as above.
114+
// - sys_platform
115+
// Platform, as in sys.platform. Possible values for the common
116+
// cases: 'linux2', 'win32', 'cygwin', 'darwin'.
117+
// - req
118+
// Required packages
119+
// - env
120+
// Environment variables
121+
// - env_nobuild
122+
// Non-build environment variables
123+
//
124+
// "exclude": [
125+
// {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
126+
// {"environment_type": "conda", "req": {"six": null}}, // don't run without six on conda
127+
// {"env": {"ENV_VAR_1": "val2"}}, // skip val2 for ENV_VAR_1
128+
// ],
129+
//
130+
// "include": [
131+
// // additional env for python3.12
132+
// {"python": "3.12", "req": {"numpy": "1.26"}, "env_nobuild": {"FOO": "123"}},
133+
// // additional env if run on windows+conda
134+
// {"platform": "win32", "environment_type": "conda", "python": "3.12", "req": {"libpython": ""}},
135+
// ],
136+
137+
// The directory (relative to the current directory) that benchmarks are
138+
// stored in. If not provided, defaults to "benchmarks"
139+
// "benchmark_dir": "benchmarks",
140+
141+
// The directory (relative to the current directory) to cache the Python
142+
// environments in. If not provided, defaults to "env"
143+
"env_dir": ".asv/env",
144+
145+
// The directory (relative to the current directory) that raw benchmark
146+
// results are stored in. If not provided, defaults to "results".
147+
"results_dir": ".asv/results",
148+
149+
// The directory (relative to the current directory) that the html tree
150+
// should be written to. If not provided, defaults to "html".
151+
"html_dir": ".asv/html",
152+
153+
// The number of characters to retain in the commit hashes.
154+
// "hash_length": 8,
155+
156+
// `asv` will cache results of the recent builds in each
157+
// environment, making them faster to install next time. This is
158+
// the number of builds to keep, per environment.
159+
// "build_cache_size": 2,
160+
161+
// The commits after which the regression search in `asv publish`
162+
// should start looking for regressions. Dictionary whose keys are
163+
// regexps matching to benchmark names, and values corresponding to
164+
// the commit (exclusive) after which to start looking for
165+
// regressions. The default is to start from the first commit
166+
// with results. If the commit is `null`, regression detection is
167+
// skipped for the matching benchmark.
168+
//
169+
// "regressions_first_commits": {
170+
// "some_benchmark": "352cdf", // Consider regressions only after this commit
171+
// "another_benchmark": null, // Skip regression detection altogether
172+
// },
173+
174+
// The thresholds for relative change in results, after which `asv
175+
// publish` starts reporting regressions. Dictionary of the same
176+
// form as in ``regressions_first_commits``, with values
177+
// indicating the thresholds. If multiple entries match, the
178+
// maximum is taken. If no entry matches, the default is 5%.
179+
//
180+
// "regressions_thresholds": {
181+
// "some_benchmark": 0.01, // Threshold of 1%
182+
// "another_benchmark": 0.5, // Threshold of 50%
183+
// },
184+
}

benchmarks/__init__.py

Whitespace-only changes.

benchmarks/ba_sequence.json

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

benchmarks/module.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
class ModuleImport:
    """
    Benchmark the total import time of the reticula package.
    """

    def timeraw_import_reticula(self):
        # asv runs the code string returned by a ``timeraw_`` benchmark in a
        # fresh interpreter process, so each timing is a cold import.
        code = """
import reticula
"""
        return code

benchmarks/static.py

Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
import reticula as ret
2+
3+
import random
4+
import json
5+
import pathlib
6+
7+
8+
class Construction:
    """
    Benchmark construction and basic operations of static undirected graphs.

    ``setup`` builds a regular ring lattice once and pre-computes edge-object
    and vertex-tuple views of its edge list (plus shuffled/reoriented copies),
    so the timed methods measure only network construction itself.
    """

    def setup(self):
        self.n = 2**16
        k = 32
        self.g = ret.regular_ring_lattice[ret.int64](size=self.n, degree=k)
        self.verts = list(self.g.vertices())

        self.edges = list(self.g.edges())
        # (tail, head) pairs in each edge's natural orientation; the walrus
        # keeps this a single incident_verts() call per edge.
        self.tuples = [
            ((vs := e.incident_verts())[0], vs[-1])
            for e in self.edges]

        # Fixed seed so every run benchmarks the same shuffled input.
        rand = random.Random(42)

        # sample() is called before the per-edge random() draws below, which
        # preserves the RNG consumption order of the original implementation.
        self.shuffled_edges = rand.sample(self.edges, k=len(self.edges))
        self.shuffled_tuples = []
        for e in self.shuffled_edges:
            vs = e.incident_verts()
            # Flip each pair's orientation with probability ~0.5 so tuple
            # order carries no structural information.
            if rand.random() > 0.5:
                self.shuffled_tuples.append((vs[0], vs[-1]))
            else:
                self.shuffled_tuples.append((vs[-1], vs[0]))

    def time_copy_construction(self):
        # Construct a network directly from another network object.
        ret.undirected_network[ret.int64](self.g)

    def time_tuple_construction(self):
        # Construct from (tail, head) vertex tuples in natural order.
        ret.undirected_network[ret.int64](
            verts=self.verts, edges=self.tuples)

    def time_shuffled_tuple_construction(self):
        # Same as above, but with shuffled order and random orientations.
        ret.undirected_network[ret.int64](
            verts=self.verts, edges=self.shuffled_tuples)

    def time_edge_obj_construction(self):
        # Construct from edge objects in natural order.
        ret.undirected_network[ret.int64](
            verts=self.verts, edges=self.edges)

    def time_shuffled_edge_obj_construction(self):
        # Same as above, but with shuffled edge order.
        ret.undirected_network[ret.int64](
            verts=self.verts, edges=self.shuffled_edges)
54+
55+
56+
class Graphicallity:
    """Benchmark graphicality testing of a stored degree sequence."""

    # NOTE(review): the class name looks like a misspelling of
    # "Graphicality", but renaming it would change the published asv
    # benchmark name and break result history, so it is kept as-is.

    def setup(self):
        # Load the degree-sequence fixture shipped next to this module.
        data_file = pathlib.Path(__file__).parent / "ba_sequence.json"
        with open(data_file) as f:
            self.ba_sequence = json.load(f)

    def time_is_graphic(self):
        ret.is_graphic(self.ba_sequence)
64+
65+
66+
class Algorithms:
    """
    Benchmark common static-network algorithms.

    Each algorithm is timed on two inputs of the same vertex count: a
    well-connected regular ring lattice and a completely edgeless network,
    so connectivity-dependent costs can be compared.
    """

    def setup(self):
        self.n = 2**16
        degree = 32
        self.g = ret.regular_ring_lattice[ret.int64](
            size=self.n, degree=degree)
        # Same vertex set, but with no edges at all.
        self.g_isolated = ret.undirected_network[ret.int64](
            edges=[], verts=range(self.n))

    # --- degree and density ------------------------------------------------

    def time_degree(self):
        ret.degree(self.g, 12)

    def time_degree_sequence(self):
        ret.degree_sequence(self.g)

    def time_density(self):
        ret.density(self.g)

    # --- assortativity -----------------------------------------------------

    def time_assortativity(self):
        ret.degree_assortativity(self.g)

    def time_attribute_assortativity_lambda(self):
        # Exercises the Python-callable overhead via an identity attribute.
        ret.attribute_assortativity(self.g, lambda x: x)

    # --- connectivity ------------------------------------------------------

    def time_connected_component(self):
        ret.connected_component(self.g, 12)

    def time_connected_component_isolated(self):
        ret.connected_component(self.g_isolated, 12)

    def time_connected_components(self):
        ret.connected_components(self.g)

    def time_connected_components_isolated(self):
        ret.connected_components(self.g_isolated)

    def time_is_connected(self):
        ret.is_connected(self.g)

    def time_is_connected_isolated(self):
        ret.is_connected(self.g_isolated)

    def time_largest_connected_component(self):
        ret.largest_connected_component(self.g)

    def time_largest_connected_component_isolated(self):
        ret.largest_connected_component(self.g_isolated)

    # --- reachability and shortest paths -----------------------------------

    def time_is_reachable(self):
        ret.is_reachable(self.g, 0, self.n//2)

    def time_is_reachable_isolated(self):
        ret.is_reachable(self.g_isolated, 0, self.n//2)

    def time_shortest_path_lengths_from(self):
        ret.shortest_path_lengths_from(self.g, 0)

    def time_shortest_path_lengths_to(self):
        ret.shortest_path_lengths_to(self.g, 0)

0 commit comments

Comments
 (0)