Diffstat (limited to 'tools')
-rwxr-xr-x  tools/benchmark.py        12
-rwxr-xr-x  tools/http_benchmark.py   24
-rw-r--r--  tools/testdata/wrk2.txt    8
-rw-r--r--  tools/testdata/wrk3.txt    8
-rw-r--r--  tools/util.py             28
-rw-r--r--  tools/util_test.py        15
6 files changed, 76 insertions(+), 19 deletions(-)
diff --git a/tools/benchmark.py b/tools/benchmark.py
index 53037b1e7..6a4ec29c8 100755
--- a/tools/benchmark.py
+++ b/tools/benchmark.py
@@ -206,8 +206,16 @@ def main(argv):
     hyper_hello_path = os.path.join(build_dir, "hyper_hello")
     core_http_bench_exe = os.path.join(build_dir, "deno_core_http_bench")
     new_data["throughput"] = run_throughput(deno_path)
-    new_data["req_per_sec"] = http_benchmark(deno_path, hyper_hello_path,
-                                             core_http_bench_exe)
+    stats = http_benchmark(deno_path, hyper_hello_path,
+                           core_http_bench_exe)
+    new_data["req_per_sec"] = {
+        k: v["req_per_sec"]
+        for k, v in stats.items()
+    }
+    new_data["max_latency"] = {
+        k: v["max_latency"]
+        for k, v in stats.items()
+    }
     if "linux" in sys.platform:
         # Thread count test, only on linux
         new_data["thread_count"] = run_thread_count_benchmark(deno_path)
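
The hunk above splits the per-benchmark stats that http_benchmark() now returns into one top-level map per metric. A minimal sketch of that reshaping, using made-up stats values rather than output from a real benchmark run:

    # Hypothetical per-benchmark stats, shaped like the dict http_benchmark() returns.
    stats = {
        "deno": {"req_per_sec": 25000, "max_latency": 1.25},
        "hyper": {"req_per_sec": 96000, "max_latency": 0.80},
    }

    # Same dict comprehensions as in the hunk above: one map per metric.
    req_per_sec = {k: v["req_per_sec"] for k, v in stats.items()}
    max_latency = {k: v["max_latency"] for k, v in stats.items()}

    assert req_per_sec == {"deno": 25000, "hyper": 96000}
    assert max_latency == {"deno": 1.25, "hyper": 0.80}
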
diff --git a/tools/http_benchmark.py b/tools/http_benchmark.py
index 2df5fcb89..bd43a5ec4 100755
--- a/tools/http_benchmark.py
+++ b/tools/http_benchmark.py
@@ -59,16 +59,18 @@ def hyper_http_benchmark(hyper_hello_exe):
 
 
 def http_benchmark(deno_exe, hyper_hello_exe, core_http_bench_exe):
-    r = {}
+
     # TODO Rename to "deno_tcp"
-    r["deno"] = deno_http_benchmark(deno_exe)
-    r["deno_net_http"] = deno_net_http_benchmark(deno_exe)
-    r["deno_core_single"] = deno_core_single(core_http_bench_exe)
-    r["deno_core_multi"] = deno_core_multi(core_http_bench_exe)
-    r["node"] = node_http_benchmark()
-    r["node_tcp"] = node_tcp_benchmark()
-    r["hyper"] = hyper_http_benchmark(hyper_hello_exe)
-    return r
+
+    return {
+        "deno": deno_http_benchmark(deno_exe),
+        "deno_net_http": deno_net_http_benchmark(deno_exe),
+        "deno_core_single": deno_core_single(core_http_bench_exe),
+        "deno_core_multi": deno_core_multi(core_http_bench_exe),
+        "node": node_http_benchmark(),
+        "node_tcp": node_tcp_benchmark(),
+        "hyper": hyper_http_benchmark(hyper_hello_exe)
+    }
 
 
 def run(server_cmd, merge_env=None):
@@ -93,9 +95,9 @@ def run(server_cmd, merge_env=None):
                                                             DURATION, ADDR)
         print cmd
         output = subprocess.check_output(cmd, shell=True)
-        req_per_sec = util.parse_wrk_output(output)
+        stats = util.parse_wrk_output(output)
         print output
-        return req_per_sec
+        return stats
     finally:
         server.kill()
 
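
Since run() now hands back the whole stats dict from util.parse_wrk_output rather than a bare requests-per-second number, every entry in http_benchmark()'s result carries both metrics. An illustrative sketch of the shape a caller sees (the keys mirror the dict literal above; the numbers are made up, not measured):

    # Illustrative result shape after this change; values are not real measurements.
    result = {
        "deno": {"req_per_sec": 25000, "max_latency": 1.25},
        "node": {"req_per_sec": 18000, "max_latency": 3.40},
        "hyper": {"req_per_sec": 96000, "max_latency": 0.80},
    }
    for name, stats in result.items():
        # req/sec is an int, max latency is in milliseconds (see tools/util.py below).
        print("%s: %d req/sec, max latency %.2f ms" %
              (name, stats["req_per_sec"], stats["max_latency"]))
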
diff --git a/tools/testdata/wrk2.txt b/tools/testdata/wrk2.txt
new file mode 100644
index 000000000..3be41437c
--- /dev/null
+++ b/tools/testdata/wrk2.txt
@@ -0,0 +1,8 @@
+Running 10s test @ http://127.0.0.1:4544/
+  2 threads and 10 connections
+  Thread Stats   Avg      Stdev     Max   +/- Stdev
+    Latency   402.90us    1.15ms   1.25us   94.86%
+    Req/Sec    26.86k     2.01k   31.81k    78.71%
+  539721 requests in 10.10s, 26.25MB read
+Requests/sec:  53435.75
+Transfer/sec:      2.60MB
\ No newline at end of file
diff --git a/tools/testdata/wrk3.txt b/tools/testdata/wrk3.txt
new file mode 100644
index 000000000..71150f9f3
--- /dev/null
+++ b/tools/testdata/wrk3.txt
@@ -0,0 +1,8 @@
+Running 10s test @ http://127.0.0.1:4544/
+  2 threads and 10 connections
+  Thread Stats   Avg      Stdev     Max   +/- Stdev
+    Latency    26.55ms  152.26ms    1.63s    97.45%
+    Req/Sec    48.26k     3.13k    61.41k    93.00%
+  960491 requests in 10.00s, 80.61MB read
+Requests/sec:  96037.58
+Transfer/sec:      8.06MB
\ No newline at end of file
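
This fixture exercises the parser's unit handling (see tools/util.py and tools/util_test.py below): the Requests/sec pattern captures only the digits before the decimal point, and the 1.63s Max latency is converted to milliseconds. A tiny self-contained check of that arithmetic:

    import re

    # extract_number's pattern keeps only the digits before the decimal point.
    assert re.findall(r'Requests/sec:\s+(\d+)', "Requests/sec:  96037.58") == ['96037']
    # The Max latency "1.63s" is converted to milliseconds by the new helper.
    assert 1.63 * 1000 == 1630.0
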
diff --git a/tools/util.py b/tools/util.py
index fc307512d..5576bef91 100644
--- a/tools/util.py
+++ b/tools/util.py
@@ -358,12 +358,32 @@ def extract_number(pattern, string):
     return int(matches[0])
 
 
+def extract_max_latency_in_milliseconds(pattern, string):
+    matches = re.findall(pattern, string)
+    if len(matches) != 1:
+        return None
+    num = float(matches[0][0])
+    unit = matches[0][1]
+    if (unit == 'ms'):
+        return num
+    elif (unit == 'us'):
+        return num / 1000
+    elif (unit == 's'):
+        return num * 1000
+
+
 def parse_wrk_output(output):
-    req_per_sec = None
+    stats = {}
+    stats['req_per_sec'] = None
+    stats['max_latency'] = None
     for line in output.split("\n"):
-        if req_per_sec is None:
-            req_per_sec = extract_number(r'Requests/sec:\s+(\d+)', line)
-    return req_per_sec
+        if stats['req_per_sec'] is None:
+            stats['req_per_sec'] = extract_number(r'Requests/sec:\s+(\d+)',
+                                                  line)
+        if stats['max_latency'] is None:
+            stats['max_latency'] = extract_max_latency_in_milliseconds(
+                r'Latency(?:\s+(\d+.\d+)([a-z]+)){3}', line)
+    return stats
 
 
 def platform():
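
As a cross-check of the parsing added above, here is a standalone sketch that mirrors extract_max_latency_in_milliseconds without importing tools/util.py; the repeated regex group keeps only its last match, i.e. the Max column of wrk's thread stats:

    import re


    # Mirrors the helper above (assumption: same pattern and unit handling).
    def max_latency_ms(line):
        matches = re.findall(r'Latency(?:\s+(\d+.\d+)([a-z]+)){3}', line)
        if len(matches) != 1:
            return None
        num, unit = float(matches[0][0]), matches[0][1]
        return {'ms': num, 'us': num / 1000.0, 's': num * 1000}.get(unit)


    # Latency line from tools/testdata/wrk3.txt: the Max column 1.63s -> 1630.0 ms.
    assert max_latency_ms("    Latency    26.55ms  152.26ms    1.63s    97.45%") == 1630.0
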
diff --git a/tools/util_test.py b/tools/util_test.py
index b7c054b92..e4c8e697b 100644
--- a/tools/util_test.py
+++ b/tools/util_test.py
@@ -82,8 +82,19 @@ def parse_unit_test_output_test():
 def parse_wrk_output_test():
     print "Testing util.parse_wrk_output_test()..."
     f = open(os.path.join(util.root_path, "tools/testdata/wrk1.txt"))
-    req_per_sec = util.parse_wrk_output(f.read())
-    assert req_per_sec == 1837
+    stats = util.parse_wrk_output(f.read())
+    assert stats['req_per_sec'] == 1837
+    assert stats['max_latency'] == 34.96
+
+    f2 = open(os.path.join(util.root_path, "tools/testdata/wrk2.txt"))
+    stats2 = util.parse_wrk_output(f2.read())
+    assert stats2['req_per_sec'] == 53435
+    assert stats2['max_latency'] == 0.00125
+
+    f3 = open(os.path.join(util.root_path, "tools/testdata/wrk3.txt"))
+    stats3 = util.parse_wrk_output(f3.read())
+    assert stats3['req_per_sec'] == 96037
+    assert stats3['max_latency'] == 1630.0
 
 
 def util_test():