Add Node.js and PHP to the benchmark dashboard
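
JS and Python results are now reported as throughput in MB/s instead
of average latency in ns, matching the dashboard schema. The PHP
benchmark takes a --behavior_prefix flag so pure-PHP and C-extension
runs can be told apart, the JS benchmark accepts --json_output=<file>
to dump its results as JSON, and result_parser.py/result_uploader.py
gain -node, -php and -php_c inputs.

Example invocation (file and dataset names are illustrative):

  node js/js_benchmark.js --json_output=$(pwd)/tmp/node_result.json \
      dataset.pb
  ./util/result_uploader.py -node ../tmp/node_result.json \
      -php ../tmp/php_result.json -php_c ../tmp/php_c_result.json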
diff --git a/benchmarks/Makefile.am b/benchmarks/Makefile.am
index c758ee6..a9c69c7 100644
--- a/benchmarks/Makefile.am
+++ b/benchmarks/Makefile.am
@@ -260,7 +260,7 @@
@echo 'all_data=""' >> go-benchmark
@echo 'conf=()' >> go-benchmark
@echo 'data_files=()' >> go-benchmark
- @echo 'for arg in $$@; do if [[ $${arg:0:1} == "-" ]]; then conf+=($$arg); else data_files+=("../$$arg"); fi; done' >> go-benchmark
+ @echo 'for arg in $$@; do if [[ $${arg:0:1} == "-" ]]; then conf+=($$arg); else data_files+=("$$arg"); fi; done' >> go-benchmark
@echo 'go test -bench=. $${conf[*]} -- $${data_files[*]}' >> go-benchmark
@echo 'cd ..' >> go-benchmark
@chmod +x go-benchmark
@@ -533,7 +533,6 @@
@echo '#! /bin/bash' > php-c-benchmark
@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-c-benchmark
@echo 'export PROTOBUF_PHP_EXTDIR="$$PROTOBUF_PHP_SRCDIR/../ext/google/protobuf/modules"' >> php-c-benchmark
- @echo 'echo "$$PROTOBUF_PHP_EXTDIR/protobuf.so"' >> php-c-benchmark
@echo 'cd tmp/php' >> php-c-benchmark
@echo 'export CURRENT_DIR=$$(pwd)' >> php-c-benchmark
@echo 'php -d auto_prepend_file="autoload.php" -d include_path="$$(pwd)" -d extension="$$PROTOBUF_PHP_EXTDIR/protobuf.so" Google/Protobuf/Benchmark/PhpBenchmark.php $$@' >> php-c-benchmark
diff --git a/benchmarks/js/js_benchmark.js b/benchmarks/js/js_benchmark.js
index 14905e3..875be68 100644
--- a/benchmarks/js/js_benchmark.js
+++ b/benchmarks/js/js_benchmark.js
@@ -18,6 +18,7 @@
}
var results = [];
+var json_file = "";
console.log("#####################################################");
console.log("Js Benchmark: ");
@@ -25,6 +26,11 @@
if (index < 2) {
return;
}
+  if (filename.indexOf("--json_output=") == 0) {
+ json_file = filename.replace(/^--json_output=/, '');
+ return;
+ }
+
var benchmarkDataset =
proto.benchmarks.BenchmarkDataset.deserializeBinary(fs.readFileSync(filename));
var messageList = [];
@@ -55,8 +61,9 @@
results.push({
filename: filename,
benchmarks: {
- protobufjs_decoding: senarios.benches[0] * totalBytes,
- protobufjs_encoding: senarios.benches[1] * totalBytes
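+      // Scale to MB/s so JS results match the other languages on the dashboard.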
+ protobufjs_decoding: senarios.benches[0] * totalBytes / 1024 / 1024,
+ protobufjs_encoding: senarios.benches[1] * totalBytes / 1024 / 1024
}
})
@@ -68,3 +75,9 @@
});
console.log("#####################################################");
+if (json_file != "") {
+ fs.writeFile(json_file, JSON.stringify(results), (err) => {
+ if (err) throw err;
+ });
+}
+
diff --git a/benchmarks/php/PhpBenchmark.php b/benchmarks/php/PhpBenchmark.php
index 058940d..9c1132d 100644
--- a/benchmarks/php/PhpBenchmark.php
+++ b/benchmarks/php/PhpBenchmark.php
@@ -93,7 +93,7 @@
}
}
-function runBenchmark($file) {
+function runBenchmark($file, $behavior_prefix) {
$datafile = fopen($file, "r") or die("Unable to open file " . $file);
$bytes = fread($datafile, filesize($file));
$dataset = new BenchmarkDataset(NULL);
@@ -119,8 +119,8 @@
return array(
"filename" => $file,
"benchmarks" => array(
- "parse_php" => $parse_benchmark->runBenchmark(),
- "serailize_php" => $serialize_benchmark->runBenchmark()
+ $behavior_prefix . "_parse" => $parse_benchmark->runBenchmark(),
+            $behavior_prefix . "_serialize" => $serialize_benchmark->runBenchmark()
),
"message_name" => $dataset->getMessageName()
);
@@ -129,15 +129,29 @@
// main
$json_output = false;
$results = array();
+$behavior_prefix = "";
+
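+// First pass: collect command-line flags so they apply to every data file.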
foreach ($argv as $index => $arg) {
if ($index == 0) {
continue;
}
if ($arg == "--json") {
$json_output = true;
+    } else if (strpos($arg, "--behavior_prefix") === 0) {
+ $behavior_prefix = str_replace("--behavior_prefix=", "", $arg);
+ }
+}
+
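+// Second pass: run the benchmark for each data-file argument, skipping flags.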
+foreach ($argv as $index => $arg) {
+ if ($index == 0) {
+ continue;
+ }
+ if (substr($arg, 0, 2) == "--") {
continue;
} else {
- array_push($results, runBenchmark($arg));
+ array_push($results, runBenchmark($arg, $behavior_prefix));
}
}
diff --git a/benchmarks/python/py_benchmark.py b/benchmarks/python/py_benchmark.py
index e86b61e..ebb1974 100755
--- a/benchmarks/python/py_benchmark.py
+++ b/benchmarks/python/py_benchmark.py
@@ -44,9 +44,14 @@
data = open(filename).read()
benchmark_dataset = benchmarks_pb2.BenchmarkDataset()
benchmark_dataset.ParseFromString(data)
+ total_bytes = 0
+ for payload in benchmark_dataset.payload:
+ total_bytes += len(payload)
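+  # Pass the aggregate payload size so results can be reported as throughput.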
benchmark_util = Benchmark(full_iteration=len(benchmark_dataset.payload),
module="py_benchmark",
- setup_method="init")
+ setup_method="init",
+ total_bytes=total_bytes)
result={}
result["filename"] = filename
result["message_name"] = benchmark_dataset.message_name
@@ -61,10 +66,11 @@
def init(filename):
- global benchmark_dataset, message_class, message_list, counter
+ global benchmark_dataset, message_class, message_list, counter, total_bytes
message_list=[]
counter = 0
- data = open(os.path.dirname(sys.argv[0]) + "/../" + filename).read()
+ total_bytes = 0
+ data = open(filename).read()
benchmark_dataset = benchmarks_pb2.BenchmarkDataset()
benchmark_dataset.ParseFromString(data)
@@ -85,6 +91,7 @@
temp = message_class()
temp.ParseFromString(one_payload)
message_list.append(temp)
+ total_bytes += len(one_payload)
def parse_from_benchmark():
@@ -101,11 +108,12 @@
class Benchmark:
def __init__(self, module=None, test_method=None,
- setup_method=None, full_iteration = 1):
+ setup_method=None, total_bytes=None, full_iteration = 1):
self.full_iteration = full_iteration
self.module = module
self.test_method = test_method
self.setup_method = setup_method
+ self.total_bytes = total_bytes
def set_test_method(self, test_method):
self.test_method = test_method
@@ -130,7 +138,8 @@
t = timeit.timeit(stmt="%s(%s)" % (self.test_method, test_method_args),
setup=self.full_setup_code(setup_method_args),
number=reps);
- return 1.0 * t / reps * (10 ** 9)
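+    # Report throughput in MB/s: total payload size in MiB over the average time per run.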
+ return self.total_bytes * 1.0 / 2 ** 20 / (1.0 * t / reps)
if __name__ == "__main__":
@@ -144,10 +153,10 @@
for result in results:
print("Message %s of dataset file %s" % \
(result["message_name"], result["filename"]))
- print("Average time for parse_from_benchmark: %.2f ns" % \
+ print("Average throughput for parse_from_benchmark: %.2f MB/s" % \
(result["benchmarks"][ \
args.behavior_prefix + "_parse_from_benchmark"]))
- print("Average time for serialize_to_benchmark: %.2f ns" % \
+ print("Average throughput for serialize_to_benchmark: %.2f MB/s" % \
(result["benchmarks"][ \
args.behavior_prefix + "_serialize_to_benchmark"]))
print("")
diff --git a/benchmarks/util/result_parser.py b/benchmarks/util/result_parser.py
index 32f35a9..0b8fc49 100755
--- a/benchmarks/util/result_parser.py
+++ b/benchmarks/util/result_parser.py
@@ -115,7 +115,6 @@
# behavior: results,
# ...
# },
-# "message_name": STRING
# },
# ...
# ], #pure-python
@@ -136,8 +135,7 @@
"language": "python",
"dataFilename": __extract_file_name(result["filename"]),
"behavior": behavior,
- "throughput": avg_size /
- result["benchmarks"][behavior] * 1e9 / 2 ** 20
+ "throughput": result["benchmarks"][behavior]
})
@@ -220,7 +218,8 @@
continue
first_slash_index = result_list[0].find('/')
last_slash_index = result_list[0].rfind('/')
- full_filename = result_list[0][first_slash_index+4:last_slash_index] # delete ../ prefix
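+      # The go-benchmark script no longer adds a "../" prefix to data files.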
+ full_filename = result_list[0][first_slash_index+1:last_slash_index]
total_bytes, _ = __get_data_size(full_filename)
behavior_with_suffix = result_list[0][last_slash_index+1:]
last_dash = behavior_with_suffix.rfind("-")
@@ -236,11 +235,44 @@
})
+# Node/PHP results example:
+#
+# [
+# {
+# "filename": string,
+# "benchmarks": {
+# behavior: results,
+# ...
+# },
+# },
+# ...
+# ]
+def __parse_js_php_result(filename, language):
+ if filename == "":
+ return
+ if filename[0] != '/':
+ filename = os.path.dirname(os.path.abspath(__file__)) + '/' + filename
+ with open(filename) as f:
+ results = json.loads(f.read())
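+    # Node/PHP benchmarks already report throughput in MB/s, so values are used as-is.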
+ for result in results:
+ for behavior in result["benchmarks"]:
+ __results.append({
+ "language": language,
+ "dataFilename": __extract_file_name(result["filename"]),
+ "behavior": behavior,
+ "throughput": result["benchmarks"][behavior]
+ })
+
+
def get_result_from_file(cpp_file="",
java_file="",
python_file="",
go_file="",
- synthetic_file=""):
+ synthetic_file="",
+ node_file="",
+ php_c_file="",
+ php_file=""):
results = {}
if cpp_file != "":
__parse_cpp_result(cpp_file)
@@ -252,5 +284,11 @@
__parse_go_result(go_file)
if synthetic_file != "":
__parse_synthetic_result(synthetic_file)
+ if node_file != "":
+ __parse_js_php_result(node_file, "node")
+ if php_file != "":
+ __parse_js_php_result(php_file, "php")
+ if php_c_file != "":
+ __parse_js_php_result(php_c_file, "php")
return __results
diff --git a/benchmarks/util/result_uploader.py b/benchmarks/util/result_uploader.py
index a667da0..9167caf 100755
--- a/benchmarks/util/result_uploader.py
+++ b/benchmarks/util/result_uploader.py
@@ -82,6 +82,15 @@
parser.add_argument("-go", "--go_input_file",
help="The golang benchmark result file's name",
default="")
+ parser.add_argument("-node", "--node_input_file",
+                      help="The Node.js benchmark result file's name",
+ default="")
+ parser.add_argument("-php", "--php_input_file",
+                      help="The pure PHP benchmark result file's name",
+ default="")
+ parser.add_argument("-php_c", "--php_c_input_file",
+                      help="The PHP (C extension) benchmark result file's name",
+ default="")
args = parser.parse_args()
metadata = get_metadata()
@@ -90,5 +99,8 @@
cpp_file=args.cpp_input_file,
java_file=args.java_input_file,
python_file=args.python_input_file,
- go_file=args.go_input_file
+ go_file=args.go_input_file,
+ node_file=args.node_input_file,
+ php_file=args.php_input_file,
+ php_c_file=args.php_c_input_file,
), metadata)
\ No newline at end of file