Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
i4
manycore
emper-io-eval
Commits
792af5ce
Commit
792af5ce
authored
Nov 30, 2021
by
Florian Fischer
Browse files
add option to record profile of the server using perf
parent
cc5067d2
Pipeline
#73070
passed with stage
in 1 minute and 48 seconds
Changes
3
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
bench/perf.py
0 → 100644
View file @
792af5ce
# Copyright 2021 Florian Fischer
"""Perf related variables"""

# Name of the perf executable used to build the command lines below.
PERF_EXE = 'perf'

# Baseline set of hardware/software events collected by `perf stat`.
DEFAULT_EVENTS = ('task-clock,context-switches,cpu-migrations,page-faults,cycles,'
                  'stalled-cycles-frontend,stalled-cycles-backend,instructions,'
                  'branches,branch-misses')

# -d: baseline plus L1 data cache and last-level cache events.
D_EVENTS = DEFAULT_EVENTS + ',L1-dcache-loads,L1-dcache-load-misses,LLC-loads,LLC-load-misses'

# -dd: additionally instruction cache and TLB events.
DD_EVENTS = D_EVENTS + (',L1-icache-loads,L1-icache-load-misses,dTLB-loads,'
                        'dTLB-load-misses,iTLB-loads,iTLB-load-misses')

# -ddd: additionally L1 data cache prefetch events.
DDD_EVENTS = DD_EVENTS + ',L1-dcache-prefetches,L1-dcache-prefetch-misses'

# Command templates. PERF_EXE is interpolated now; {output}, {pid} and
# {events} are left as str.format() placeholders (hence the doubled braces)
# to be filled in when the command is actually launched.
PERF_RECORD_CMD = f'{PERF_EXE} record --call-graph dwarf -o {{output}} -p {{pid}}'
PERF_STAT_CMD = f'{PERF_EXE} stat -e {{events}} record -o {{output}} -p {{pid}}'

# Output file name templates; {run} identifies the benchmark run.
PERF_RECORD_OUTPUT = '{run}_perf_record.data'
PERF_STAT_OUTPUT = '{run}_perf_stat.data'
bench/server.py
View file @
792af5ce
...
...
@@ -7,6 +7,7 @@ import subprocess
from
.globalvars
import
KILL_CMD
,
TERMINATION_TIME
from
.util
import
cmd_run
,
prepare_env
from
.
import
perf
log
=
logging
.
getLogger
(
__name__
)
...
...
@@ -26,11 +27,16 @@ class Server(subprocess.Popen):
env
=
None
,
measure_cmd
=
None
,
remote_cmd
=
None
,
perf_counters
=
None
,
perf_record
=
False
,
host
=
None
):
self
.
name
=
name
self
.
cmd
=
cmd
self
.
bin
=
cmd
.
split
()[
0
]
self
.
remote_cmd
=
remote_cmd
self
.
server_pid
=
0
self
.
perf_record_popen
=
None
self
.
perf_stat_popen
=
None
if
measure_cmd
:
self
.
cmd
=
f
'
{
measure_cmd
}
{
cmd
}
'
...
...
@@ -64,6 +70,55 @@ class Server(subprocess.Popen):
env
=
server_env
,
text
=
True
)
# type: ignore
if
perf_record
:
perf_record_out
=
bench_dir
/
perf
.
PERF_RECORD_OUTPUT
.
format
(
run
=
run
)
perf_record_cmd
=
perf
.
PERF_RECORD_CMD
.
format
(
pid
=
self
.
get_server_pid
(),
output
=
perf_record_out
)
log
.
debug
(
'start perf record process using: %s'
,
perf_record_cmd
)
self
.
perf_record_popen
=
subprocess
.
Popen
(
perf_record_cmd
.
split
())
if
perf_counters
is
not
None
:
if
len
(
perf_counters
)
==
0
:
perf_counters
=
perf
.
DDD_EVENTS
else
:
perf_counters
=
','
.
join
(
perf_counters
)
perf_stat_out
=
bench_dir
/
perf
.
PERF_STAT_OUTPUT
.
format
(
run
=
run
)
perf_stat_cmd
=
perf
.
PERF_STAT_CMD
.
format
(
pid
=
self
.
get_server_pid
(),
events
=
perf_counters
,
output
=
perf_stat_out
)
log
.
debug
(
'start perf stat process using: %s'
,
perf_stat_cmd
)
self
.
perf_stat_popen
=
subprocess
.
Popen
(
perf_stat_cmd
.
split
())
def
get_server_pid
(
self
)
->
int
:
"""Retrieve the pid of the server"""
if
self
.
server_pid
:
return
self
.
server_pid
log
.
debug
(
'retrieve pid of the started echo server'
)
ppid
=
self
.
pid
# We just started the server
if
self
.
cmd
.
startswith
(
self
.
bin
):
log
.
debug
(
'We just run started the echoserver -> Popen.pid: %s'
,
ppid
)
return
ppid
retrieval_cmd
=
f
'pgrep -x
{
Path
(
self
.
bin
).
name
}
'
if
self
.
remote_cmd
:
retrieval_cmd
=
f
'
{
self
.
remote_cmd
}
{
retrieval_cmd
}
'
output
=
subprocess
.
check_output
(
retrieval_cmd
.
split
(),
text
=
True
)
log
.
debug
(
'We retrieved the echoserver
\'
s pid using %s -> %s'
,
retrieval_cmd
,
output
)
self
.
server_pid
=
int
(
output
.
strip
())
return
self
.
server_pid
def
shutdown
(
self
)
->
bool
:
"""Gracfully shutdown a running server"""
if
self
.
poll
()
is
not
None
:
...
...
@@ -116,3 +171,9 @@ class Server(subprocess.Popen):
self
.
fout
.
close
()
self
.
ferr
.
close
()
if
self
.
perf_record_popen
:
self
.
perf_record_popen
.
wait
()
if
self
.
perf_stat_popen
:
self
.
perf_stat_popen
.
wait
()
eval.py
View file @
792af5ce
...
...
@@ -131,7 +131,9 @@ def bench(server_cmds) -> EvalResult:
env
=
SERVER_ENV
,
measure_cmd
=
measure_cmd
,
remote_cmd
=
remote_cmd
if
REMOTE_CMD
else
None
,
host
=
HOST
)
host
=
HOST
,
perf_record
=
args
.
perf_record
,
perf_counters
=
args
.
perf_stat
)
sleep
(
STARTUP_TIME
)
if
server
.
poll
()
is
not
None
:
...
...
@@ -241,6 +243,14 @@ if __name__ == '__main__':
'--no-fetch'
,
help
=
'do not fetch the emper remotes'
,
action
=
'store_true'
)
parser
.
add_argument
(
'--perf-stat'
,
help
=
'use perf to collect performance counter stats of the specified counters'
,
nargs
=
'*'
)
parser
.
add_argument
(
'--perf-record'
,
help
=
'use perf to record the servers profile'
,
action
=
'store_true'
)
parser
.
add_argument
(
'implementations'
,
help
=
'server implementations to benchmark'
,
nargs
=
'*'
)
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment