acceptance-tests
data
debian
docs
external
Newtonsoft.Json
api-doc-tools
api-snapshot
aspnetwebstack
bdwgc
binary-reference-assemblies
bockbuild
boringssl
cecil
cecil-legacy
corefx
corert
helix-binaries
ikdasm
ikvm
illinker-test-assets
linker
llvm-project
clang
clang-tools-extra
compiler-rt
eng
libcxx
libcxxabi
libunwind
lld
lldb
cmake
docs
examples
customization
darwin
functions
interposing
lookup
plugins
python
cmdtemplate.py
crashlog.py
delta.py
diagnose_nsstring.py
diagnose_unwind.py
dict_utils.py
disasm-stress-test.py
disasm.py
disassembly_mode.py
file_extract.py
gdb_disassemble.py
gdbremote.py
globals.py
jump.py
lldb_module_utils.py
lldbtk.py
mach_o.py
memory.py
operating_system.py
performance.py
process_events.py
pytracer.py
sbvalue.py
scripted_step.py
shadow.py
sources.py
stacks.py
step_and_print.py
symbolication.py
types.py
x86_64_linux_target_definition.py
x86_64_qemu_target_definition.py
x86_64_target_definition.py
scripting
summaries
synthetic
test
include
lit
lldb.xcodeproj
lldb.xcworkspace
packages
resources
scripts
source
third_party
tools
unittests
utils
www
.arcconfig
.clang-format
.gitignore
CMakeLists.txt
CODE_OWNERS.txt
INSTALL.txt
LICENSE.TXT
use_lldb_suite_root.py
llvm
nuget
openmp
polly
Directory.Build.props
Directory.Build.targets
NuGet.config
azure-pipelines.yml
build.cmd
build.sh
dir.common.props
global.json
llvm.proj
mxe-Win64.cmake.in
nuget-buildtasks
nunit-lite
roslyn-binaries
rx
xunit-binaries
how-to-bump-roslyn-binaries.md
ikvm-native
llvm
m4
man
mcs
mono
msvc
netcore
po
runtime
samples
scripts
support
tools
COPYING.LIB
LICENSE
Makefile.am
Makefile.in
NEWS
README.md
acinclude.m4
aclocal.m4
autogen.sh
code_of_conduct.md
compile
config.guess
config.h.in
config.rpath
config.sub
configure.REMOVED.git-id
configure.ac.REMOVED.git-id
depcomp
install-sh
ltmain.sh.REMOVED.git-id
missing
mkinstalldirs
mono-uninstalled.pc.in
test-driver
winconfig.h
stacks.py · 70 lines · 2.5 KiB · Python · Executable File
#!/usr/bin/python

import lldb
import optparse
import shlex


def stack_frames(debugger, command, result, internal_dict):
    command_args = shlex.split(command)
    usage = "usage: %prog [options]"
    description = '''This command will enumerate all stack frames, print the stack size for each, and print an aggregation of which functions have the largest stack frame sizes at the end.'''
    parser = optparse.OptionParser(
        description=description, prog='stack_frames', usage=usage)
    parser.add_option(
        '-v',
        '--verbose',
        action='store_true',
        dest='verbose',
        help='display verbose debug info',
        default=False)
    try:
        (options, args) = parser.parse_args(command_args)
    except:
        # optparse raises SystemExit when it prints help or hits a parse
        # error; just bail out instead of taking down the debugger session.
        return

    target = debugger.GetSelectedTarget()
    process = target.GetProcess()

    frame_info = {}
    for thread in process:
        last_frame = None
        print("thread %u" % (thread.id))
        for frame in thread.frames:
            if last_frame:
                frame_size = 0
                if frame.idx == 1:
                    if frame.fp == last_frame.fp:
                        # The first frame has no frame of its own (might be
                        # right at the entry point).
                        first_frame_size = 0
                        frame_size = frame.fp - frame.sp
                    else:
                        # First frame that has a valid size.
                        first_frame_size = last_frame.fp - last_frame.sp
                    print("<%#7x> %s" % (first_frame_size, last_frame))
                    if first_frame_size:
                        name = last_frame.name
                        if name not in frame_info:
                            frame_info[name] = first_frame_size
                        else:
                            frame_info[name] += first_frame_size
                else:
                    # Second or higher frame: its size is the distance between
                    # its frame pointer and the previous frame's frame pointer.
                    frame_size = frame.fp - last_frame.fp
                print("<%#7x> %s" % (frame_size, frame))
                if frame_size > 0:
                    name = frame.name
                    if name not in frame_info:
                        frame_info[name] = frame_size
                    else:
                        frame_info[name] += frame_size
            last_frame = frame
    print(frame_info)


lldb.debugger.HandleCommand(
    "command script add -f stacks.stack_frames stack_frames")
print("A new command called 'stack_frames' was added, type 'stack_frames --help' for more information.")
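A minimal usage sketch, assuming the script lives at /path/to/stacks.py (a hypothetical path) and a target is already loaded and stopped at a breakpoint. Importing the file registers the new command, which can then be run like any built-in:

(lldb) command script import /path/to/stacks.py
A new command called 'stack_frames' was added, type 'stack_frames --help' for more information.
(lldb) stack_frames

Because frame sizes are computed from the difference between successive frame pointers, the output is only meaningful while the process is stopped with a valid call stack to walk.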