Imported Upstream version 6.10.0.49

Former-commit-id: 1d6753294b2993e1fbf92de9366bb9544db4189b
This commit is contained in:
Xamarin Public Jenkins (auto-signing)
2020-01-16 16:38:04 +00:00
parent d94e79959b
commit 468663ddbb
48518 changed files with 2789335 additions and 61176 deletions

View File

@ -0,0 +1,142 @@
#!/bin/sh
# check_lint.sh: run cpplint.py and litlint.py over the compiler-rt
# sources using per-project cpplint filter sets, and exit nonzero if any
# lint diagnostics are produced.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Guess path to LLVM_CHECKOUT if not provided
if [ "${LLVM_CHECKOUT}" = "" ]; then
  LLVM_CHECKOUT="${SCRIPT_DIR}/../../../../../"
fi
# python tools setup
CPPLINT=${SCRIPT_DIR}/cpplint.py
LITLINT=${SCRIPT_DIR}/litlint.py
# Optionally run both linters through an explicitly chosen Python
# interpreter instead of relying on their shebang lines.
if [ "${PYTHON_EXECUTABLE}" != "" ]; then
  CPPLINT="${PYTHON_EXECUTABLE} ${CPPLINT}"
  LITLINT="${PYTHON_EXECUTABLE} ${LITLINT}"
fi
# Filters
# Each *_LINT_FILTER is a comma-separated cpplint --filter value; the
# leading '-' on each category disables that check.
# TODO: remove some of these filters
COMMON_LINT_FILTER=-build/include,-build/header_guard,-legal/copyright,-whitespace/comments,-readability/casting,\
-build/namespaces,-readability/braces
ASAN_RTL_LINT_FILTER=${COMMON_LINT_FILTER},-runtime/int
ASAN_TEST_LINT_FILTER=${COMMON_LINT_FILTER},-runtime/sizeof,-runtime/int,-runtime/printf,-runtime/threadsafe_fn
ASAN_LIT_TEST_LINT_FILTER=${ASAN_TEST_LINT_FILTER},-whitespace/line_length
TSAN_RTL_LINT_FILTER=${COMMON_LINT_FILTER}
TSAN_TEST_LINT_FILTER=${TSAN_RTL_LINT_FILTER},-runtime/threadsafe_fn,-runtime/int
TSAN_LIT_TEST_LINT_FILTER=${TSAN_TEST_LINT_FILTER},-whitespace/line_length
MSAN_RTL_LINT_FILTER=${COMMON_LINT_FILTER}
LSAN_RTL_LINT_FILTER=${COMMON_LINT_FILTER}
LSAN_LIT_TEST_LINT_FILTER=${LSAN_RTL_LINT_FILTER},-whitespace/line_length
DFSAN_RTL_LINT_FILTER=${COMMON_LINT_FILTER},-runtime/int,-runtime/printf,-runtime/references,-readability/function
SCUDO_RTL_LINT_FILTER=${COMMON_LINT_FILTER}
COMMON_RTL_INC_LINT_FILTER=${COMMON_LINT_FILTER},-runtime/int,-runtime/sizeof,-runtime/printf,-readability/fn_size
SANITIZER_INCLUDES_LINT_FILTER=${COMMON_LINT_FILTER},-runtime/int
# Scratch directory for per-task logs; MKTEMP is the command used to
# create individual temp files inside it.
MKTEMP_DIR=$(mktemp -qd /tmp/check_lint.XXXXXXXXXX)
MKTEMP="mktemp -q ${MKTEMP_DIR}/tmp.XXXXXXXXXX"
# Remove the scratch directory (and every per-task log in it) on exit.
cleanup() {
  # Quote the expansion so an unexpected space in the path cannot make
  # rm operate on the wrong files.
  rm -rf "${MKTEMP_DIR}"
}
trap cleanup EXIT
# Fail fast if the checkout directory does not exist instead of linting
# whatever directory we happen to be in.
cd "${LLVM_CHECKOUT}" || exit 1
EXITSTATUS=0
# Diagnostics from all lint tasks are accumulated here.
ERROR_LOG=$(${MKTEMP})
# run_lint FILTER FILE...
# Run cpplint with the given --filter over FILE..., appending any real
# diagnostics to ERROR_LOG, then run litlint over the same files.
# Unless SILENT=1, the raw cpplint output is echoed to stdout.
run_lint() {
  FILTER=$1
  shift
  TASK_LOG=$(${MKTEMP})
  # CPPLINT/LITLINT may be "interpreter script" pairs, so they must stay
  # unquoted to undergo word splitting; the filenames and logs are quoted.
  ${CPPLINT} --filter="${FILTER}" "$@" 2>"${TASK_LOG}"
  if [ "$?" != "0" ]; then
    # Keep only real diagnostics; drop cpplint's progress chatter.
    # (grep reads the log directly -- no "cat file | grep" needed.)
    grep -v "Done processing" "${TASK_LOG}" | grep -v "Total errors found" \
      | grep -v "Skipping input" >> "${ERROR_LOG}"
  fi
  if [ "${SILENT}" != "1" ]; then
    cat "${TASK_LOG}"
  fi
  ${LITLINT} "$@" 2>>"${ERROR_LOG}"
}
# Default to the in-tree location of compiler-rt if not overridden.
if [ "${COMPILER_RT}" = "" ]; then
  COMPILER_RT=projects/compiler-rt
fi
LIT_TESTS=${COMPILER_RT}/test
# NOTE: every run_lint invocation below is launched in the background
# ('&'); all jobs are joined by the single "wait" before the final
# error report.
# Headers
SANITIZER_INCLUDES=${COMPILER_RT}/include/sanitizer
run_lint ${SANITIZER_INCLUDES_LINT_FILTER} ${SANITIZER_INCLUDES}/*.h &
# Sanitizer_common
COMMON_RTL=${COMPILER_RT}/lib/sanitizer_common
run_lint ${COMMON_RTL_INC_LINT_FILTER} ${COMMON_RTL}/*.cc \
         ${COMMON_RTL}/*.h \
         ${COMMON_RTL}/tests/*.cc &
# Interception
INTERCEPTION=${COMPILER_RT}/lib/interception
run_lint ${ASAN_RTL_LINT_FILTER} ${INTERCEPTION}/*.cc \
         ${INTERCEPTION}/*.h &
# ASan
ASAN_RTL=${COMPILER_RT}/lib/asan
run_lint ${ASAN_RTL_LINT_FILTER} ${ASAN_RTL}/*.cc \
         ${ASAN_RTL}/*.h &
run_lint ${ASAN_TEST_LINT_FILTER} ${ASAN_RTL}/tests/*.cc \
         ${ASAN_RTL}/tests/*.h &
run_lint ${ASAN_LIT_TEST_LINT_FILTER} ${LIT_TESTS}/asan/*/*.cc &
# TSan
TSAN_RTL=${COMPILER_RT}/lib/tsan
run_lint ${TSAN_RTL_LINT_FILTER} ${TSAN_RTL}/rtl/*.cc \
         ${TSAN_RTL}/rtl/*.h &
run_lint ${TSAN_TEST_LINT_FILTER} ${TSAN_RTL}/tests/rtl/*.cc \
         ${TSAN_RTL}/tests/rtl/*.h \
         ${TSAN_RTL}/tests/unit/*.cc &
run_lint ${TSAN_LIT_TEST_LINT_FILTER} ${LIT_TESTS}/tsan/*.cc &
# MSan
MSAN_RTL=${COMPILER_RT}/lib/msan
run_lint ${MSAN_RTL_LINT_FILTER} ${MSAN_RTL}/*.cc \
         ${MSAN_RTL}/*.h &
# LSan
LSAN_RTL=${COMPILER_RT}/lib/lsan
run_lint ${LSAN_RTL_LINT_FILTER} ${LSAN_RTL}/*.cc \
         ${LSAN_RTL}/*.h &
run_lint ${LSAN_LIT_TEST_LINT_FILTER} ${LIT_TESTS}/lsan/*/*.cc &
# DFSan
DFSAN_RTL=${COMPILER_RT}/lib/dfsan
run_lint ${DFSAN_RTL_LINT_FILTER} ${DFSAN_RTL}/*.cc \
         ${DFSAN_RTL}/*.h &
# This check runs in the foreground and appends straight to ERROR_LOG.
${DFSAN_RTL}/scripts/check_custom_wrappers.sh >> $ERROR_LOG
# Scudo
SCUDO_RTL=${COMPILER_RT}/lib/scudo
run_lint ${SCUDO_RTL_LINT_FILTER} ${SCUDO_RTL}/*.cpp \
         ${SCUDO_RTL}/*.h &
# Misc files
FILES=${COMMON_RTL}/*.inc
TMPFILES=""
for FILE in $FILES; do
  # cpplint only understands C++ file extensions, so copy each .inc file
  # to a temporary *.cc name before linting it.
  TMPFILE="$(${MKTEMP}).$(basename ${FILE}).cc"
  cp -f $FILE $TMPFILE
  run_lint ${COMMON_RTL_INC_LINT_FILTER} $TMPFILE &
  TMPFILES="$TMPFILES $TMPFILE"
done
# Join all background lint jobs before inspecting the error log.
wait
for temp in $TMPFILES; do
  rm -f $temp
done
# A non-empty error log means at least one diagnostic was produced.
if [ -s $ERROR_LOG ]; then
  cat $ERROR_LOG
  exit 1
fi
exit 0

View File

@ -0,0 +1 @@
d45c47f7ed0cd3e3bb917ccbb9a3cb971186a7a0

View File

@ -0,0 +1,132 @@
#!/usr/bin/env python
#===- lib/sanitizer_common/scripts/gen_dynamic_list.py ---------------------===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
#
# Generates the list of functions that should be exported from sanitizer
# runtimes. The output format is recognized by --dynamic-list linker option.
# Usage:
# gen_dynamic_list.py libclang_rt.*san*.a [ files ... ]
#
#===------------------------------------------------------------------------===#
import argparse
import os
import re
import subprocess
import sys
import platform
# Itanium-ABI-mangled names of the C++ new/delete operators that the
# sanitizer runtimes replace and therefore must export.  Both the
# 64-bit ("m" = unsigned long) and 32-bit ("j" = unsigned int) size
# variants are listed, with and without std::nothrow_t/std::align_val_t
# overloads.
new_delete = set([
                  '_Znam', '_ZnamRKSt9nothrow_t',    # operator new[](unsigned long)
                  '_Znwm', '_ZnwmRKSt9nothrow_t',    # operator new(unsigned long)
                  '_Znaj', '_ZnajRKSt9nothrow_t',    # operator new[](unsigned int)
                  '_Znwj', '_ZnwjRKSt9nothrow_t',    # operator new(unsigned int)
                  # operator new(unsigned long, std::align_val_t)
                  '_ZnwmSt11align_val_t', '_ZnwmSt11align_val_tRKSt9nothrow_t',
                  # operator new(unsigned int, std::align_val_t)
                  '_ZnwjSt11align_val_t', '_ZnwjSt11align_val_tRKSt9nothrow_t',
                  # operator new[](unsigned long, std::align_val_t)
                  '_ZnamSt11align_val_t', '_ZnamSt11align_val_tRKSt9nothrow_t',
                  # operator new[](unsigned int, std::align_val_t)
                  '_ZnajSt11align_val_t', '_ZnajSt11align_val_tRKSt9nothrow_t',
                  '_ZdaPv', '_ZdaPvRKSt9nothrow_t',  # operator delete[](void *)
                  '_ZdlPv', '_ZdlPvRKSt9nothrow_t',  # operator delete(void *)
                  '_ZdaPvm',                         # operator delete[](void*, unsigned long)
                  '_ZdlPvm',                         # operator delete(void*, unsigned long)
                  '_ZdaPvj',                         # operator delete[](void*, unsigned int)
                  '_ZdlPvj',                         # operator delete(void*, unsigned int)
                  # operator delete(void*, std::align_val_t)
                  '_ZdlPvSt11align_val_t', '_ZdlPvSt11align_val_tRKSt9nothrow_t',
                  # operator delete[](void*, std::align_val_t)
                  '_ZdaPvSt11align_val_t', '_ZdaPvSt11align_val_tRKSt9nothrow_t',
                  # operator delete(void*, unsigned long, std::align_val_t)
                  '_ZdlPvmSt11align_val_t',
                  # operator delete[](void*, unsigned long, std::align_val_t)
                  '_ZdaPvmSt11align_val_t',
                  # operator delete(void*, unsigned int, std::align_val_t)
                  '_ZdlPvjSt11align_val_t',
                  # operator delete[](void*, unsigned int, std::align_val_t)
                  '_ZdaPvjSt11align_val_t',
                  ])
# Library functions that carry multiple symbol versions (e.g. on glibc).
# Their interceptors' original names are not re-exported by default (see
# main) because doing so triggers an internal error in the gold linker.
versioned_functions = set(['memcpy', 'pthread_attr_getaffinity_np',
                           'pthread_cond_broadcast',
                           'pthread_cond_destroy', 'pthread_cond_init',
                           'pthread_cond_signal', 'pthread_cond_timedwait',
                           'pthread_cond_wait', 'realpath',
                           'sched_getaffinity'])
def get_global_functions(library):
  """Return the global function symbols defined in *library*.

  Runs ``nm`` (overridable through the NM environment variable) on the
  library and collects the names of symbols whose type column is T or W
  (plus D on PowerPC, where nm prints function descriptors from the
  .data section).

  Raises:
    subprocess.CalledProcessError: if nm exits with a nonzero status.
  """
  nm_binary = os.environ.get('NM', 'nm')
  proc = subprocess.Popen([nm_binary, library], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  nm_output = proc.communicate()[0].decode()
  if proc.returncode != 0:
    raise subprocess.CalledProcessError(proc.returncode, nm_binary)
  wanted_types = ['T', 'W']
  # On PowerPC, nm prints function descriptors from .data section.
  if platform.uname()[4] in ["powerpc", "ppc64"]:
    wanted_types += ['D']
  result = []
  for line in nm_output.split('\n'):
    columns = line.split(' ')
    if len(columns) == 3 and columns[1] in wanted_types:
      result.append(columns[2])
  return result
def main(argv):
  """Build and print the export list for the given sanitizer libraries.

  Args:
    argv: sys.argv-style list; argv[0] is the program name, the rest are
        the command-line options ([--version-list] [--extra FILE]...
        LIBRARY...).

  Prints the list in the format recognized by the linker's
  --dynamic-list / --version-script options.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--version-list', action='store_true')
  parser.add_argument('--extra', default=[], action='append')
  parser.add_argument('libraries', default=[], nargs='+')
  # Fix: parse the argv that was passed in instead of implicitly
  # re-reading sys.argv (identical behavior when invoked as
  # main(sys.argv), but main() is now usable with a synthetic argv).
  args = parser.parse_args(argv[1:])

  result = []

  all_functions = []
  for library in args.libraries:
    all_functions.extend(get_global_functions(library))
  function_set = set(all_functions)
  for func in all_functions:
    # Export new/delete operators.
    if func in new_delete:
      result.append(func)
      continue
    # Export interceptors.
    match = re.match('__interceptor_(.*)', func)
    if match:
      result.append(func)
      # We have to avoid exporting the interceptors for versioned library
      # functions due to gold internal error.
      orig_name = match.group(1)
      if orig_name in function_set and (args.version_list or orig_name not in versioned_functions):
        result.append(orig_name)
      continue
    # Export sanitizer interface functions.
    if re.match('__sanitizer_(.*)', func):
      result.append(func)

  # Additional exported functions from files.
  for fname in args.extra:
    # Fix: close each extra file deterministically instead of leaking
    # the handle until interpreter shutdown.
    with open(fname, 'r') as f:
      for line in f:
        result.append(line.rstrip())
  # Print the resulting list in the format recognized by ld.
  print('{')
  if args.version_list:
    print('global:')
  result.sort()
  for f in result:
    print(u'  %s;' % f)
  if args.version_list:
    print('local:')
    print('  *;')
  print('};')

if __name__ == '__main__':
  main(sys.argv)

View File

@ -0,0 +1,72 @@
#!/usr/bin/env python
#
# litlint
#
# Ensure RUN commands in lit tests are free of common errors.
#
# If any errors are detected, litlint returns a nonzero exit code.
#
import optparse
import re
import sys
# Compiled once at import time and shared across every linted file.
# Matches a " %t" that is neither the argument of -o nor preceded by
# %run -- i.e. a test binary invoked without an emulator wrapper.
runRegex = re.compile(r'(?<!-o)(?<!%run) %t\s')

def LintLine(s):
  """Validate one line of a lit test.

  Args:
    s: str, the line to validate.

  Returns:
    A (message, column) pair: an error message and a 1-based column
    number if an error was detected, otherwise (None, None).
  """
  # Check that the RUN command can be executed with an emulator.
  match = runRegex.search(s)
  if match is None:
    # No errors.
    return (None, None)
  # Column points at the '%' of the offending %t (1-based).
  return ('missing %run before %t', match.span()[0] + 2)
def LintFile(p):
  """Check that each RUN command in file *p* can be executed with an emulator.

  Prints a caret-annotated diagnostic to stderr for every offending line.

  Args:
    p: str, valid path to a file.

  Returns:
    The number of errors detected.
  """
  errs = 0
  with open(p, 'r') as f:
    # Iterate the file lazily instead of materializing readlines().
    for i, s in enumerate(f, start=1):
      msg, col = LintLine(s)
      # Idiom fix: compare against None with "is not", not "!=".
      if msg is not None:
        errs += 1
        errorMsg = 'litlint: {}:{}:{}: error: {}.\n{}{}\n'
        arrow = (col-1) * ' ' + '^'
        sys.stderr.write(errorMsg.format(p, i, col, msg, s, arrow))
  return errs
if __name__ == "__main__":
  # Parse args
  parser = optparse.OptionParser()
  # --filter is accepted only for command-line compatibility with
  # cpplint.py (check_lint.sh passes it to both tools); it has no effect.
  parser.add_option('--filter') # ignored
  (options, filenames) = parser.parse_args()

  # Lint each file
  errs = 0
  for p in filenames:
    errs += LintFile(p)

  # If errors, return nonzero
  if errs > 0:
    sys.exit(1)

View File

@ -0,0 +1,23 @@
#!/usr/bin/python
# Tests for litlint.py
#
# Usage: python litlint_test.py
#
# Returns nonzero if any test fails
import litlint
import unittest
class TestLintLine(unittest.TestCase):
  """Unit tests for litlint.LintLine."""
  def test_missing_run(self):
    # Shorthand for the function under test.
    f = litlint.LintLine
    # A bare %t must be flagged at the 1-based column of its '%'.
    self.assertEqual(f(' %t '), ('missing %run before %t', 2))
    self.assertEqual(f(' %t\n'), ('missing %run before %t', 2))
    # %t followed by an extension is a file name, not an invocation.
    self.assertEqual(f(' %t.so '), (None, None))
    self.assertEqual(f(' %t.o '), (None, None))
    # Already wrapped in %run, or an -o output argument: fine.
    self.assertEqual(f('%run %t '), (None, None))
    self.assertEqual(f('-o %t '), (None, None))

if __name__ == '__main__':
  unittest.main()

View File

@ -0,0 +1,251 @@
#!/usr/bin/env python
# Merge or print the coverage data collected by asan's coverage.
# Input files are sequences of 4-byte integers.
# We need to merge these integers into a set and then
# either print them (as hex) or dump them into another file.
import array
import bisect
import glob
import os.path
import struct
import subprocess
import sys
# Holds argv[0] for use in diagnostics; assigned in the __main__ block.
prog_name = ""
def Usage():
  """Print a usage summary to stderr and terminate with exit code 1."""
  sys.stderr.write(
    "Usage: \n" + \
    " " + prog_name + " merge FILE [FILE...] > OUTPUT\n" \
    " " + prog_name + " print FILE [FILE...]\n" \
    " " + prog_name + " unpack FILE [FILE...]\n" \
    " " + prog_name + " rawunpack FILE [FILE ...]\n" \
    " " + prog_name + " missing BINARY < LIST_OF_PCS\n" \
    "\n")
  exit(1)
def CheckBits(bits):
  """Raise an Exception unless *bits* is 32 or 64."""
  if bits not in (32, 64):
    raise Exception("Wrong bitness: %d" % bits)

def TypeCodeForBits(bits):
  """Return the array.array type code for *bits*-bit PCs ('L' or 'I')."""
  CheckBits(bits)
  if bits == 64:
    return 'L'
  return 'I'

def TypeCodeForStruct(bits):
  """Return the struct format character for *bits*-bit PCs ('Q' or 'I')."""
  CheckBits(bits)
  if bits == 64:
    return 'Q'
  return 'I'
# Sancov files start with an 8-byte magic: kMagicFirstHalf paired with a
# bitness-specific second half (…32 or …64), stored in native byte order
# (see MagicForBits).  Fix: dropped the stray C-style trailing semicolons.
kMagic32SecondHalf = 0xFFFFFF32
kMagic64SecondHalf = 0xFFFFFF64
kMagicFirstHalf = 0xC0BFFFFF
def MagicForBits(bits):
  """Return the sancov magic for *bits* as two 32-bit words in native order."""
  CheckBits(bits)
  second_half = kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf
  if sys.byteorder == 'little':
    return [second_half, kMagicFirstHalf]
  return [kMagicFirstHalf, second_half]
def ReadMagicAndReturnBitness(f, path):
  """Consume the 8-byte magic from *f* and return the file's bitness.

  Args:
    f: binary file object positioned at the start of the file.
    path: file name, used only in the error message.

  Returns:
    32 or 64.

  Raises:
    Exception: if the magic does not match a known sancov header.
  """
  magic_bytes = f.read(8)
  # Fix: dropped the stray C-style trailing semicolon.
  magic_words = struct.unpack('II', magic_bytes)
  bits = 0
  # The word holding kMagicFirstHalf depends on the host byte order.
  idx = 1 if sys.byteorder == 'little' else 0
  if magic_words[idx] == kMagicFirstHalf:
    if magic_words[1-idx] == kMagic64SecondHalf:
      bits = 64
    elif magic_words[1-idx] == kMagic32SecondHalf:
      bits = 32
  if bits == 0:
    raise Exception('Bad magic word in %s' % path)
  return bits
def ReadOneFile(path):
  """Read one .sancov file and return its PCs as a tuple of ints."""
  with open(path, mode="rb") as f:
    # Determine the total file size by seeking to the end.
    f.seek(0, 2)
    size = f.tell()
    f.seek(0, 0)
    if size < 8:
      raise Exception('File %s is short (< 8 bytes)' % path)
    bits = ReadMagicAndReturnBitness(f, path)
    # The rest of the file (after the 8-byte magic) is an array of PCs.
    size -= 8
    w = size * 8 // bits
    s = struct.unpack_from(TypeCodeForStruct(bits) * (w), f.read(size))
  sys.stderr.write(
    "%s: read %d %d-bit PCs from %s\n" % (prog_name, w, bits, path))
  return s
def Merge(files):
  """Union the PCs from all *files* and return them as a sorted list."""
  merged = set()
  for path in files:
    merged.update(ReadOneFile(path))
  sys.stderr.write(
    "%s: %d files merged; %d PCs total\n" % (prog_name, len(files),
                                             len(merged))
  )
  return sorted(merged)
def PrintFiles(files):
  """Print the PCs from *files* (merged and sorted if more than one) as hex."""
  if len(files) > 1:
    s = Merge(files)
  else:  # If there is just one file, print the PCs in order.
    s = ReadOneFile(files[0])
    sys.stderr.write("%s: 1 file merged; %d PCs total\n" % (prog_name, len(s)))
  for i in s:
    print("0x%x" % i)
def MergeAndPrint(files):
  """Merge *files* and write a single binary .sancov stream to stdout."""
  if sys.stdout.isatty():
    # Refuse to dump binary data to a terminal.
    Usage()
  s = Merge(files)
  # Use 64-bit records only if some PC does not fit in 32 bits.
  bits = 32
  if max(s) > 0xFFFFFFFF:
    bits = 64
  # Python 3 needs the raw buffer for binary writes; Python 2 uses
  # sys.stdout directly.
  stdout_buf = getattr(sys.stdout, 'buffer', sys.stdout)
  array.array('I', MagicForBits(bits)).tofile(stdout_buf)
  a = struct.pack(TypeCodeForStruct(bits) * len(s), *s)
  stdout_buf.write(a)
def UnpackOneFile(path):
  """Split one packed coverage file into per-module .sancov files.

  Each record is a 12-byte header (pid, module-name length, blob size)
  followed by the module name and the raw coverage blob.  Blobs are
  appended to "<module>.<pid>.sancov" files in the current directory.
  """
  with open(path, mode="rb") as f:
    sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
    while True:
      header = f.read(12)
      # Clean EOF: no more records.
      if not header: return
      # Truncated header: fall through to the error below.
      if len(header) < 12:
        break
      pid, module_length, blob_size = struct.unpack('iII', header)
      module = f.read(module_length).decode('utf-8')
      blob = f.read(blob_size)
      assert(len(module) == module_length)
      assert(len(blob) == blob_size)
      extracted_file = "%s.%d.sancov" % (module, pid)
      sys.stderr.write("%s: extracting %s\n" % (prog_name, extracted_file))
      # The packed file may contain multiple blobs for the same pid/module
      # pair. Append to the end of the file instead of overwriting.
      with open(extracted_file, 'ab') as f2:
        f2.write(blob)
  # fail
  raise Exception('Error reading file %s' % path)
def Unpack(files):
  """Unpack every packed coverage file in *files*."""
  for packed_path in files:
    UnpackOneFile(packed_path)
def UnpackOneRawFile(path, map_path):
  """Convert one FOO.sancov.raw dump into per-module .sancov files.

  Args:
    path: the raw PC dump (no magic header; plain array of PCs).
    map_path: text map file: first line is the bitness (32/64), each
        following line is "start end base module_path" with hex fields.

  Each PC is attributed to the mapping that contains it, rebased by that
  mapping's base, and appended (with a proper magic header) to
  "<module_path>.<basename of path minus '.raw'>".
  """
  mem_map = []
  with open(map_path, mode="rt") as f_map:
    sys.stderr.write("%s: reading map %s\n" % (prog_name, map_path))
    bits = int(f_map.readline())
    if bits != 32 and bits != 64:
      raise Exception('Wrong bits size in the map')
    for line in f_map:
      parts = line.rstrip().split()
      # (start, end, base, module_path); module paths may contain spaces.
      mem_map.append((int(parts[0], 16),
                      int(parts[1], 16),
                      int(parts[2], 16),
                      ' '.join(parts[3:])))
  # Sort by start address so bisect can find the containing mapping.
  mem_map.sort(key=lambda m : m[0])
  mem_map_keys = [m[0] for m in mem_map]

  with open(path, mode="rb") as f:
    sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
    f.seek(0, 2)
    size = f.tell()
    f.seek(0, 0)
    # Raw dumps carry no magic header: the whole file is PCs.
    pcs = struct.unpack_from(TypeCodeForStruct(bits) * (size * 8 // bits), f.read(size))
    mem_map_pcs = [[] for i in range(0, len(mem_map))]

    for pc in pcs:
      if pc == 0: continue
      # Rightmost mapping whose start is <= pc.
      map_idx = bisect.bisect(mem_map_keys, pc) - 1
      (start, end, base, module_path) = mem_map[map_idx]
      assert pc >= start
      if pc >= end:
        sys.stderr.write("warning: %s: pc %x outside of any known mapping\n" % (prog_name, pc))
        continue
      # Store the module-relative PC.
      mem_map_pcs[map_idx].append(pc - base)

    for ((start, end, base, module_path), pc_list) in zip(mem_map, mem_map_pcs):
      if len(pc_list) == 0: continue
      assert path.endswith('.sancov.raw')
      # "<module>.<basename>.sancov" (strip the trailing ".raw").
      dst_path = module_path + '.' + os.path.basename(path)[:-4]
      sys.stderr.write("%s: writing %d PCs to %s\n" % (prog_name, len(pc_list), dst_path))
      sorted_pc_list = sorted(pc_list)
      pc_buffer = struct.pack(TypeCodeForStruct(bits) * len(pc_list), *sorted_pc_list)
      # NOTE(review): opening in append mode writes a fresh magic before
      # every blob, even when appending to an existing file -- presumably
      # downstream readers only consume freshly-created files; verify.
      with open(dst_path, 'ab+') as f2:
        array.array('I', MagicForBits(bits)).tofile(f2)
        f2.seek(0, 2)
        f2.write(pc_buffer)
def RawUnpack(files):
  """Unpack each FOO.sancov.raw file using its companion FOO.sancov.map."""
  for raw_path in files:
    if not raw_path.endswith('.sancov.raw'):
      raise Exception('Unexpected raw file name %s' % raw_path)
    # Replace the trailing "raw" with "map" to locate the memory map.
    map_path = raw_path[:-3] + 'map'
    UnpackOneRawFile(raw_path, map_path)
def GetInstrumentedPCs(binary):
  """Return the set of coverage-instrumented PCs in *binary*.

  This looks scary, but all it does is extract all offsets where we call:
   - __sanitizer_cov() or __sanitizer_cov_with_check(),
   - with call or callq,
   - directly or via PLT.
  """
  # Fix: raw string literals -- sequences like '\s' and '\+' are invalid
  # escape sequences in ordinary strings (DeprecationWarning today, an
  # error in future Python versions).  The command's byte content is
  # unchanged.
  cmd = r"objdump -d %s | " \
        r"grep '^\s\+[0-9a-f]\+:.*\scall\(q\|\)\s\+[0-9a-f]\+ <__sanitizer_cov\(_with_check\|\|_trace_pc_guard\)\(@plt\|\)>' | " \
        r"grep '^\s\+[0-9a-f]\+' -o" % binary
  proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                          shell=True)
  proc.stdin.close()
  # The PCs we get from objdump are off by 4 bytes, as they point to the
  # beginning of the callq instruction. Empirically this is true on x86 and
  # x86_64.
  return set(int(line.strip(), 16) + 4 for line in proc.stdout)
def PrintMissing(binary):
  """Print (as hex) the instrumented PCs of *binary* not covered per stdin.

  stdin carries one hex PC per line (the output of the "print" command).
  """
  if not os.path.isfile(binary):
    raise Exception('File not found: %s' % binary)
  instrumented = GetInstrumentedPCs(binary)
  sys.stderr.write("%s: found %d instrumented PCs in %s\n" % (prog_name,
                                                              len(instrumented),
                                                              binary))
  covered = set(int(line, 16) for line in sys.stdin)
  sys.stderr.write("%s: read %d PCs from stdin\n" % (prog_name, len(covered)))
  missing = instrumented - covered
  sys.stderr.write("%s: %d PCs missing from coverage\n" % (prog_name, len(missing)))
  # If the set difference removed fewer PCs than stdin supplied, some
  # covered PCs were never found in the binary.
  if (len(missing) > len(instrumented) - len(covered)):
    sys.stderr.write(
      "%s: WARNING: stdin contains PCs not found in binary\n" % prog_name
    )
  for pc in sorted(missing):
    print("0x%x" % pc)
if __name__ == '__main__':
  prog_name = sys.argv[0]
  # Every command needs a subcommand name plus at least one argument.
  if len(sys.argv) <= 2:
    # Fix: dropped the stray C-style trailing semicolon.
    Usage()

  # "missing" reads the PC list from stdin and takes exactly one binary.
  if sys.argv[1] == "missing":
    if len(sys.argv) != 3:
      Usage()
    PrintMissing(sys.argv[2])
    exit(0)

  # All remaining commands operate on a glob-expanded file list.
  file_list = []
  for f in sys.argv[2:]:
    file_list += glob.glob(f)
  if not file_list:
    Usage()

  if sys.argv[1] == "print":
    PrintFiles(file_list)
  elif sys.argv[1] == "merge":
    MergeAndPrint(file_list)
  elif sys.argv[1] == "unpack":
    Unpack(file_list)
  elif sys.argv[1] == "rawunpack":
    RawUnpack(file_list)
  else:
    Usage()