summary | refs | log | tree | commit | diff | stats
path: root/scripts
diff options
context:
space:
mode:
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/archive-source.sh3
-rw-r--r--scripts/check_sparse.py25
-rwxr-xr-xscripts/clean-includes2
-rwxr-xr-xscripts/create_config129
-rw-r--r--[-rwxr-xr-x]scripts/decodetree.py2
-rwxr-xr-xscripts/device-crash-test2
-rw-r--r--scripts/feature_to_c.sh24
-rwxr-xr-xscripts/grepy.sh3
-rwxr-xr-x[-rw-r--r--]scripts/hxtool2
-rw-r--r--[-rwxr-xr-x]scripts/minikconf.py0
-rw-r--r--scripts/mtest2make.py102
-rwxr-xr-xscripts/ninjatool.py1002
-rwxr-xr-xscripts/oss-fuzz/build.sh32
-rw-r--r--[-rwxr-xr-x]scripts/qapi-gen.py0
-rwxr-xr-xscripts/qemu-version.sh25
-rw-r--r--[-rwxr-xr-x]scripts/tracetool.py0
-rw-r--r--scripts/tracetool/backend/dtrace.py2
-rw-r--r--scripts/tracetool/backend/ust.py6
-rw-r--r--scripts/tracetool/format/c.py5
-rw-r--r--scripts/tracetool/format/tcg_h.py2
-rw-r--r--scripts/tracetool/format/tcg_helper_c.py2
-rwxr-xr-xscripts/undefsym.sh20
22 files changed, 1210 insertions(+), 180 deletions(-)
diff --git a/scripts/archive-source.sh b/scripts/archive-source.sh
index fb5d6b3918..c6169db69f 100755
--- a/scripts/archive-source.sh
+++ b/scripts/archive-source.sh
@@ -26,7 +26,8 @@ sub_file="${sub_tdir}/submodule.tar"
# independent of what the developer currently has initialized
# in their checkout, because the build environment is completely
# different to the host OS.
-submodules="dtc slirp ui/keycodemapdb tests/fp/berkeley-softfloat-3 tests/fp/berkeley-testfloat-3"
+submodules="dtc slirp meson ui/keycodemapdb"
+submodules="$submodules tests/fp/berkeley-softfloat-3 tests/fp/berkeley-testfloat-3"
sub_deinit=""
function cleanup() {
diff --git a/scripts/check_sparse.py b/scripts/check_sparse.py
new file mode 100644
index 0000000000..0de7aa55d9
--- /dev/null
+++ b/scripts/check_sparse.py
@@ -0,0 +1,25 @@
+#! /usr/bin/env python3
+
+# Invoke sparse based on the contents of compile_commands.json
+
+import json
+import subprocess
+import sys
+import shlex
+
+def extract_cflags(shcmd):
+ cflags = shlex.split(shcmd)
+ return [x for x in cflags
+ if x.startswith('-D') or x.startswith('-I') or x.startswith('-W')
+ or x.startswith('-std=')]
+
+cflags = sys.argv[1:-1]
+with open(sys.argv[-1], 'r') as fd:
+ compile_commands = json.load(fd)
+
+for cmd in compile_commands:
+ cmd = ['sparse'] + cflags + extract_cflags(cmd['command']) + [cmd['file']]
+ print(' '.join((shlex.quote(x) for x in cmd)))
+ r = subprocess.run(cmd)
+ if r.returncode != 0:
+ sys.exit(r.returncode)
diff --git a/scripts/clean-includes b/scripts/clean-includes
index 795b3bea31..aaa7d4ceb3 100755
--- a/scripts/clean-includes
+++ b/scripts/clean-includes
@@ -113,7 +113,7 @@ EOT
for f in "$@"; do
case "$f" in
- *.inc.c)
+ *.c.inc)
# These aren't standalone C source files
echo "SKIPPING $f (not a standalone source file)"
continue
diff --git a/scripts/create_config b/scripts/create_config
deleted file mode 100755
index 6d8f08b39d..0000000000
--- a/scripts/create_config
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/bin/sh
-
-echo "/* Automatically generated by create_config - do not modify */"
-
-while read line; do
-
-case $line in
- VERSION=*) # configuration
- version=${line#*=}
- major=$(echo "$version" | cut -d. -f1)
- minor=$(echo "$version" | cut -d. -f2)
- micro=$(echo "$version" | cut -d. -f3)
- echo "#define QEMU_VERSION \"$version\""
- echo "#define QEMU_VERSION_MAJOR $major"
- echo "#define QEMU_VERSION_MINOR $minor"
- echo "#define QEMU_VERSION_MICRO $micro"
- ;;
- qemu_*dir=* | qemu_*path=*) # qemu-specific directory configuration
- name=${line%=*}
- value=${line#*=}
- define_name=$(echo $name | LC_ALL=C tr '[a-z]' '[A-Z]')
- eval "define_value=\"$value\""
- echo "#define CONFIG_$define_name \"$define_value\""
- # save for the next definitions
- eval "$name=\$define_value"
- ;;
- prefix=*)
- # save for the next definitions
- prefix=${line#*=}
- ;;
- IASL=*) # iasl executable
- value=${line#*=}
- echo "#define CONFIG_IASL $value"
- ;;
- CONFIG_AUDIO_DRIVERS=*)
- drivers=${line#*=}
- echo "#define CONFIG_AUDIO_DRIVERS \\"
- for drv in $drivers; do
- echo " \"${drv}\",\\"
- done
- echo ""
- ;;
- CONFIG_BDRV_RW_WHITELIST=*)
- echo "#define CONFIG_BDRV_RW_WHITELIST\\"
- for drv in ${line#*=}; do
- echo " \"${drv}\",\\"
- done
- echo " NULL"
- ;;
- CONFIG_BDRV_RO_WHITELIST=*)
- echo "#define CONFIG_BDRV_RO_WHITELIST\\"
- for drv in ${line#*=}; do
- echo " \"${drv}\",\\"
- done
- echo " NULL"
- ;;
- CONFIG_*=y) # configuration
- name=${line%=*}
- echo "#define $name 1"
- ;;
- CONFIG_*=n) # configuration
- ;;
- CONFIG_*=*) # configuration
- name=${line%=*}
- value=${line#*=}
- echo "#define $name $value"
- ;;
- HAVE_*=y) # configuration
- name=${line%=*}
- echo "#define $name 1"
- ;;
- HAVE_*=*) # configuration
- name=${line%=*}
- value=${line#*=}
- echo "#define $name $value"
- ;;
- ARCH=*) # configuration
- arch=${line#*=}
- arch_name=$(echo $arch | LC_ALL=C tr '[a-z]' '[A-Z]')
- echo "#define HOST_$arch_name 1"
- ;;
- HOST_USB=*)
- # do nothing
- ;;
- HOST_CC=*)
- # do nothing
- ;;
- HOST_*=y) # configuration
- name=${line%=*}
- echo "#define $name 1"
- ;;
- HOST_*=*) # configuration
- name=${line%=*}
- value=${line#*=}
- echo "#define $name $value"
- ;;
- TARGET_BASE_ARCH=*) # configuration
- target_base_arch=${line#*=}
- base_arch_name=$(echo $target_base_arch | LC_ALL=C tr '[a-z]' '[A-Z]')
- echo "#define TARGET_$base_arch_name 1"
- ;;
- TARGET_XML_FILES=*)
- # do nothing
- ;;
- TARGET_ABI_DIR=*)
- # do nothing
- ;;
- TARGET_NAME=*)
- target_name=${line#*=}
- echo "#define TARGET_NAME \"$target_name\""
- ;;
- TARGET_DIRS=*)
- # do nothing
- ;;
- TARGET_*=y) # configuration
- name=${line%=*}
- echo "#define $name 1"
- ;;
- TARGET_*=*) # configuration
- name=${line%=*}
- value=${line#*=}
- echo "#define $name $value"
- ;;
- DSOSUF=*)
- echo "#define HOST_DSOSUF \"${line#*=}\""
- ;;
-esac
-
-done # read
diff --git a/scripts/decodetree.py b/scripts/decodetree.py
index 694757b6c2..4cd1e10904 100755..100644
--- a/scripts/decodetree.py
+++ b/scripts/decodetree.py
@@ -1257,7 +1257,7 @@ def main():
long_opts = ['decode=', 'translate=', 'output=', 'insnwidth=',
'static-decode=', 'varinsnwidth=']
try:
- (opts, args) = getopt.getopt(sys.argv[1:], 'o:vw:', long_opts)
+ (opts, args) = getopt.gnu_getopt(sys.argv[1:], 'o:vw:', long_opts)
except getopt.GetoptError as err:
error(0, err)
for o, a in opts:
diff --git a/scripts/device-crash-test b/scripts/device-crash-test
index 305d0427af..866baf7058 100755
--- a/scripts/device-crash-test
+++ b/scripts/device-crash-test
@@ -383,7 +383,7 @@ def binariesToTest(args, testcase):
if args.qemu:
r = args.qemu
else:
- r = glob.glob('./*-softmmu/qemu-system-*')
+ r = glob.glob('./qemu-system-*')
return r
diff --git a/scripts/feature_to_c.sh b/scripts/feature_to_c.sh
index c8ce9b88f6..b1169899c1 100644
--- a/scripts/feature_to_c.sh
+++ b/scripts/feature_to_c.sh
@@ -19,16 +19,8 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
-output=$1
-shift
-
-if test -z "$output" || test -z "$1"; then
- echo "Usage: $0 OUTPUTFILE INPUTFILE..."
- exit 1
-fi
-
-if test -e "$output"; then
- echo "Output file \"$output\" already exists; refusing to overwrite."
+if test -z "$1"; then
+ echo "Usage: $0 INPUTFILE..."
exit 1
fi
@@ -60,17 +52,17 @@ for input; do
printf "'\''\\n'\'', \n"
} END {
print " 0 };"
- }' < $input >> $output
+ }' < $input
done
-echo >> $output
-echo "const char *const xml_builtin[][2] = {" >> $output
+echo
+echo "const char *const xml_builtin[][2] = {"
for input; do
basename=$(echo $input | sed 's,.*/,,')
arrayname=xml_feature_$(echo $input | sed 's,.*/,,; s/[-.]/_/g')
- echo " { \"$basename\", $arrayname }," >> $output
+ echo " { \"$basename\", $arrayname },"
done
-echo " { (char *)0, (char *)0 }" >> $output
-echo "};" >> $output
+echo " { (char *)0, (char *)0 }"
+echo "};"
diff --git a/scripts/grepy.sh b/scripts/grepy.sh
new file mode 100755
index 0000000000..aee46ddc8d
--- /dev/null
+++ b/scripts/grepy.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+grep -h '=y$' "$@" | sort -u
diff --git a/scripts/hxtool b/scripts/hxtool
index 7b1452f3cf..80516b9437 100644..100755
--- a/scripts/hxtool
+++ b/scripts/hxtool
@@ -19,6 +19,6 @@ hxtoh()
case "$1" in
"-h") hxtoh ;;
*) exit 1 ;;
-esac
+esac < "$2"
exit 0
diff --git a/scripts/minikconf.py b/scripts/minikconf.py
index bcd91015d3..bcd91015d3 100755..100644
--- a/scripts/minikconf.py
+++ b/scripts/minikconf.py
diff --git a/scripts/mtest2make.py b/scripts/mtest2make.py
new file mode 100644
index 0000000000..bdb257bbd9
--- /dev/null
+++ b/scripts/mtest2make.py
@@ -0,0 +1,102 @@
+#! /usr/bin/env python3
+
+# Create Makefile targets to run tests, from Meson's test introspection data.
+#
+# Author: Paolo Bonzini <pbonzini@redhat.com>
+
+from collections import defaultdict
+import json
+import os
+import shlex
+import sys
+
+class Suite(object):
+ def __init__(self):
+ self.tests = list()
+ self.slow_tests = list()
+ self.executables = set()
+
+print('''
+SPEED = quick
+
+# $1 = test command, $2 = test name
+.test-human-tap = $1 < /dev/null | ./scripts/tap-driver.pl --test-name="$2" $(if $(V),,--show-failures-only)
+.test-human-exitcode = $1 < /dev/null
+.test-tap-tap = $1 < /dev/null | sed "s/^[a-z][a-z]* [0-9]*/& $2/" || true
+.test-tap-exitcode = printf "%s\\n" 1..1 "`$1 < /dev/null > /dev/null || echo "not "`ok 1 $2"
+.test.print = echo $(if $(V),'$1','Running test $2') >&3
+.test.env = MALLOC_PERTURB_=$${MALLOC_PERTURB_:-$$(( $${RANDOM:-0} % 255 + 1))}
+
+# $1 = test name, $2 = test target (human or tap)
+.test.run = $(call .test.print,$(.test.cmd.$1),$(.test.name.$1)) && $(call .test-$2-$(.test.driver.$1),$(.test.cmd.$1),$(.test.name.$1))
+
+define .test.human_k
+ @exec 3>&1; rc=0; $(foreach TEST, $1, $(call .test.run,$(TEST),human) || rc=$$?;) \\
+ exit $$rc
+endef
+define .test.human_no_k
+ $(foreach TEST, $1, @exec 3>&1; $(call .test.run,$(TEST),human)
+)
+endef
+.test.human = \\
+ $(if $(findstring k, $(MAKEFLAGS)), $(.test.human_k), $(.test.human_no_k))
+
+define .test.tap
+ @exec 3>&1; { $(foreach TEST, $1, $(call .test.run,$(TEST),tap); ) } \\
+ | ./scripts/tap-merge.pl | tee "$@" \\
+ | ./scripts/tap-driver.pl $(if $(V),, --show-failures-only)
+endef
+''')
+
+suites = defaultdict(Suite)
+i = 0
+for test in json.load(sys.stdin):
+ env = ' '.join(('%s=%s' % (shlex.quote(k), shlex.quote(v))
+ for k, v in test['env'].items()))
+ executable = os.path.relpath(test['cmd'][0])
+ if test['workdir'] is not None:
+ test['cmd'][0] = os.path.relpath(test['cmd'][0], test['workdir'])
+ else:
+ test['cmd'][0] = executable
+ cmd = '$(.test.env) %s %s' % (env, ' '.join((shlex.quote(x) for x in test['cmd'])))
+ if test['workdir'] is not None:
+ cmd = '(cd %s && %s)' % (shlex.quote(test['workdir']), cmd)
+ driver = test['protocol'] if 'protocol' in test else 'exitcode'
+
+ i += 1
+ print('.test.name.%d := %s' % (i, test['name']))
+ print('.test.driver.%d := %s' % (i, driver))
+ print('.test.cmd.%d := %s' % (i, cmd))
+
+ test_suites = test['suite'] or ['default']
+ is_slow = any(s.endswith('-slow') for s in test_suites)
+ for s in test_suites:
+ # The suite name in the introspection info is "PROJECT:SUITE"
+ s = s.split(':')[1]
+ if s.endswith('-slow'):
+ s = s[:-5]
+ if is_slow:
+ suites[s].slow_tests.append(i)
+ else:
+ suites[s].tests.append(i)
+ suites[s].executables.add(executable)
+
+print('.PHONY: check check-report.tap')
+print('check:')
+print('check-report.tap:')
+print('\t@cat $^ | scripts/tap-merge.pl >$@')
+for name, suite in suites.items():
+ executables = ' '.join(suite.executables)
+ slow_test_numbers = ' '.join((str(x) for x in suite.slow_tests))
+ test_numbers = ' '.join((str(x) for x in suite.tests))
+ print('.test.suite-quick.%s := %s' % (name, test_numbers))
+ print('.test.suite-slow.%s := $(.test.suite-quick.%s) %s' % (name, name, slow_test_numbers))
+ print('check-build: %s' % executables)
+ print('.PHONY: check-%s' % name)
+ print('.PHONY: check-report-%s.tap' % name)
+ print('check: check-%s' % name)
+ print('check-%s: all %s' % (name, executables))
+ print('\t$(call .test.human, $(.test.suite-$(SPEED).%s))' % (name, ))
+ print('check-report.tap: check-report-%s.tap' % name)
+ print('check-report-%s.tap: %s' % (name, executables))
+ print('\t$(call .test.tap, $(.test.suite-$(SPEED).%s))' % (name, ))
diff --git a/scripts/ninjatool.py b/scripts/ninjatool.py
new file mode 100755
index 0000000000..cc77d51aa8
--- /dev/null
+++ b/scripts/ninjatool.py
@@ -0,0 +1,1002 @@
+#! /bin/sh
+
+# Python module for parsing and processing .ninja files.
+#
+# Author: Paolo Bonzini
+#
+# Copyright (C) 2019 Red Hat, Inc.
+
+
+# We don't want to put "#! @PYTHON@" as the shebang and
+# make the file executable, so instead we make this a
+# Python/shell polyglot. The first line below starts a
+# multiline string literal for Python, while it is just
+# ":" for bash. The closing of the multiline string literal
+# is never parsed by bash since it exits before.
+
+'''':
+case "$0" in
+ /*) me=$0 ;;
+ *) me=$(command -v "$0") ;;
+esac
+python="@PYTHON@"
+case $python in
+ @*) python=python3 ;;
+esac
+exec $python "$me" "$@"
+exit 1
+'''
+
+
+from collections import namedtuple, defaultdict
+import sys
+import os
+import re
+import json
+import argparse
+import shutil
+
+
+class InvalidArgumentError(Exception):
+ pass
+
+# faster version of os.path.normpath: do nothing unless there is a double
+# slash or a "." or ".." component. The filter does not have to be super
+# precise, but it has to be fast. os.path.normpath is the hottest function
+# for ninja2make without this optimization!
+if os.path.sep == '/':
+ def normpath(path, _slow_re=re.compile('/[./]')):
+ return os.path.normpath(path) if _slow_re.search(path) or path[0] == '.' else path
+else:
+ normpath = os.path.normpath
+
+
+# ---- lexer and parser ----
+
+PATH_RE = r"[^$\s:|]+|\$[$ :]|\$[a-zA-Z0-9_-]+|\$\{[a-zA-Z0-9_.-]+\}"
+
+SIMPLE_PATH_RE = re.compile(r"[^$\s:|]+")
+IDENT_RE = re.compile(r"[a-zA-Z0-9_.-]+$")
+STRING_RE = re.compile(r"(" + PATH_RE + r"|[\s:|])(?:\r?\n)?|.")
+TOPLEVEL_RE = re.compile(r"([=:#]|\|\|?|^ +|(?:" + PATH_RE + r")+)\s*|.")
+VAR_RE=re.compile(r'\$\$|\$\{([^}]*)\}')
+
+BUILD = 1
+POOL = 2
+RULE = 3
+DEFAULT = 4
+EQUALS = 5
+COLON = 6
+PIPE = 7
+PIPE2 = 8
+IDENT = 9
+INCLUDE = 10
+INDENT = 11
+EOL = 12
+
+
+class LexerError(Exception):
+ pass
+
+
+class ParseError(Exception):
+ pass
+
+
+class NinjaParserEvents(object):
+ def __init__(self, parser):
+ self.parser = parser
+
+ def dollar_token(self, word, in_path=False):
+ return '$$' if word == '$' else word
+
+ def variable_expansion_token(self, varname):
+ return '${%s}' % varname
+
+ def variable(self, name, arg):
+ pass
+
+ def begin_file(self):
+ pass
+
+ def end_file(self):
+ pass
+
+ def end_scope(self):
+ pass
+
+ def begin_pool(self, name):
+ pass
+
+ def begin_rule(self, name):
+ pass
+
+ def begin_build(self, out, iout, rule, in_, iin, orderdep):
+ pass
+
+ def default(self, targets):
+ pass
+
+
+class NinjaParser(object):
+
+ InputFile = namedtuple('InputFile', 'filename iter lineno')
+
+ def __init__(self, filename, input):
+ self.stack = []
+ self.top = None
+ self.iter = None
+ self.lineno = None
+ self.match_keyword = False
+ self.push(filename, input)
+
+ def file_changed(self):
+ self.iter = self.top.iter
+ self.lineno = self.top.lineno
+ if self.top.filename is not None:
+ os.chdir(os.path.dirname(self.top.filename) or '.')
+
+ def push(self, filename, input):
+ if self.top:
+ self.top.lineno = self.lineno
+ self.top.iter = self.iter
+ self.stack.append(self.top)
+ self.top = self.InputFile(filename=filename or 'stdin',
+ iter=self._tokens(input), lineno=0)
+ self.file_changed()
+
+ def pop(self):
+ if len(self.stack):
+ self.top = self.stack[-1]
+ self.stack.pop()
+ self.file_changed()
+ else:
+ self.top = self.iter = None
+
+ def next_line(self, input):
+ line = next(input).rstrip()
+ self.lineno += 1
+ while len(line) and line[-1] == '$':
+ line = line[0:-1] + next(input).strip()
+ self.lineno += 1
+ return line
+
+ def print_token(self, tok):
+ if tok == EOL:
+ return "end of line"
+ if tok == BUILD:
+ return '"build"'
+ if tok == POOL:
+ return '"pool"'
+ if tok == RULE:
+ return '"rule"'
+ if tok == DEFAULT:
+ return '"default"'
+ if tok == EQUALS:
+ return '"="'
+ if tok == COLON:
+ return '":"'
+ if tok == PIPE:
+ return '"|"'
+ if tok == PIPE2:
+ return '"||"'
+ if tok == INCLUDE:
+ return '"include"'
+ if tok == IDENT:
+ return 'identifier'
+ return '"%s"' % tok
+
+ def error(self, msg):
+ raise LexerError("%s:%d: %s" % (self.stack[-1].filename, self.lineno, msg))
+
+ def parse_error(self, msg):
+ raise ParseError("%s:%d: %s" % (self.stack[-1].filename, self.lineno, msg))
+
+ def expected(self, expected, tok):
+ msg = "found %s, expected " % (self.print_token(tok), )
+ for i, exp_tok in enumerate(expected):
+ if i > 0:
+ msg = msg + (' or ' if i == len(expected) - 1 else ', ')
+ msg = msg + self.print_token(exp_tok)
+ self.parse_error(msg)
+
+ def _variable_tokens(self, value):
+ for m in STRING_RE.finditer(value):
+ match = m.group(1)
+ if not match:
+ self.error("unexpected '%s'" % (m.group(0), ))
+ yield match
+
+ def _tokens(self, input):
+ while True:
+ try:
+ line = self.next_line(input)
+ except StopIteration:
+ return
+ for m in TOPLEVEL_RE.finditer(line):
+ match = m.group(1)
+ if not match:
+ self.error("unexpected '%s'" % (m.group(0), ))
+ if match == ':':
+ yield COLON
+ continue
+ if match == '|':
+ yield PIPE
+ continue
+ if match == '||':
+ yield PIPE2
+ continue
+ if match[0] == ' ':
+ yield INDENT
+ continue
+ if match[0] == '=':
+ yield EQUALS
+ value = line[m.start() + 1:].lstrip()
+ yield from self._variable_tokens(value)
+ break
+ if match[0] == '#':
+ break
+
+ # identifier
+ if self.match_keyword:
+ if match == 'build':
+ yield BUILD
+ continue
+ if match == 'pool':
+ yield POOL
+ continue
+ if match == 'rule':
+ yield RULE
+ continue
+ if match == 'default':
+ yield DEFAULT
+ continue
+ if match == 'include':
+ filename = line[m.start() + 8:].strip()
+ self.push(filename, open(filename, 'r'))
+ break
+ if match == 'subninja':
+ self.error('subninja is not supported')
+ yield match
+ yield EOL
+
+ def parse(self, events):
+ global_var = True
+
+ def look_for(*expected):
+ # The last token in the token stream is always EOL. This
+ # is exploited to avoid catching StopIteration everywhere.
+ tok = next(self.iter)
+ if tok not in expected:
+ self.expected(expected, tok)
+ return tok
+
+ def look_for_ident(*expected):
+ tok = next(self.iter)
+ if isinstance(tok, str):
+ if not IDENT_RE.match(tok):
+ self.parse_error('variable expansion not allowed')
+ elif tok not in expected:
+ self.expected(expected + (IDENT,), tok)
+ return tok
+
+ def parse_assignment_rhs(gen, expected, in_path):
+ tokens = []
+ for tok in gen:
+ if not isinstance(tok, str):
+ if tok in expected:
+ break
+ self.expected(expected + (IDENT,), tok)
+ if tok[0] != '$':
+ tokens.append(tok)
+ elif tok == '$ ' or tok == '$$' or tok == '$:':
+ tokens.append(events.dollar_token(tok[1], in_path))
+ else:
+ var = tok[2:-1] if tok[1] == '{' else tok[1:]
+ tokens.append(events.variable_expansion_token(var))
+ else:
+ # gen must have raised StopIteration
+ tok = None
+
+ if tokens:
+ # Fast path avoiding str.join()
+ value = tokens[0] if len(tokens) == 1 else ''.join(tokens)
+ else:
+ value = None
+ return value, tok
+
+ def look_for_path(*expected):
+ # paths in build rules are parsed one space-separated token
+ # at a time and expanded
+ token = next(self.iter)
+ if not isinstance(token, str):
+ return None, token
+ # Fast path if there are no dollar and variable expansion
+ if SIMPLE_PATH_RE.match(token):
+ return token, None
+ gen = self._variable_tokens(token)
+ return parse_assignment_rhs(gen, expected, True)
+
+ def parse_assignment(tok):
+ name = tok
+ assert isinstance(name, str)
+ look_for(EQUALS)
+ value, tok = parse_assignment_rhs(self.iter, (EOL,), False)
+ assert tok == EOL
+ events.variable(name, value)
+
+ def parse_build():
+ # parse outputs
+ out = []
+ iout = []
+ while True:
+ value, tok = look_for_path(COLON, PIPE)
+ if value is None:
+ break
+ out.append(value)
+ if tok == PIPE:
+ while True:
+ value, tok = look_for_path(COLON)
+ if value is None:
+ break
+ iout.append(value)
+
+ # parse rule
+ assert tok == COLON
+ rule = look_for_ident()
+
+ # parse inputs and dependencies
+ in_ = []
+ iin = []
+ orderdep = []
+ while True:
+ value, tok = look_for_path(PIPE, PIPE2, EOL)
+ if value is None:
+ break
+ in_.append(value)
+ if tok == PIPE:
+ while True:
+ value, tok = look_for_path(PIPE2, EOL)
+ if value is None:
+ break
+ iin.append(value)
+ if tok == PIPE2:
+ while True:
+ value, tok = look_for_path(EOL)
+ if value is None:
+ break
+ orderdep.append(value)
+ assert tok == EOL
+ events.begin_build(out, iout, rule, in_, iin, orderdep)
+ nonlocal global_var
+ global_var = False
+
+ def parse_pool():
+ # pool declarations are ignored. Just gobble all the variables
+ ident = look_for_ident()
+ look_for(EOL)
+ events.begin_pool(ident)
+ nonlocal global_var
+ global_var = False
+
+ def parse_rule():
+ ident = look_for_ident()
+ look_for(EOL)
+ events.begin_rule(ident)
+ nonlocal global_var
+ global_var = False
+
+ def parse_default():
+ idents = []
+ while True:
+ ident = look_for_ident(EOL)
+ if ident == EOL:
+ break
+ idents.append(ident)
+ events.default(idents)
+
+ def parse_declaration(tok):
+ if tok == EOL:
+ return
+
+ nonlocal global_var
+ if tok == INDENT:
+ if global_var:
+ self.parse_error('indented line outside rule or edge')
+ tok = look_for_ident(EOL)
+ if tok == EOL:
+ return
+ parse_assignment(tok)
+ return
+
+ if not global_var:
+ events.end_scope()
+ global_var = True
+ if tok == POOL:
+ parse_pool()
+ elif tok == BUILD:
+ parse_build()
+ elif tok == RULE:
+ parse_rule()
+ elif tok == DEFAULT:
+ parse_default()
+ elif isinstance(tok, str):
+ parse_assignment(tok)
+ else:
+ self.expected((POOL, BUILD, RULE, INCLUDE, DEFAULT, IDENT), tok)
+
+ events.begin_file()
+ while self.iter:
+ try:
+ self.match_keyword = True
+ token = next(self.iter)
+ self.match_keyword = False
+ parse_declaration(token)
+ except StopIteration:
+ self.pop()
+ events.end_file()
+
+
+# ---- variable handling ----
+
+def expand(x, rule_vars=None, build_vars=None, global_vars=None):
+ if x is None:
+ return None
+ changed = True
+ have_dollar_replacement = False
+ while changed:
+ changed = False
+ matches = list(VAR_RE.finditer(x))
+ if not matches:
+ break
+
+ # Reverse the match so that expanding later matches does not
+ # invalidate m.start()/m.end() for earlier ones. Do not reduce $$ to $
+ # until all variables are dealt with.
+ for m in reversed(matches):
+ name = m.group(1)
+ if not name:
+ have_dollar_replacement = True
+ continue
+ changed = True
+ if build_vars and name in build_vars:
+ value = build_vars[name]
+ elif rule_vars and name in rule_vars:
+ value = rule_vars[name]
+ elif name in global_vars:
+ value = global_vars[name]
+ else:
+ value = ''
+ x = x[:m.start()] + value + x[m.end():]
+ return x.replace('$$', '$') if have_dollar_replacement else x
+
+
+class Scope(object):
+ def __init__(self, events):
+ self.events = events
+
+ def on_left_scope(self):
+ pass
+
+ def on_variable(self, key, value):
+ pass
+
+
+class BuildScope(Scope):
+ def __init__(self, events, out, iout, rule, in_, iin, orderdep, rule_vars):
+ super().__init__(events)
+ self.rule = rule
+ self.out = [events.expand_and_normalize(x) for x in out]
+ self.in_ = [events.expand_and_normalize(x) for x in in_]
+ self.iin = [events.expand_and_normalize(x) for x in iin]
+ self.orderdep = [events.expand_and_normalize(x) for x in orderdep]
+ self.iout = [events.expand_and_normalize(x) for x in iout]
+ self.rule_vars = rule_vars
+ self.build_vars = dict()
+ self._define_variable('out', ' '.join(self.out))
+ self._define_variable('in', ' '.join(self.in_))
+
+ def expand(self, x):
+ return self.events.expand(x, self.rule_vars, self.build_vars)
+
+ def on_left_scope(self):
+ self.events.variable('out', self.build_vars['out'])
+ self.events.variable('in', self.build_vars['in'])
+ self.events.end_build(self, self.out, self.iout, self.rule, self.in_,
+ self.iin, self.orderdep)
+
+ def _define_variable(self, key, value):
+ # The value has been expanded already, quote it for further
+ # expansion from rule variables
+ value = value.replace('$', '$$')
+ self.build_vars[key] = value
+
+ def on_variable(self, key, value):
+ # in and out are at the top of the lookup order and cannot
+ # be overridden. Also, unlike what the manual says, build
+ # variables only lookup global variables. They never lookup
+ # rule variables, earlier build variables, or in/out.
+ if key not in ('in', 'in_newline', 'out'):
+ self._define_variable(key, self.events.expand(value))
+
+
+class RuleScope(Scope):
+ def __init__(self, events, name, vars_dict):
+ super().__init__(events)
+ self.name = name
+ self.vars_dict = vars_dict
+ self.generator = False
+
+ def on_left_scope(self):
+ self.events.end_rule(self, self.name)
+
+ def on_variable(self, key, value):
+ self.vars_dict[key] = value
+ if key == 'generator':
+ self.generator = True
+
+
+class NinjaParserEventsWithVars(NinjaParserEvents):
+ def __init__(self, parser):
+ super().__init__(parser)
+ self.rule_vars = defaultdict(lambda: dict())
+ self.global_vars = dict()
+ self.scope = None
+
+ def variable(self, name, value):
+ if self.scope:
+ self.scope.on_variable(name, value)
+ else:
+ self.global_vars[name] = self.expand(value)
+
+ def begin_build(self, out, iout, rule, in_, iin, orderdep):
+ if rule != 'phony' and rule not in self.rule_vars:
+ self.parser.parse_error("undefined rule '%s'" % rule)
+
+ self.scope = BuildScope(self, out, iout, rule, in_, iin, orderdep, self.rule_vars[rule])
+
+ def begin_pool(self, name):
+ # pool declarations are ignored. Just gobble all the variables
+ self.scope = Scope(self)
+
+ def begin_rule(self, name):
+ if name in self.rule_vars:
+ self.parser.parse_error("duplicate rule '%s'" % name)
+ self.scope = RuleScope(self, name, self.rule_vars[name])
+
+ def end_scope(self):
+ self.scope.on_left_scope()
+ self.scope = None
+
+ # utility functions:
+
+ def expand(self, x, rule_vars=None, build_vars=None):
+ return expand(x, rule_vars, build_vars, self.global_vars)
+
+ def expand_and_normalize(self, x):
+ return normpath(self.expand(x))
+
+ # extra events not present in the superclass:
+
+ def end_build(self, scope, out, iout, rule, in_, iin, orderdep):
+ pass
+
+ def end_rule(self, scope, name):
+ pass
+
+
+# ---- test client that just prints back whatever it parsed ----
+
+class Writer(NinjaParserEvents):
+ ARGS = argparse.ArgumentParser(description='Rewrite input build.ninja to stdout.')
+
+ def __init__(self, output, parser, args):
+ super().__init__(parser)
+ self.output = output
+ self.indent = ''
+ self.had_vars = False
+
+ def dollar_token(self, word, in_path=False):
+ return '$' + word
+
+ def print(self, *args, **kwargs):
+ if len(args):
+ self.output.write(self.indent)
+ print(*args, **kwargs, file=self.output)
+
+ def variable(self, name, value):
+ self.print('%s = %s' % (name, value))
+ self.had_vars = True
+
+ def begin_scope(self):
+ self.indent = ' '
+ self.had_vars = False
+
+ def end_scope(self):
+ if self.had_vars:
+ self.print()
+ self.indent = ''
+ self.had_vars = False
+
+ def begin_pool(self, name):
+ self.print('pool %s' % name)
+ self.begin_scope()
+
+ def begin_rule(self, name):
+ self.print('rule %s' % name)
+ self.begin_scope()
+
+ def begin_build(self, outputs, implicit_outputs, rule, inputs, implicit, order_only):
+ all_outputs = list(outputs)
+ all_inputs = list(inputs)
+
+ if implicit:
+ all_inputs.append('|')
+ all_inputs.extend(implicit)
+ if order_only:
+ all_inputs.append('||')
+ all_inputs.extend(order_only)
+ if implicit_outputs:
+ all_outputs.append('|')
+ all_outputs.extend(implicit_outputs)
+
+ self.print('build %s: %s' % (' '.join(all_outputs),
+ ' '.join([rule] + all_inputs)))
+ self.begin_scope()
+
+ def default(self, targets):
+ self.print('default %s' % ' '.join(targets))
+
+
+# ---- emit compile_commands.json ----
+
+class Compdb(NinjaParserEventsWithVars):
+ ARGS = argparse.ArgumentParser(description='Emit compile_commands.json.')
+ ARGS.add_argument('rules', nargs='*',
+ help='The ninja rules to emit compilation commands for.')
+
+ def __init__(self, output, parser, args):
+ super().__init__(parser)
+ self.output = output
+ self.rules = args.rules
+ self.sep = ''
+
+ def begin_file(self):
+ self.output.write('[')
+ self.directory = os.getcwd()
+
+ def print_entry(self, **entry):
+ entry['directory'] = self.directory
+ self.output.write(self.sep + json.dumps(entry))
+ self.sep = ',\n'
+
+ def begin_build(self, out, iout, rule, in_, iin, orderdep):
+ if in_ and rule in self.rules:
+ super().begin_build(out, iout, rule, in_, iin, orderdep)
+ else:
+ self.scope = Scope(self)
+
+ def end_build(self, scope, out, iout, rule, in_, iin, orderdep):
+ self.print_entry(command=scope.expand('${command}'), file=in_[0])
+
+ def end_file(self):
+ self.output.write(']\n')
+
+
+# ---- clean output files ----
+
+class Clean(NinjaParserEventsWithVars):
+ ARGS = argparse.ArgumentParser(description='Remove output build files.')
+ ARGS.add_argument('-g', dest='generator', action='store_true',
+ help='clean generated files too')
+
+ def __init__(self, output, parser, args):
+ super().__init__(parser)
+ self.dry_run = args.dry_run
+ self.verbose = args.verbose or args.dry_run
+ self.generator = args.generator
+
+ def begin_file(self):
+ print('Cleaning... ', end=(None if self.verbose else ''), flush=True)
+ self.cnt = 0
+
+ def end_file(self):
+ print('%d files' % self.cnt)
+
+ def do_clean(self, *files):
+ for f in files:
+ if self.dry_run:
+ if os.path.exists(f):
+ self.cnt += 1
+ print('Would remove ' + f)
+ continue
+ else:
+ try:
+ if os.path.isdir(f):
+ shutil.rmtree(f)
+ else:
+ os.unlink(f)
+ self.cnt += 1
+ if self.verbose:
+ print('Removed ' + f)
+ except FileNotFoundError:
+ pass
+
+ def end_build(self, scope, out, iout, rule, in_, iin, orderdep):
+ if rule == 'phony':
+ return
+ if self.generator:
+ rspfile = scope.expand('${rspfile}')
+ if rspfile:
+ self.do_clean(rspfile)
+ if self.generator or not scope.expand('${generator}'):
+ self.do_clean(*out, *iout)
+ depfile = scope.expand('${depfile}')
+ if depfile:
+ self.do_clean(depfile)
+
+
+# ---- convert build.ninja to makefile ----
+
class Ninja2Make(NinjaParserEventsWithVars):
    """Translate a parsed build.ninja file into a GNU makefile.

    The emitted makefile reproduces ninja semantics: per-target
    ("private") variables, phony targets, depfiles, response files,
    and "restat" rules (emulated with stamp files).
    """

    ARGS = argparse.ArgumentParser(description='Convert build.ninja to a Makefile.')
    ARGS.add_argument('--clean', dest='emit_clean', action='store_true',
                      help='Emit clean/distclean rules.')
    ARGS.add_argument('--doublecolon', action='store_true',
                      help='Emit double-colon rules for phony targets.')
    # default=[] so that running the tool without --omit does not make
    # set(args.omit) fail with a TypeError on None.
    ARGS.add_argument('--omit', metavar='TARGET', nargs='+', default=[],
                      help='Targets to omit.')

    def __init__(self, output, parser, args):
        super().__init__(parser)
        self.output = output

        self.emit_clean = args.emit_clean
        self.doublecolon = args.doublecolon
        self.omit = set(args.omit)

        if self.emit_clean:
            # clean/distclean are generated by us; drop ninja's own.
            self.omit.update(['clean', 'distclean'])

        # Lists of targets are kept in memory and emitted only at the
        # end because appending is really inefficient in GNU make.
        # We only do it when it's O(#rules) or O(#variables), but
        # never when it could be O(#targets).
        self.depfiles = list()
        self.rspfiles = list()
        self.build_vars = defaultdict(dict)
        self.rule_targets = defaultdict(list)
        self.stamp_targets = defaultdict(list)
        self.num_stamp = defaultdict(int)
        self.all_outs = set()
        self.all_ins = set()
        self.all_phony = set()
        self.seen_default = False

    def print(self, *args, **kwargs):
        # All makefile text goes to the output file given at construction.
        print(*args, **kwargs, file=self.output)

    def dollar_token(self, word, in_path=False):
        # Escape "$" for make; make cannot represent spaces in paths.
        if in_path and word == ' ':
            self.parser.parse_error('Make does not support spaces in filenames')
        return '$$' if word == '$' else word

    def print_phony(self, outs, ins):
        """Emit a make rule for a ninja phony target."""
        targets = ' '.join(outs).replace('$', '$$')
        deps = ' '.join(ins).replace('$', '$$')
        deps = deps.strip()
        if self.doublecolon:
            # ";@:" gives the rule an (empty) recipe, silencing
            # "nothing to be done" messages.
            self.print(targets + '::' + (' ' if deps else '') + deps + ';@:')
        else:
            self.print(targets + ':' + (' ' if deps else '') + deps)
        self.all_phony.update(outs)

    def begin_file(self):
        """Emit the makefile preamble (helper variables and macros)."""
        self.print(r'# This is an automatically generated file, and it shows.')
        self.print(r'ninja-default:')
        self.print(r'.PHONY: ninja-default ninja-clean ninja-distclean')
        if self.emit_clean:
            self.print(r'ninja-clean:: ninja-clean-start; $(if $V,,@)rm -f ${ninja-depfiles}')
            self.print(r'ninja-clean-start:; $(if $V,,@echo Cleaning...)')
            self.print(r'ninja-distclean:: clean; $(if $V,,@)rm -f ${ninja-rspfiles}')
            self.print(r'.PHONY: ninja-clean-start')
            self.print_phony(['clean'], ['ninja-clean'])
            self.print_phony(['distclean'], ['ninja-distclean'])
        self.print(r'vpath')
        self.print(r'NULL :=')
        self.print(r'SPACE := ${NULL} #')
        self.print(r'MAKEFLAGS += -rR')
        self.print(r'define NEWLINE')
        self.print(r'')
        self.print(r'endef')
        self.print(r'.var.in_newline = $(subst $(SPACE),$(NEWLINE),${.var.in})')
        self.print(r"ninja-command = $(if $V,,$(if ${.var.description},@printf '%s\n' '$(subst ','\'',${.var.description})' && ))${.var.command}")
        self.print(r"ninja-command-restat = $(if $V,,$(if ${.var.description},@printf '%s\n' '$(subst ','\'',${.var.description})' && ))${.var.command} && if test -e $(firstword ${.var.out}); then printf '%s\n' ${.var.out} > $@; fi")

    def end_file(self):
        """Emit everything that was accumulated during parsing."""
        def natural_sort_key(s, _nsre=re.compile('([0-9]+)')):
            # Sort "foo2" before "foo10" for stable, readable output.
            return [int(text) if text.isdigit() else text.lower()
                    for text in _nsre.split(s)]

        self.print()
        self.print('ninja-outputdirs :=')
        for rule in self.rule_vars:
            if rule == 'phony':
                continue
            self.print('ninja-targets-%s := %s' % (rule, ' '.join(self.rule_targets[rule])))
            self.print('ninja-stamp-%s := %s' % (rule, ' '.join(self.stamp_targets[rule])))
            self.print('ninja-outputdirs += $(sort $(dir ${ninja-targets-%s}))' % rule)
        self.print()
        # Pre-create all output directories, as ninja would.
        self.print('dummy := $(shell mkdir -p . $(sort $(ninja-outputdirs)))')
        self.print('ninja-depfiles :=' + ' '.join(self.depfiles))
        self.print('ninja-rspfiles :=' + ' '.join(self.rspfiles))
        self.print('-include ${ninja-depfiles}')
        self.print()
        for targets in self.build_vars:
            for name, value in self.build_vars[targets].items():
                self.print('%s: private .var.%s := %s' % (targets, name, value))
        self.print()
        if not self.seen_default:
            # No "default" statement seen: ninja's default is every
            # output that is not consumed by another build statement.
            default_targets = sorted(self.all_outs - self.all_ins, key=natural_sort_key)
            self.print('ninja-default: ' + ' '.join(default_targets))

        # This is a hack... Meson declares input meson.build files as
        # phony, because Ninja does not have an equivalent of Make's
        # "path/to/file:" declaration that ignores "path/to/file" even
        # if it is absent. However, Makefile.ninja wants to depend on
        # build.ninja, which in turn depends on these phony targets which
        # would cause Makefile.ninja to be rebuilt in a loop.
        phony_targets = sorted(self.all_phony - self.all_ins, key=natural_sort_key)
        self.print('.PHONY: ' + ' '.join(phony_targets))

    def variable(self, name, value):
        """Mirror top-level ninja variables as .var.NAME make variables."""
        super().variable(name, value)
        if self.scope is None:
            self.global_vars[name] = self.expand(value)
            self.print('.var.%s := %s' % (name, self.global_vars[name]))

    def begin_build(self, out, iout, rule, in_, iin, orderdep):
        if any(x in self.omit for x in out):
            # Omitted target: install an empty scope so end_build
            # records nothing for it.
            self.scope = Scope(self)
            return

        super().begin_build(out, iout, rule, in_, iin, orderdep)
        self.current_targets = ' '.join(self.scope.out + self.scope.iout).replace('$', '$$')

    def end_build(self, scope, out, iout, rule, in_, iin, orderdep):
        """Emit the make rule(s) for one completed build statement."""
        self.rule_targets[rule] += self.scope.out
        self.rule_targets[rule] += self.scope.iout

        self.all_outs.update(self.scope.iout)
        self.all_outs.update(self.scope.out)
        self.all_ins.update(self.scope.in_)
        self.all_ins.update(self.scope.iin)

        targets = self.current_targets
        self.current_targets = None
        if rule == 'phony':
            # Phony rules treat order-only dependencies as normal deps
            self.print_phony(out + iout, in_ + iin + orderdep)
            return

        inputs = ' '.join(in_ + iin).replace('$', '$$')
        orderonly = ' '.join(orderdep).replace('$', '$$')

        rspfile = scope.expand('${rspfile}')
        if rspfile:
            # Response files are written at conversion time, and the
            # target is made to depend on them.
            rspfile_content = scope.expand('${rspfile_content}')
            with open(rspfile, 'w') as f:
                f.write(rspfile_content)
            inputs += ' ' + rspfile
            self.rspfiles.append(rspfile)

        restat = 'restat' in self.scope.build_vars or 'restat' in self.rule_vars[rule]
        depfile = scope.expand('${depfile}')
        build_vars = {
            'command': scope.expand('${command}'),
            'description': scope.expand('${description}'),
            'out': scope.expand('${out}')
        }

        if restat and not depfile:
            # Emulate ninja's "restat": run the command on a stamp file
            # so that unchanged outputs do not trigger rebuilds.
            if len(out) == 1:
                stamp = out[0] + '.stamp'
            else:
                stamp = '%s%d.stamp' % (rule, self.num_stamp[rule])
                self.num_stamp[rule] += 1
            self.print('%s: %s; @:' % (targets, stamp))
            self.print('%s: %s | %s; ${ninja-command-restat}' % (stamp, inputs, orderonly))
            self.rule_targets[rule].append(stamp)
            self.stamp_targets[rule].append(stamp)
            self.build_vars[stamp] = build_vars
        else:
            self.print('%s: %s | %s; ${ninja-command}' % (targets, inputs, orderonly))
            self.build_vars[targets] = build_vars
        if depfile:
            self.depfiles.append(depfile)

    def end_rule(self, scope, name):
        # Note that the generator pseudo-variable could also be attached
        # to a build block rather than a rule. This is not handled here
        # in order to reduce the number of "rm" invocations. However,
        # "ninjatool.py -t clean" does that correctly.
        target = 'distclean' if scope.generator else 'clean'
        self.print('ninja-%s:: ; $(if $V,,@)rm -f ${ninja-stamp-%s}' % (target, name))
        if self.emit_clean:
            self.print('ninja-%s:: ; $(if $V,,@)rm -rf ${ninja-targets-%s}' % (target, name))

    def default(self, targets):
        """Handle a ninja "default" statement."""
        self.print("ninja-default: " + ' '.join(targets))
        self.seen_default = True
+
+
+# ---- command line parsing ----
+
+# we cannot use subparsers because tools are chosen through the "-t"
+# option.
+
class ToolAction(argparse.Action):
    """argparse action for "-t TOOL": store the tool *class* itself.

    Storing the class (rather than its name) lets the main program
    instantiate it directly.  The tool's own sub-parser also gets a
    "prog" string reflecting how it was invoked on the command line.
    """

    def __init__(self, option_strings, dest, choices, metavar='TOOL', nargs=None, **kwargs):
        if nargs is not None:
            raise ValueError("nargs not allowed")
        super().__init__(option_strings, dest, required=True, choices=choices,
                         metavar=metavar, **kwargs)

    def __call__(self, parser, namespace, value, option_string):
        selected = self.choices[value]
        setattr(namespace, self.dest, selected)
        selected.ARGS.prog = ' '.join((parser.prog, option_string, value))
+
class ToolHelpAction(argparse.Action):
    """argparse action for "-h/--help".

    If a tool was already selected with "-t", print that tool's help;
    otherwise print the main parser's help.  Exits in either case.
    """

    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        if nargs is not None:
            raise ValueError("nargs not allowed")
        super().__init__(option_strings, dest, nargs=0, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        chosen = namespace.tool
        if chosen:
            chosen.ARGS.print_help()
        else:
            parser.print_help()
        parser.exit()
+
+
# Mapping from "-t TOOL" names to the event-handler class implementing
# each tool.  The class is stored in args.tool by ToolAction.
tools = {
    'test': Writer,
    'ninja2make': Ninja2Make,
    'compdb': Compdb,
    'clean': Clean,
}

# add_help=False: "-h" is handled by ToolHelpAction so that it can show
# the selected tool's help instead of the global one.
parser = argparse.ArgumentParser(description='Process and transform build.ninja files.',
                                 add_help=False)
parser.add_argument('-C', metavar='DIR', dest='dir', default='.',
                    help='change to DIR before doing anything else')
parser.add_argument('-f', metavar='FILE', dest='file', default='build.ninja',
                    help='specify input build file [default=build.ninja]')
parser.add_argument('-n', dest='dry_run', action='store_true',
                    help='do not actually do anything')
parser.add_argument('-v', dest='verbose', action='store_true',
                    help='be more verbose')

parser.add_argument('-t', dest='tool', choices=tools, action=ToolAction,
                    help='choose the tool to run')
parser.add_argument('-h', '--help', action=ToolHelpAction,
                    help='show this help message and exit')

# Mimic ninja's own "--version" so callers probing the ninja version
# keep working when pointed at this script.
if len(sys.argv) >= 2 and sys.argv[1] == '--version':
    print('1.8')
    sys.exit(0)

# Arguments not recognized by the main parser are handed to the
# selected tool's own sub-parser (which fills in the same namespace).
args, tool_args = parser.parse_known_args()
args.tool.ARGS.parse_args(tool_args, args)

os.chdir(args.dir)
with open(args.file, 'r') as f:
    parser = NinjaParser(args.file, f)
    try:
        events = args.tool(sys.stdout, parser, args)
    except InvalidArgumentError as e:
        parser.error(str(e))
    parser.parse(events)
diff --git a/scripts/oss-fuzz/build.sh b/scripts/oss-fuzz/build.sh
index a07b3022e8..f0b7442c96 100755
--- a/scripts/oss-fuzz/build.sh
+++ b/scripts/oss-fuzz/build.sh
@@ -20,7 +20,7 @@
# e.g.
# $CXX $CXXFLAGS -std=c++11 -Iinclude \
# /path/to/name_of_fuzzer.cc -o $OUT/name_of_fuzzer \
-# $LIB_FUZZING_ENGINE /path/to/library.a
+# -fsanitize=fuzzer /path/to/library.a
fatal () {
echo "Error : ${*}, exiting."
@@ -54,10 +54,6 @@ mkdir -p $OSS_FUZZ_BUILD_DIR || fatal "mkdir $OSS_FUZZ_BUILD_DIR failed"
cd $OSS_FUZZ_BUILD_DIR || fatal "cd $OSS_FUZZ_BUILD_DIR failed"
-if [ -z ${LIB_FUZZING_ENGINE+x} ]; then
- LIB_FUZZING_ENGINE="-fsanitize=fuzzer"
-fi
-
if [ -z ${OUT+x} ]; then
DEST_DIR=$(realpath "./DEST_DIR")
else
@@ -67,27 +63,27 @@ fi
mkdir -p "$DEST_DIR/lib/" # Copy the shared libraries here
# Build once to get the list of dynamic lib paths, and copy them over
-../configure --disable-werror --cc="$CC" --cxx="$CXX" \
+../configure --disable-werror --cc="$CC" --cxx="$CXX" --enable-fuzzing \
+ --prefix="$DEST_DIR" --bindir="$DEST_DIR" --datadir="$DEST_DIR/data/" \
--extra-cflags="$EXTRA_CFLAGS" --target-list="i386-softmmu"
-if ! make CONFIG_FUZZ=y CFLAGS="$LIB_FUZZING_ENGINE" "-j$(nproc)" \
- i386-softmmu/fuzz; then
+if ! make "-j$(nproc)" qemu-fuzz-i386; then
fatal "Build failed. Please specify a compiler with fuzzing support"\
- "using the \$CC and \$CXX environemnt variables, or specify a"\
- "\$LIB_FUZZING_ENGINE compatible with your compiler"\
+ "using the \$CC and \$CXX environemnt variables"\
"\nFor example: CC=clang CXX=clang++ $0"
fi
-for i in $(ldd ./i386-softmmu/qemu-fuzz-i386 | cut -f3 -d' '); do
+for i in $(ldd ./qemu-fuzz-i386 | cut -f3 -d' '); do
cp "$i" "$DEST_DIR/lib/"
done
-rm ./i386-softmmu/qemu-fuzz-i386
+rm qemu-fuzz-i386
# Build a second time to build the final binary with correct rpath
-../configure --bindir="$DEST_DIR" --datadir="$DEST_DIR/data/" --disable-werror \
- --cc="$CC" --cxx="$CXX" --extra-cflags="$EXTRA_CFLAGS" \
- --extra-ldflags="-Wl,-rpath,'\$\$ORIGIN/lib'"
-make CONFIG_FUZZ=y CFLAGS="$LIB_FUZZING_ENGINE" "-j$(nproc)" i386-softmmu/fuzz
+../configure --disable-werror --cc="$CC" --cxx="$CXX" --enable-fuzzing \
+ --prefix="$DEST_DIR" --bindir="$DEST_DIR" --datadir="$DEST_DIR/data/" \
+ --extra-cflags="$EXTRA_CFLAGS" --extra-ldflags="-Wl,-rpath,'\$\$ORIGIN/lib'" \
+ --target-list="i386-softmmu"
+make "-j$(nproc)" qemu-fuzz-i386 V=1
# Copy over the datadir
cp -r ../pc-bios/ "$DEST_DIR/pc-bios"
@@ -96,9 +92,9 @@ cp -r ../pc-bios/ "$DEST_DIR/pc-bios"
# of available fuzz-targets. Copy over the qemu-fuzz-i386, naming it according
# to each available fuzz target (See 05509c8e6d fuzz: select fuzz target using
# executable name)
-for target in $(./i386-softmmu/qemu-fuzz-i386 | awk '$1 ~ /\*/ {print $2}');
+for target in $(./qemu-fuzz-i386 | awk '$1 ~ /\*/ {print $2}');
do
- cp ./i386-softmmu/qemu-fuzz-i386 "$DEST_DIR/qemu-fuzz-i386-target-$target"
+ cp qemu-fuzz-i386 "$DEST_DIR/qemu-fuzz-i386-target-$target"
done
echo "Done. The fuzzers are located in $DEST_DIR"
diff --git a/scripts/qapi-gen.py b/scripts/qapi-gen.py
index 4b03f7d53b..4b03f7d53b 100755..100644
--- a/scripts/qapi-gen.py
+++ b/scripts/qapi-gen.py
diff --git a/scripts/qemu-version.sh b/scripts/qemu-version.sh
new file mode 100755
index 0000000000..4847385e42
--- /dev/null
+++ b/scripts/qemu-version.sh
@@ -0,0 +1,25 @@
#!/bin/sh

# Emit the QEMU version defines (QEMU_PKGVERSION / QEMU_FULL_VERSION)
# on stdout.
#
# Usage: qemu-version.sh DIR PKGVERSION VERSION
#   DIR        - source directory (used to locate .git)
#   PKGVERSION - distro/package version; if empty, derived from git
#   VERSION    - base QEMU version string

set -eu

dir="$1"
pkgversion="$2"
version="$3"

# Bug fixes vs. the original:
# - "$pkgversion"] lacked a space before "]", making the test invalid.
# - "git describe | echo" discarded git's output (echo ignores stdin),
#   so pkgversion was always empty; use "|| echo" to fall back to an
#   empty string only when git describe fails (e.g. no tags).
if [ -z "$pkgversion" ]; then
    cd "$dir"
    if [ -e .git ]; then
        pkgversion=$(git describe --match 'v*' --dirty 2>/dev/null || echo "")
    fi
fi

if [ -n "$pkgversion" ]; then
    fullversion="$version ($pkgversion)"
else
    fullversion="$version"
fi

cat <<EOF
#define QEMU_PKGVERSION "$pkgversion"
#define QEMU_FULL_VERSION "$fullversion"
EOF
diff --git a/scripts/tracetool.py b/scripts/tracetool.py
index 31146242b7..31146242b7 100755..100644
--- a/scripts/tracetool.py
+++ b/scripts/tracetool.py
diff --git a/scripts/tracetool/backend/dtrace.py b/scripts/tracetool/backend/dtrace.py
index fc0c8fc52f..e17edc9b9d 100644
--- a/scripts/tracetool/backend/dtrace.py
+++ b/scripts/tracetool/backend/dtrace.py
@@ -38,7 +38,7 @@ def generate_h_begin(events, group):
if group == "root":
header = "trace-dtrace-root.h"
else:
- header = "trace-dtrace.h"
+ header = "trace-dtrace-%s.h" % group
# Workaround for ust backend, which also includes <sys/sdt.h> and may
# require SDT_USE_VARIADIC to be defined. If dtrace includes <sys/sdt.h>
diff --git a/scripts/tracetool/backend/ust.py b/scripts/tracetool/backend/ust.py
index 6c0a5f8d68..c857516f21 100644
--- a/scripts/tracetool/backend/ust.py
+++ b/scripts/tracetool/backend/ust.py
@@ -19,11 +19,7 @@ PUBLIC = True
def generate_h_begin(events, group):
- if group == "root":
- header = "trace-ust-root.h"
- else:
- header = "trace-ust.h"
-
+ header = 'trace-ust-' + group + '.h'
out('#include <lttng/tracepoint.h>',
'#include "%s"' % header,
'',
diff --git a/scripts/tracetool/format/c.py b/scripts/tracetool/format/c.py
index 23d82ea861..c390c1844a 100644
--- a/scripts/tracetool/format/c.py
+++ b/scripts/tracetool/format/c.py
@@ -19,10 +19,7 @@ def generate(events, backend, group):
active_events = [e for e in events
if "disable" not in e.properties]
- if group == "root":
- header = "trace-root.h"
- else:
- header = "trace.h"
+ header = "trace-" + group + ".h"
out('/* This file is autogenerated by tracetool, do not edit. */',
'',
diff --git a/scripts/tracetool/format/tcg_h.py b/scripts/tracetool/format/tcg_h.py
index 33cf6a31b3..4d84440aff 100644
--- a/scripts/tracetool/format/tcg_h.py
+++ b/scripts/tracetool/format/tcg_h.py
@@ -28,7 +28,7 @@ def vcpu_transform_args(args):
def generate(events, backend, group):
if group == "root":
- header = "trace-root.h"
+ header = "trace/trace-root.h"
else:
header = "trace.h"
diff --git a/scripts/tracetool/format/tcg_helper_c.py b/scripts/tracetool/format/tcg_helper_c.py
index 2db6317f3c..72576e67d1 100644
--- a/scripts/tracetool/format/tcg_helper_c.py
+++ b/scripts/tracetool/format/tcg_helper_c.py
@@ -41,7 +41,7 @@ def vcpu_transform_args(args, mode):
def generate(events, backend, group):
if group == "root":
- header = "trace-root.h"
+ header = "trace/trace-root.h"
else:
header = "trace.h"
diff --git a/scripts/undefsym.sh b/scripts/undefsym.sh
new file mode 100755
index 0000000000..b9ec332e95
--- /dev/null
+++ b/scripts/undefsym.sh
@@ -0,0 +1,20 @@
#! /usr/bin/env bash

# Before a shared module's DSO is produced, a static library is built for it
# and passed to this script.  The script generates -Wl,-u options to force
# the inclusion of symbols from libqemuutil.a if the shared modules need them.
# This is necessary because the modules may use functions not needed by the
# executable itself, which would cause the functions to not be linked in.
# Then the DSO loading would fail because of the missing symbols.
#
# Usage: undefsym.sh NM STATICLIB MODULE...

# Nothing to do unless at least one module is given after NM and the
# static library.
if test $# -le 2; then
  exit 0
fi

NM=$1
staticlib=$2
shift 2

# Find symbols defined in the static library and undefined in the shared
# modules; print each as a -Wl,-u linker option.  "nm -P -g" gives the
# portable (POSIX) one-symbol-per-line format; column 2 is the type,
# "U" meaning undefined.
comm -12 \
  <( $NM -P -g "$staticlib" | awk '$2!="U"{print "-Wl,-u," $1}' | sort -u) \
  <( $NM -P -g "$@" | awk '$2=="U"{print "-Wl,-u," $1}' | sort -u)